diff --git a/.config/config.json b/.config/config.json index 6ace64a88b..ec8e712a2a 100644 --- a/.config/config.json +++ b/.config/config.json @@ -17,11 +17,6 @@ "api": "biohubbc-api", "app": "biohubbc-app" }, - "staticBranches": [ - "dev", - "test", - "prod" - ], "staticUrls": { "dev": "dev-biohubbc.apps.silver.devops.gov.bc.ca", "test": "test-biohubbc.apps.silver.devops.gov.bc.ca", @@ -37,11 +32,6 @@ "test": "n8n-af2668-test.apps.silver.devops.gov.bc.ca", "prod": "n8n-af2668-prod.apps.silver.devops.gov.bc.ca" }, - "certificateURL": { - "dev": "https://dev.oidc.gov.bc.ca/auth/realms/35r1iman/protocol/openid-connect/certs", - "test": "https://test.oidc.gov.bc.ca/auth/realms/35r1iman/protocol/openid-connect/certs", - "prod": "https://oidc.gov.bc.ca/auth/realms/35r1iman/protocol/openid-connect/certs" - }, "siteminderLogoutURL": { "dev": "https://logontest7.gov.bc.ca/clp-cgi/logoff.cgi", "test": "https://logontest7.gov.bc.ca/clp-cgi/logoff.cgi", @@ -49,19 +39,37 @@ }, "sso": { "dev": { - "url": "https://dev.oidc.gov.bc.ca/auth", - "clientId": "biohubbc", - "realm": "35r1iman" + "url": "https://dev.loginproxy.gov.bc.ca/auth", + "clientId": "sims-4461", + "realm": "standard", + "integrationId": "4461", + "adminHost": "https://loginproxy.gov.bc.ca/auth", + "adminUserName": "sims-svc-4464", + "apiHost": "https://api.loginproxy.gov.bc.ca/api/v1", + "keycloakSecret": "keycloak-admin-password", + "keycloakSecretAdminPassword": "keycloak_admin_password" }, "test": { - "url": "https://test.oidc.gov.bc.ca/auth", - "clientId": "biohubbc", - "realm": "35r1iman" + "url": "https://test.loginproxy.gov.bc.ca/auth", + "clientId": "sims-4461", + "realm": "standard", + "integrationId": "4461", + "adminHost": "https://loginproxy.gov.bc.ca/auth", + "adminUserName": "sims-svc-4464", + "apiHost": "https://api.loginproxy.gov.bc.ca/api/v1", + "keycloakSecret": "keycloak-admin-password", + "keycloakSecretAdminPassword": "keycloak_admin_password" }, "prod": { - "url": "https://oidc.gov.bc.ca/auth", - "clientId": "biohubbc", - "realm": "35r1iman" + "url": "https://loginproxy.gov.bc.ca/auth", + "clientId": "sims-4461", + "realm": "standard", + "integrationId": "4461", + "adminHost": "https://loginproxy.gov.bc.ca/auth", + "adminUserName": "sims-svc-4464", + "apiHost": "https://api.loginproxy.gov.bc.ca/api/v1", + "keycloakSecret": "keycloak-admin-password", + "keycloakSecretAdminPassword": "keycloak_admin_password" } } } diff --git a/.github/workflows/addComments.yml b/.github/workflows/addComments.yml index 81d26f32bb..39aeb3515e 100644 --- a/.github/workflows/addComments.yml +++ b/.github/workflows/addComments.yml @@ -3,9 +3,7 @@ name: Add Comments on: pull_request: - types: [opened] - branches: - - dev + types: [opened, ready_for_review] jobs: addOpenshiftURLComment: diff --git a/.github/workflows/cleanClosedPR.yml b/.github/workflows/cleanClosedPR.yml index c393772b86..f434f79c5f 100644 --- a/.github/workflows/cleanClosedPR.yml +++ b/.github/workflows/cleanClosedPR.yml @@ -1,40 +1,45 @@ -# Clean out the deployment artifacts when a PR is closed, without a merge. -# This clean out gets rid of the PR artifacts in Dev and in Tools. -name: Clean out Dev from closed PR Artifacts +# Clean out all deployment artifacts when a PR is closed, but not merged. +# Will attempt to remove all artifacts from any PR that was opened against any branch (and then closed), except for test and prod. +name: Clean Closed PR Artifacts + on: pull_request: - branches: [dev] + branches: + - '*' + - '!test' + - '!prod' types: [closed] + jobs: clean: name: Clean Deployment Artifacts for API and App in Dev and Tools environment runs-on: ubuntu-latest - # Only do this when the PR was not merged and only for dev - if: ${{ github.event.pull_request.merged != true && github.base_ref == 'dev' && github.event.pull_request.draft == false }} + # Don't run if the PR was merged + if: ${{ github.event.pull_request.merged != true }} env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} steps: # Checkout the PR branch - name: Checkout Dev Branch - Contains the Pipeline Code - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: - ref: 'dev' + ref: "dev" # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/.pipeline/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- ${{ runner.os }}-build- @@ -44,27 +49,34 @@ jobs: - name: Log in to OpenShift run: oc login --token=${{ secrets.TOOLS_SA_TOKEN }} --server=https://api.silver.devops.gov.bc.ca:6443 + # Clean the database build/deployment artifacts + - name: Clean Database Artifacts + working-directory: "./database/.pipeline/" + run: | + npm install --only=production + DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=build + DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=dev + # Clean the api deployment artifacts - name: Clean API Deployment working-directory: "./api/.pipeline/" run: | - npm ci - DEBUG=* npm run clean -- --pr=$BUILD_ID --env=build - DEBUG=* npm run clean -- --pr=$BUILD_ID --env=dev + npm install --only=production + DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=build + DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=dev # Clean the app deployment artifacts - name: Clean APP Deployment working-directory: "./app/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run clean -- --pr=$BUILD_ID --env=build - DEBUG=* npm run clean -- --pr=$BUILD_ID --env=dev + npm install --only=production + DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=build + DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=dev # Clean the reamaining build/deployment artifacts - name: Clean remaining Artifacts run: | oc project af2668-dev - oc get all,pvc,secret,pods,ReplicationController,DeploymentConfig,HorizontalPodAutoscaler,imagestreamtag -o name | grep biohubbc | grep $BUILD_ID | awk '{print "oc delete " $1}' | bash + oc get all,pvc,secret,pods,ReplicationController,DeploymentConfig,HorizontalPodAutoscaler,imagestreamtag -o name | grep biohubbc | grep $PR_NUMBER | awk '{print "oc delete " $1}' | bash oc project af2668-tools - oc get all,pvc,secret,pods,ReplicationController,DeploymentConfig,HorizontalPodAutoscaler,imagestreamtag -o name | grep biohubbc | grep $BUILD_ID | awk '{print "oc delete " $1}' | bash + oc get all,pvc,secret,pods,ReplicationController,DeploymentConfig,HorizontalPodAutoscaler,imagestreamtag -o name | grep biohubbc | grep $PR_NUMBER | awk '{print "oc delete " $1}' | bash diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 8f6d3a8664..44b551761a 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -4,12 +4,16 @@ name: PR-Based Deploy on OpenShift on: pull_request: - types: [opened, reopened, synchronize] + types: [opened, reopened, synchronize, ready_for_review] + +concurrency: + group: ${{ github.workflow }}-${{ github.event.number }} + cancel-in-progress: true jobs: # Print variables for logging and debugging purposes checkEnv: - name: Check Env variables + name: Print Env variables runs-on: ubuntu-latest if: ${{ github.event.pull_request.merged == false }} steps: @@ -24,13 +28,15 @@ jobs: echo Git Labels: "$LABELS" echo PR in Draft: ${{ github.event.pull_request.draft }} + # Scale down any existing OpenShift pods for this PR deployment + # Why? The new pods will be deployed before the existing pods are terminated, and twice the resources will be needed + # in that moment. If not enough resources are available to spin up the new pods, then they may fail to deploy. scaleDownPods: name: Scale down the pods for this PR runs-on: ubuntu-latest + if: ${{ github.event.pull_request.merged == false }} env: - BUILD_ID: ${{ github.event.number }} - needs: - - checkEnv + PR_NUMBER: ${{ github.event.number }} steps: # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -41,7 +47,40 @@ jobs: - name: Scale down run: | oc project af2668-dev - oc get deploymentconfig --selector env-id=$BUILD_ID -o name | awk '{print "oc scale --replicas=0 " $1}' | bash + oc get deploymentconfig --selector env-id=$PR_NUMBER -o name | awk '{print "oc scale --replicas=0 " $1}' | bash + + # Checkout the repo once and cache it for use in subsequent jobs + checkoutRepo: + name: Checkout and cache target branch + runs-on: ubuntu-latest + if: ${{ github.event.pull_request.merged == false }} + env: + PR_NUMBER: ${{ github.event.number }} + steps: + # Install Node - for `node` and `npm` commands + # Note: This already uses actions/cache internally, so repeat calls in subsequent jobs are not a performance hit + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: 14 + + - name: Checkout Target Branch + uses: actions/checkout@v3 + with: + persist-credentials: false + + # Cache the repo + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + # Cache repo based on the commit sha that triggered the workflow + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} # Build the Database image buildDatabase: @@ -49,33 +88,44 @@ jobs: runs-on: ubuntu-latest if: ${{ github.event.pull_request.merged == false && github.event.pull_request.draft == false }} env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} needs: - - scaleDownPods + - checkoutRepo steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -87,9 +137,8 @@ jobs: - name: Build Database Image working-directory: "./database/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run db:build -- --pr=$BUILD_ID + npm ci --only=production + DEBUG=* npm run db:build -- --pr=$PR_NUMBER # Build the Database Setup image buildDatabaseSetup: @@ -97,33 +146,44 @@ jobs: runs-on: ubuntu-latest if: ${{ github.event.pull_request.merged == false && github.event.pull_request.draft == false }} env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} needs: - - scaleDownPods + - checkoutRepo steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -135,9 +195,8 @@ jobs: - name: Build Database Setup Image working-directory: "./database/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run db-setup:build -- --pr=$BUILD_ID + npm ci --only=production + DEBUG=* npm run db-setup:build -- --pr=$PR_NUMBER # Build the API image buildAPI: @@ -145,33 +204,44 @@ jobs: runs-on: ubuntu-latest if: ${{ github.event.pull_request.merged == false && github.event.pull_request.draft == false }} env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} needs: - - scaleDownPods + - checkoutRepo steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -183,43 +253,53 @@ jobs: - name: Build API Image working-directory: "./api/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run build -- --pr=$BUILD_ID + npm ci --only=production + DEBUG=* npm run build -- --pr=$PR_NUMBER # Build the web frontend app image buildAPP: - name: Build App Image + name: Build APP Image runs-on: ubuntu-latest if: ${{ github.event.pull_request.merged == false && github.event.pull_request.draft == false }} env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} needs: - - scaleDownPods + - checkoutRepo steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -231,9 +311,8 @@ jobs: - name: Build APP Image working-directory: "./app/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run build -- --pr=$BUILD_ID + npm ci --only=production + DEBUG=* npm run build -- --pr=$PR_NUMBER # Deploy Database image deployDatabase: @@ -241,34 +320,45 @@ jobs: runs-on: ubuntu-latest if: ${{ github.event.pull_request.merged == false && github.event.pull_request.draft == false }} env: - BUILD_ID: ${{ github.event.number }} - BRANCH: ${{ github.base_ref }} + PR_NUMBER: ${{ github.event.number }} needs: + - scaleDownPods - buildDatabase steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -280,8 +370,8 @@ jobs: - name: Deploy Database Image working-directory: "./database/.pipeline/" run: | - npm install - DEBUG=* npm run db:deploy -- --pr=$BUILD_ID --env=dev + npm ci --only=production + DEBUG=* npm run db:deploy -- --pr=$PR_NUMBER --env=dev # Deploy Database image deployDatabaseSetup: @@ -289,35 +379,46 @@ jobs: runs-on: ubuntu-latest if: ${{ github.event.pull_request.merged == false && github.event.pull_request.draft == false }} env: - BUILD_ID: ${{ github.event.number }} - BRANCH: ${{ github.base_ref }} + PR_NUMBER: ${{ github.event.number }} needs: + - scaleDownPods - buildDatabaseSetup - deployDatabase steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -329,9 +430,8 @@ jobs: - name: Deploy Database Setup Image working-directory: "./database/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run db-setup:deploy -- --pr=$BUILD_ID --env=dev + npm ci --only=production + DEBUG=* npm run db-setup:deploy -- --pr=$PR_NUMBER --env=dev # Deploy API image deployAPI: @@ -339,35 +439,46 @@ jobs: runs-on: ubuntu-latest if: ${{ github.event.pull_request.merged == false && github.event.pull_request.draft == false }} env: - BUILD_ID: ${{ github.event.number }} - BRANCH: ${{ github.base_ref }} + PR_NUMBER: ${{ github.event.number }} needs: + - scaleDownPods - buildAPI - deployDatabase steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -379,44 +490,54 @@ jobs: - name: Deploy API Image working-directory: "./api/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run deploy -- --pr=$BUILD_ID --env=dev + npm ci --only=production + DEBUG=* npm run deploy -- --pr=$PR_NUMBER --env=dev - # Deploy App image + # Deploy APP image deployAPP: - name: Deploy App Image + name: Deploy APP Image runs-on: ubuntu-latest if: ${{ github.event.pull_request.merged == false && github.event.pull_request.draft == false }} env: - BUILD_ID: ${{ github.event.number }} - BRANCH: ${{ github.base_ref }} + PR_NUMBER: ${{ github.event.number }} needs: + - scaleDownPods - buildAPP steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -425,12 +546,11 @@ jobs: run: oc login --token=${{ secrets.TOOLS_SA_TOKEN }} --server=https://api.silver.devops.gov.bc.ca:6443 # Deploy the app image - - name: Deploy App Image + - name: Deploy APP Image working-directory: "./app/.pipeline" run: | - npm install - # npm ci - DEBUG=* npm run deploy -- --pr=$BUILD_ID --env=dev + npm ci --only=production + DEBUG=* npm run deploy -- --pr=$PR_NUMBER --env=dev cypress-run: runs-on: ubuntu-latest @@ -439,12 +559,12 @@ jobs: CYPRESS_RECORD_KEY: ${{ secrets.RECORDING_KEY }} CYPRESS_username: ${{ secrets.CYPRESS_USER_NAME }} CYPRESS_password: ${{ secrets.CYPRESS_PASSWORD }} - CYPRESS_BASE_URL: 'https://biohubbc-app-${{ github.event.number }}-af2668-dev.apps.silver.devops.gov.bc.ca' - CYPRESS_host: 'https://biohubbc-app-${{ github.event.number }}-af2668-dev.apps.silver.devops.gov.bc.ca' + CYPRESS_BASE_URL: "https://biohubbc-app-${{ github.event.number }}-af2668-dev.apps.silver.devops.gov.bc.ca" + CYPRESS_host: "https://biohubbc-app-${{ github.event.number }}-af2668-dev.apps.silver.devops.gov.bc.ca" CYPRESS_ENVIRONMENT: ${{ github.base_ref }} - CYPRESS_authRealm: '35r1iman' - CYPRESS_authClientId: 'biohubbc' - CYPRESS_authUrl: 'https://${{ github.base_ref }}.oidc.gov.bc.ca' + CYPRESS_authRealm: "35r1iman" + CYPRESS_authClientId: "biohubbc" + CYPRESS_authUrl: "https://${{ github.base_ref }}.oidc.gov.bc.ca" needs: - deployDatabase - deployDatabaseSetup @@ -453,12 +573,12 @@ jobs: steps: # Checkout the PR branch - name: Checkout Target Branch - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: Wait for API response uses: nev7n/wait_for_response@v1.0.1 with: - url: 'https://biohubbc-api-${{ github.event.number }}-af2668-dev.apps.silver.devops.gov.bc.ca/version' + url: "https://biohubbc-api-${{ github.event.number }}-af2668-dev.apps.silver.devops.gov.bc.ca/version" responseCode: 200 timeout: 240000 interval: 500 @@ -466,16 +586,16 @@ jobs: - name: Wait for APP response uses: nev7n/wait_for_response@v1.0.1 with: - url: 'https://biohubbc-app-${{ github.event.number }}-af2668-dev.apps.silver.devops.gov.bc.ca' + url: "https://biohubbc-app-${{ github.event.number }}-af2668-dev.apps.silver.devops.gov.bc.ca" responseCode: 200 timeout: 120000 interval: 500 # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: - node-version: 16 + node-version: 14 - name: E2E Smoke tests uses: cypress-io/github-action@v2 @@ -484,7 +604,7 @@ jobs: id: smoke continue-on-error: false with: - wait-on: 'https://biohubbc-app-${{ github.event.number }}-af2668-dev.apps.silver.devops.gov.bc.ca' + wait-on: "https://biohubbc-app-${{ github.event.number }}-af2668-dev.apps.silver.devops.gov.bc.ca" wait-on-timeout: 120 record: true working-directory: testing/e2e @@ -501,4 +621,3 @@ jobs: echo $CYPRESS_authRealm echo $CYPRESS_authClientId echo $CYPRESS_authUrl - diff --git a/.github/workflows/deployStatic.yml b/.github/workflows/deployStatic.yml index f180460115..29ff7417eb 100644 --- a/.github/workflows/deployStatic.yml +++ b/.github/workflows/deployStatic.yml @@ -4,7 +4,7 @@ name: Static Deploy on OpenShift on: pull_request: - types: [opened, reopened, synchronize, closed] + types: [closed] branches: - dev - test @@ -13,7 +13,7 @@ on: jobs: # Print variables for logging and debugging purposes checkEnv: - name: Check Env variables + name: Print Env variables runs-on: ubuntu-latest if: ${{ github.event.pull_request.merged == true }} steps: @@ -28,13 +28,15 @@ jobs: echo Git Labels: "$LABELS" echo PR in Draft: ${{ github.event.pull_request.draft }} + # Scale down any existing OpenShift pods for this PR deployment + # Why? The new pods will be deployed before the existing pods are terminated, and twice the resources will be needed + # in that moment. If not enough resources are available to spin up the new pods, then they may fail to deploy. scaleDownPods: name: Scale down the pods for this PR runs-on: ubuntu-latest + if: ${{ github.event.pull_request.merged == true }} env: - BUILD_ID: ${{ github.event.number }} - needs: - - checkEnv + PR_NUMBER: ${{ github.event.number }} steps: # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -45,42 +47,86 @@ jobs: - name: Scale down run: | oc project af2668-dev - oc get deploymentconfig --selector env-id=$BUILD_ID -o name | awk '{print "oc scale --replicas=0 " $1}' | bash + oc get deploymentconfig --selector env-id=$PR_NUMBER -o name | awk '{print "oc scale --replicas=0 " $1}' | bash + + # Checkout the repo once and cache it for use in subsequent jobs + checkoutRepo: + name: Checkout and cache target branch + runs-on: ubuntu-latest + if: ${{ github.event.pull_request.merged == true }} + env: + PR_NUMBER: ${{ github.event.number }} + steps: + # Install Node - for `node` and `npm` commands + # Note: already uses actions/cache internally, so repeat calls are not a performance hit + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: 14 + + - name: Checkout Target Branch + uses: actions/checkout@v3 + with: + persist-credentials: false + + # Cache the repo + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + # Cache repo based on the commit sha that triggered the workflow + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} # Build the Database image buildDatabase: name: Build Database Image runs-on: ubuntu-latest - if: ${{ github.event.pull_request.merged == true && github.event.pull_request.draft == false }} + if: ${{ github.event.pull_request.merged == true }} env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} BRANCH: ${{ github.base_ref }} needs: - - scaleDownPods + - checkoutRepo steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -92,43 +138,54 @@ jobs: - name: Build Database Image working-directory: "./database/.pipeline/" run: | - npm install - DEBUG=* npm run db:build -- --pr=$BUILD_ID --branch=$BRANCH --type=static + npm ci --only=production + DEBUG=* npm run db:build -- --pr=$PR_NUMBER --branch=$BRANCH --type=static # Build the Database Setup image buildDatabaseSetup: name: Build Database Setup Image runs-on: ubuntu-latest - if: ${{ github.event.pull_request.merged == true && github.event.pull_request.draft == false }} + if: ${{ github.event.pull_request.merged == true }} env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} BRANCH: ${{ github.base_ref }} needs: - - scaleDownPods + - checkoutRepo steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -140,46 +197,58 @@ jobs: - name: Build Database Setup Image working-directory: "./database/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run db-setup:build -- --pr=$BUILD_ID --branch=$BRANCH --type=static + npm ci --only=production + DEBUG=* npm run db-setup:build -- --pr=$PR_NUMBER --branch=$BRANCH --type=static # Build the API image buildAPI: name: Build API Image runs-on: ubuntu-latest - if: ${{ github.event.pull_request.merged == true && github.event.pull_request.draft == false }} + if: ${{ github.event.pull_request.merged == true }} env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} BRANCH: ${{ github.base_ref }} needs: - - scaleDownPods + - checkoutRepo steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. + # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. + # PR's must originate from a branch off the original repo or else all openshift `oc` commands will fail. - name: Log in to OpenShift run: oc login --token=${{ secrets.TOOLS_SA_TOKEN }} --server=https://api.silver.devops.gov.bc.ca:6443 @@ -187,44 +256,54 @@ jobs: - name: Build API Image working-directory: "./api/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run build -- --pr=$BUILD_ID --branch=$BRANCH --type=static + npm ci --only=production + DEBUG=* npm run build -- --pr=$PR_NUMBER --branch=$BRANCH --type=static # Build the web frontend app buildAPP: - name: Build App Image + name: Build APP Image runs-on: ubuntu-latest - if: ${{ github.event.pull_request.merged == true && github.event.pull_request.draft == false }} + if: ${{ github.event.pull_request.merged == true }} env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} BRANCH: ${{ github.base_ref }} needs: - - scaleDownPods + - checkoutRepo steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -236,44 +315,55 @@ jobs: - name: Build APP Image working-directory: "./app/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run build -- --pr=$BUILD_ID --branch=$BRANCH --type=static + npm ci --only=production + DEBUG=* npm run build -- --pr=$PR_NUMBER --branch=$BRANCH --type=static # Deploy Database image deployDatabase: name: Deploy Database Image runs-on: ubuntu-latest - if: ${{ github.event.pull_request.merged == true && github.event.pull_request.draft == false }} + if: ${{ github.event.pull_request.merged == true }} env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} BRANCH: ${{ github.base_ref }} needs: + - scaleDownPods - buildDatabase steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -285,45 +375,56 @@ jobs: - name: Deploy Database Image working-directory: "./database/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run db:deploy -- --pr=$BUILD_ID --env=$BRANCH --branch=$BRANCH --type=static + npm ci --only=production + DEBUG=* npm run db:deploy -- --pr=$PR_NUMBER --env=$BRANCH --branch=$BRANCH --type=static - # Deploy Database image + # Deploy Database setup image deployDatabaseSetup: name: Deploy Database Setup Image runs-on: ubuntu-latest - if: ${{ github.event.pull_request.merged == true && github.event.pull_request.draft == false }} + if: ${{ github.event.pull_request.merged == true }} env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} BRANCH: ${{ github.base_ref }} needs: + - scaleDownPods - buildDatabaseSetup - deployDatabase steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -335,45 +436,56 @@ jobs: - name: Deploy Database Setup Image working-directory: "./database/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run db-setup:deploy -- --pr=$BUILD_ID --env=$BRANCH --branch=$BRANCH --type=static + npm ci --only=production + DEBUG=* npm run db-setup:deploy -- --pr=$PR_NUMBER --env=$BRANCH --branch=$BRANCH --type=static # Deploy API image deployAPI: name: Deploy API Image runs-on: ubuntu-latest - if: ${{ github.event.pull_request.merged == true && github.event.pull_request.draft == false }} + if: ${{ github.event.pull_request.merged == true }} env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} BRANCH: ${{ github.base_ref }} needs: + - scaleDownPods - buildAPI - deployDatabase steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -385,44 +497,55 @@ jobs: - name: Deploy API Image working-directory: "./api/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run deploy -- --pr=$BUILD_ID --env=$BRANCH --branch=$BRANCH --type=static + npm ci --only=production + DEBUG=* npm run deploy -- --pr=$PR_NUMBER --env=$BRANCH --branch=$BRANCH --type=static - # Deploy App image + # Deploy APP image deployAPP: - name: Deploy App Image + name: Deploy APP Image runs-on: ubuntu-latest - if: ${{ github.event.pull_request.merged == true && github.event.pull_request.draft == false }} + if: ${{ github.event.pull_request.merged == true }} env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} BRANCH: ${{ github.base_ref }} needs: + - scaleDownPods - buildAPP steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -431,49 +554,81 @@ jobs: run: oc login --token=${{ secrets.TOOLS_SA_TOKEN }} --server=https://api.silver.devops.gov.bc.ca:6443 # Deploy the app image - - name: Deploy App Image + - name: Deploy APP Image working-directory: "./app/.pipeline" run: | - npm install - # npm ci - DEBUG=* npm run deploy -- --pr=$BUILD_ID --env=$BRANCH --branch=$BRANCH --type=static + npm ci --only=production + DEBUG=* npm run deploy -- --pr=$PR_NUMBER --env=$BRANCH --branch=$BRANCH --type=static + + cycleschemaspy: + name: Cycle SchemaSpy to refresh after database update in dev + runs-on: ubuntu-latest + if: ${{ github.event.pull_request.merged == true && github.base_ref == 'dev' }} + env: + PR_NUMBER: ${{ github.event.number }} + needs: + - deployDatabaseSetup + steps: + # Log in to OpenShift. + # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. + # PR's must originate from a branch off the original repo or else all openshift `oc` commands will fail. + - name: Log in to OpenShift + run: oc login --token=${{ secrets.TOOLS_SA_TOKEN }} --server=https://api.silver.devops.gov.bc.ca:6443 + + - name: Scale down + run: | + oc project af2668-dev + oc scale --replicas=0 dc schemaspy + oc scale --replicas=1 dc schemaspy # Clean build/deployment artifacts clean: name: Clean Build/Deployment Artifacts runs-on: ubuntu-latest - if: ${{ github.event.pull_request.merged == true && github.event.pull_request.draft == false }} + if: ${{ github.event.pull_request.merged == true }} needs: - - deployAPP - - deployAPI + - deployDatabase - deployDatabaseSetup + - deployAPI + - deployAPP env: - BUILD_ID: ${{ github.event.number }} + PR_NUMBER: ${{ github.event.number }} BRANCH: ${{ github.base_ref }} steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - # Install Node - for `node` and `npm` commands - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + # Cache Node modules - name: Cache node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-node-modules with: # npm cache files are stored in `~/.npm` on Linux/macOS path: ~/.npm - key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('**/package-lock.json') }} + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ hashFiles('*/package-lock.json') }} restore-keys: | ${{ runner.os }}-build-${{ env.cache-name }}- - ${{ runner.os }}-build- - ${{ runner.os }}- # Log in to OpenShift. # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. @@ -485,140 +640,30 @@ jobs: - name: Clean Database Artifacts working-directory: "./database/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run clean -- --pr=$BUILD_ID --env=build - DEBUG=* npm run clean -- --pr=$BUILD_ID --env=dev + npm ci --only=production + DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=build + DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=dev # Clean the api build/deployment artifacts - name: Clean API Artifacts working-directory: "./api/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run clean -- --pr=$BUILD_ID --env=build - DEBUG=* npm run clean -- --pr=$BUILD_ID --env=dev + npm ci --only=production + DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=build + DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=dev # Clean the app build/deployment artifacts - name: Clean APP Artifacts working-directory: "./app/.pipeline/" run: | - npm install - # npm ci - DEBUG=* npm run clean -- --pr=$BUILD_ID --env=build - DEBUG=* npm run clean -- --pr=$BUILD_ID --env=dev + npm ci --only=production + DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=build + DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=dev # Clean the reamaining build/deployment artifacts - name: Clean remaining Artifacts run: | oc project af2668-dev - oc get all,pvc,secret,pods,ReplicationController,DeploymentConfig,HorizontalPodAutoscaler,imagestreamtag -o name | grep biohubbc | grep $BUILD_ID | awk '{print "oc delete " $1}' | bash + oc get all,pvc,secret,pods,ReplicationController,DeploymentConfig,HorizontalPodAutoscaler,imagestreamtag -o name | grep biohubbc | grep $PR_NUMBER | awk '{print "oc delete " $1}' | bash oc project af2668-tools - oc get all,pvc,secret,pods,ReplicationController,DeploymentConfig,HorizontalPodAutoscaler,imagestreamtag -o name | grep biohubbc | grep $BUILD_ID | awk '{print "oc delete " $1}' | bash - - cycleschemaspy: - name: Cycle SchemaSpy to refresh after database update in dev - runs-on: ubuntu-latest - if: ${{ github.event.pull_request.merged == true && github.event.pull_request.draft == false && github.base_ref == 'dev' }} - env: - BUILD_ID: ${{ github.event.number }} - needs: - - checkEnv - - deployDatabaseSetup - steps: - # Log in to OpenShift. - # Note: The secrets needed to log in are NOT available if the PR comes from a FORK. - # PR's must originate from a branch off the original repo or else all openshift `oc` commands will fail. - - name: Log in to OpenShift - run: oc login --token=${{ secrets.TOOLS_SA_TOKEN }} --server=https://api.silver.devops.gov.bc.ca:6443 - - - name: Scale down - run: | - oc project af2668-dev - oc scale --replicas=0 dc schemaspy - oc scale --replicas=1 dc schemaspy - - cypress-run: - runs-on: ubuntu-latest - if: ${{ github.event.pull_request.merged == true && github.event.pull_request.draft == false && github.base_ref != 'prod' }} - env: - CYPRESS_RECORD_KEY: ${{ secrets.RECORDING_KEY }} - CYPRESS_username: ${{ secrets.CYPRESS_USER_NAME }} - CYPRESS_password: ${{ secrets.CYPRESS_PASSWORD }} - CYPRESS_BASE_URL: "https://${{ github.base_ref }}-biohubbc.apps.silver.devops.gov.bc.ca" - CYPRESS_host: "https://${{ github.base_ref }}-biohubbc.apps.silver.devops.gov.bc.ca" - CYPRESS_ENVIRONMENT: ${{ github.base_ref }} - CYPRESS_authRealm: "35r1iman" - CYPRESS_authClientId: "biohubbc" - CYPRESS_authUrl: "https://${{ github.base_ref }}.oidc.gov.bc.ca" - needs: - - deployDatabase - - deployDatabaseSetup - - deployAPI - - deployAPP - steps: - # Checkout the PR branch - - name: Checkout Target Branch - uses: actions/checkout@v2 - - - name: Wait for API response - uses: nev7n/wait_for_response@v1.0.1 - with: - url: "https://api-${{ github.base_ref }}-biohubbc.apps.silver.devops.gov.bc.ca/version" - responseCode: 200 - timeout: 240000 - interval: 500 - - - name: Wait for APP response - uses: nev7n/wait_for_response@v1.0.1 - with: - url: "https://${{ github.base_ref }}-biohubbc.apps.silver.devops.gov.bc.ca" - responseCode: 200 - timeout: 240000 - interval: 500 - - # Install Node - for `node` and `npm` commands - - name: Setup Node.js - uses: actions/setup-node@v2 - with: - node-version: 16 - - - name: E2E Smoke tests - uses: cypress-io/github-action@v2 - # let's give this action an ID so we can refer - # to its output values later - id: smoke - continue-on-error: false - with: - wait-on: "https://${{ github.base_ref }}-biohubbc.apps.silver.devops.gov.bc.ca" - wait-on-timeout: 240 - record: true - working-directory: testing/e2e - - - name: Print Env Vars - run: | - echo Git Base Ref: ${{ github.base_ref }} - echo Git Change ID: ${{ github.event.number }} - echo Cypress BaseUrl: $CYPRESS_BASE_URL - echo Cypress Host: $CYPRESS_ENVIRONMENT - echo $CYPRESS_authRealm - echo $CYPRESS_authClientId - echo $CYPRESS_authUrl - - notify: - name: Discord Notification - runs-on: ubuntu-latest - if: ${{ github.event.pull_request.merged == true && github.event.pull_request.draft == false }} && always() - needs: # make sure the notification is sent AFTER the jobs you want included have completed - - deployAPP - - deployAPI - - deployDatabaseSetup - steps: - - name: Notify - uses: nobrayner/discord-webhook@v1 - with: - github-token: ${{ secrets.github_token }} - discord-webhook: ${{ secrets.DISCORD_WEBHOOK }} - title: "${{ github.workflow }}: {{STATUS}}" - username: ${{ github.actor }} - description: "PR: ${{ github.event.number }} - ${{ github.event.pull_request.title }}: was deployed in ${{ github.base_ref }}!" + oc get all,pvc,secret,pods,ReplicationController,DeploymentConfig,HorizontalPodAutoscaler,imagestreamtag -o name | grep biohubbc | grep $PR_NUMBER | awk '{print "oc delete " $1}' | bash diff --git a/.github/workflows/e2e-test.yaml b/.github/workflows/e2e-test.yaml index ae5f2ff2cf..d93da0131d 100644 --- a/.github/workflows/e2e-test.yaml +++ b/.github/workflows/e2e-test.yaml @@ -5,7 +5,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 - name: E2E Smoke tests uses: cypress-io/github-action@v2 diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index 3102a53d2c..9c09cf9129 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -2,23 +2,75 @@ name: Formating Checks on: pull_request: - branches: - - dev + types: [opened, reopened, synchronize, ready_for_review] + +concurrency: + group: ${{ github.workflow }}-${{ github.event.number }} + cancel-in-progress: true jobs: - format: - name: Running Formatter + # Checkout the repo once and cache it for use in subsequent jobs + checkoutRepo: + name: Checkout and cache target branch runs-on: ubuntu-latest + env: + PR_NUMBER: ${{ github.event.number }} steps: - - uses: actions/checkout@v2 + # Install Node - for `node` and `npm` commands + # Note: This already uses actions/cache internally, so repeat calls in subsequent jobs are not a performance hit + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: 14 + + - name: Checkout Target Branch + uses: actions/checkout@v3 + with: + persist-credentials: false + # Cache the repo + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + # Cache repo based on the commit sha that triggered the workflow + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + formatDatabase: + name: Running Database Formatter + runs-on: ubuntu-latest + needs: + - checkoutRepo + steps: - name: Setup node - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + - name: Cache database node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-database-node-modules with: @@ -36,8 +88,36 @@ jobs: working-directory: database run: CI=true npm run format + formatAPI: + name: Running API Formatter + runs-on: ubuntu-latest + needs: + - checkoutRepo + steps: + - name: Setup node + uses: actions/setup-node@v3 + with: + node-version: 14 + + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + - name: Cache api node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-api-node-modules with: @@ -55,8 +135,36 @@ jobs: working-directory: api run: CI=true npm run format + formatAPP: + name: Running APP Formatter + runs-on: ubuntu-latest + needs: + - checkoutRepo + steps: + - name: Setup node + uses: actions/setup-node@v3 + with: + node-version: 14 + + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + - name: Cache app node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-app-node-modules with: diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 70066548bd..bc1aa5ccef 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -2,23 +2,75 @@ name: Linting Checks on: pull_request: - branches: - - dev + types: [opened, reopened, synchronize, ready_for_review] + +concurrency: + group: ${{ github.workflow }}-${{ github.event.number }} + cancel-in-progress: true jobs: - lint: - name: Running Linter + # Checkout the repo once and cache it for use in subsequent jobs + checkoutRepo: + name: Checkout and cache target branch runs-on: ubuntu-latest + env: + PR_NUMBER: ${{ github.event.number }} steps: - - uses: actions/checkout@v2 + # Install Node - for `node` and `npm` commands + # Note: This already uses actions/cache internally, so repeat calls in subsequent jobs are not a performance hit + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: 14 + + - name: Checkout Target Branch + uses: actions/checkout@v3 + with: + persist-credentials: false + # Cache the repo + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + # Cache repo based on the commit sha that triggered the workflow + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + lintDatabase: + name: Running Database Linter + runs-on: ubuntu-latest + needs: + - checkoutRepo + steps: - name: Setup node - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + - name: Cache database node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-database-node-modules with: @@ -36,8 +88,36 @@ jobs: working-directory: database run: CI=true npm run lint + lintAPI: + name: Running API Linter + runs-on: ubuntu-latest + needs: + - checkoutRepo + steps: + - name: Setup node + uses: actions/setup-node@v3 + with: + node-version: 14 + + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + - name: Cache api node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-api-node-modules with: @@ -55,8 +135,36 @@ jobs: working-directory: api run: CI=true npm run lint + lintAPP: + name: Running APP Linter + runs-on: ubuntu-latest + needs: + - checkoutRepo + steps: + - name: Setup node + uses: actions/setup-node@v3 + with: + node-version: 14 + + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + - name: Cache app node modules - uses: actions/cache@v2 + uses: actions/cache@v3 env: cache-name: cache-app-node-modules with: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index cf36386051..dc652830c1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,27 +1,79 @@ name: Test Checks on: - push: - branches: - - dev pull_request: + types: [opened, reopened, synchronize, ready_for_review] + push: branches: - dev +concurrency: + group: ${{ github.workflow }}-${{ github.event.number }} + cancel-in-progress: true + jobs: - test: - name: Running Tests + # Checkout the repo once and cache it for use in subsequent jobs + checkoutRepo: + name: Checkout and cache target branch runs-on: ubuntu-latest + env: + PR_NUMBER: ${{ github.event.number }} steps: - - uses: actions/checkout@v2 + # Install Node - for `node` and `npm` commands + # Note: This already uses actions/cache internally, so repeat calls in subsequent jobs are not a performance hit + - name: Setup Node.js + uses: actions/setup-node@v3 + with: + node-version: 14 + + - name: Checkout Target Branch + uses: actions/checkout@v3 + with: + persist-credentials: false + + # Cache the repo + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + # Cache repo based on the commit sha that triggered the workflow + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + testAPI: + name: Running API Tests + runs-on: ubuntu-latest + needs: + - checkoutRepo + steps: - name: Setup node - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: 14 + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + - name: Cache api node modules - uses: actions/cache@v2.1.6 + uses: actions/cache@v3 env: cache-name: cache-api-node-modules with: @@ -37,17 +89,44 @@ jobs: - name: Run api tests with coverage working-directory: api - run: CI=true - npm run coverage + run: CI=true npm run coverage - name: Upload coverage to Codecov - uses: codecov/codecov-action@v2.1.0 + uses: codecov/codecov-action@v3 with: token: ${{secrets.CODECOV_TOKEN}} fail_ci_if_error: false + testAPP: + name: Running APP Tests + runs-on: ubuntu-latest + needs: + - checkoutRepo + steps: + - name: Setup node + uses: actions/setup-node@v3 + with: + node-version: 14 + + # Load repo from cache + - name: Cache repo + uses: actions/cache@v3 + id: cache-repo + env: + cache-name: cache-repo + with: + path: ${{ github.workspace }}/* + key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }} + + # Checkout the branch if not restored via cache + - name: Checkout Target Branch + if: steps.cache-repo.outputs.cache-hit != 'true' + uses: actions/checkout@v3 + - name: Cache app node modules - uses: actions/cache@v2.1.6 + uses: actions/cache@v3 env: cache-name: cache-app-node-modules with: @@ -63,11 +142,10 @@ jobs: - name: Run app tests with coverage working-directory: app - run: CI=true - npm run coverage + run: CI=true npm run coverage - name: Upload coverage to Codecov - uses: codecov/codecov-action@v2.1.0 + uses: codecov/codecov-action@v3 with: token: ${{secrets.CODECOV_TOKEN}} fail_ci_if_error: false diff --git a/.github/workflows/zap.yml b/.github/workflows/zap.yml index 917aa7b73e..c43a554405 100644 --- a/.github/workflows/zap.yml +++ b/.github/workflows/zap.yml @@ -9,7 +9,7 @@ jobs: CYPRESS_password: ${{ secrets.CYPRESS_PASSWORD }} steps: - name: Checkout - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: ref: dev - name: Subtitute Password diff --git a/.gitignore b/.gitignore index 72cc6bde03..77c754504e 100644 --- a/.gitignore +++ b/.gitignore @@ -113,3 +113,6 @@ dist .n8n n8n/.n8n n8n/.config + +# IDE custom workspace settings +.vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index 3b66410730..0000000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "git.ignoreLimitWarning": true -} \ No newline at end of file diff --git a/Makefile b/Makefile index ccbc88eb55..781cd60ef8 100644 --- a/Makefile +++ b/Makefile @@ -9,9 +9,6 @@ # Apply the contents of the .env to the terminal, so that the docker-compose file can use them in its builds export $(shell sed 's/=.*//' .env) -.DEFAULT : help -.PHONY : setup close clean build-backend run-backend build-web run-web database app api db-setup db-migrate db-rollback n8n-setup n8n-export clamav install test cypress lint lint-fix format format-fix help - ## ------------------------------------------------------------------------------ ## Alias Commands ## - Performs logical groups of commands for your convenience @@ -35,6 +32,8 @@ n8n-setup: | build-n8n-setup run-n8n-setup ## Performs all commands necessary to n8n-export: | build-n8n-export run-n8n-export ## Performs all commands necessary to export the latest n8n credentials and workflows clamav: | build-clamav run-clamav ## Performs all commands necessary to run clamav +fix: | lint-fix format-fix ## Performs both lint-fix and format-fix commands + ## ------------------------------------------------------------------------------ ## Setup/Cleanup Commands ## ------------------------------------------------------------------------------ @@ -57,6 +56,13 @@ clean: ## Closes and cleans (removes) all project containers @echo "===============================================" @docker-compose -f docker-compose.yml down -v --rmi all --remove-orphans +prune: ## Deletes ALL docker artifacts (even those not associated to this project) + @echo -n "Delete ALL docker artifacts? [y/n] " && read ans && [ $${ans:-n} = y ] + @echo "===============================================" + @echo "Make: prune - deleting all docker artifacts + @echo "===============================================" + @docker system prune --all --volumes + ## ------------------------------------------------------------------------------ ## Build/Run Postgres DB Commands ## - Builds all of the SIMS postgres db projects (db, db_setup) @@ -159,7 +165,7 @@ run-db-setup: ## Run the database migrations and seeding build-db-migrate: ## Build the db knex migrations image @echo "===============================================" - @echo "Make: build-db-migrate - bnuilding db knex migrate image" + @echo "Make: build-db-migrate - building db knex migrate image" @echo "===============================================" @docker-compose -f docker-compose.yml build db_migrate @@ -315,6 +321,23 @@ format-fix: ## Runs `npm run format:fix` for all projects @echo "===============================================" @cd database && npm run format:fix && cd .. +## ------------------------------------------------------------------------------ +## Run `npm` commands for all projects ./.pipeline +## ------------------------------------------------------------------------------ +pipeline-install: ## Runs `npm install` for all projects + @echo "===============================================" + @echo "Running /api/.pipeline install" + @echo "===============================================" + @cd api/.pipeline && npm install && cd ../.. + @echo "===============================================" + @echo "Running /app/.pipeline install" + @echo "===============================================" + @cd app/.pipeline && npm install && cd ../.. + @echo "===============================================" + @echo "Running /database/.pipeline install" + @echo "===============================================" + @cd database/.pipeline && npm install && cd ../.. + ## ------------------------------------------------------------------------------ ## Run `docker logs -f` commands for all projects ## - You can include additional parameters by appaending an `args` param diff --git a/api/.pipeline/.eslintrc b/api/.pipeline/.eslintrc new file mode 100644 index 0000000000..1a9658bb1a --- /dev/null +++ b/api/.pipeline/.eslintrc @@ -0,0 +1,21 @@ +{ + "root": true, + "extends": ["eslint:recommended", "plugin:prettier/recommended"], + "parserOptions": { + "ecmaVersion": 2018, + "sourceType": "module" + }, + "plugins": ["prettier"], + "env": { + "es6": true, + "node": true, + "mongo": true, + "mocha": true, + "jest": true, + "jasmine": true + }, + "rules": { + "prettier/prettier": ["warn"], + "no-var": "error" + } +} diff --git a/api/.pipeline/.prettierrc b/api/.pipeline/.prettierrc new file mode 100644 index 0000000000..a064d97523 --- /dev/null +++ b/api/.pipeline/.prettierrc @@ -0,0 +1,16 @@ +{ + "printWidth": 120, + "tabWidth": 2, + "useTabs": false, + "semi": true, + "singleQuote": true, + "trailingComma": "none", + "bracketSpacing": true, + "jsxBracketSameLine": true, + "requirePragma": false, + "insertPragma": false, + "proseWrap": "never", + "endOfLine": "lf", + "arrowParens": "always", + "htmlWhitespaceSensitivity": "ignore" +} \ No newline at end of file diff --git a/api/.pipeline/README.md b/api/.pipeline/README.md deleted file mode 100644 index a0e5bbaaed..0000000000 --- a/api/.pipeline/README.md +++ /dev/null @@ -1,85 +0,0 @@ -# Openshift PR-Based Pipeline via GitHub Actions - -## Important files/folders - -- ### _./.github/workflows/_ - - The workflows executed by the GitHub Actions mechanism - - - #### _./github/workflows/deploy.yml_ - - The workflow that runs when a PR is opened. - - - #### _./github/workflows/deployStatic.yml_ - - The workflow that runs when a PR is closed/merged. - -- ### _./.config/config.json_ - - Root config object used in various parts of the pipeline - -- ### _./api/.pipeline/_ - - Contains all of the jobs executed as part of one or more GitHub workflows - - - #### _./api/.pipeline/package.json_ - - Defines the scripts executed in one or more steps (in a GitHub Workflow->job) - - - #### _./api/.pipeline/config.js_ - - Defines the scripts executed in one or more steps (in a GitHub Workflow->job) - - - #### _./api/.pipeline/lib/_ - - Defines additional config used by the pipeline, extending the root config. - - - #### _./api/.pipeline/utils/_ - - Contains general helper functions utilized by the task functions - -- ### _./api/openshift/_ - - Contains OpenShift yaml configs for builds/deployments/etc. These are utilized by the library functions when configuring pods/commands. - -- ### _./Dockerfile_ - Used to build the api image, as part of the execution of the OpenShift build config. - -## Important technologies/repos - -- ### _pipeline-cli_ - - The BCGov npm package that is leveraged heavily in the `./api/.pipeline/*` code. - - Repo: https://github.com/BCDevOps/pipeline-cli - -- ### _GitHub Workflows_ - - https://docs.github.com/en/actions/configuring-and-managing-workflows/configuring-a-workflow - -- ### _OpenShift_ - - https://docs.openshift.com/container-platform/3.11/welcome/index.html - -## General flow of pipeline execution - -1. GitHub PR is opened/closed/etc - _Workflow execution triggers are controlled using `on:` statements_ -2. GitHub Workflows are executed -3. Within a given workflow: - - 1. Jobs are executed - _The conditional execution of jobs can be controlled using `if:` statements_ - _The execution order of jobs can be controlled using `needs:` statements_ - - 2. Within a given job: - - 1. Steps are executed - 2. Eventually one of the steps will execute one or more npm commands - _These npm commands match package.json scripts in `.../.pipeline/package.json`_ - - 3. Within a given npm command: - - 1. The pacakge.json script runs, and executes the `.../.pipeline/` file it references - _This file may configure and execute one or more `.../.pipeline/lib/` tasks_ - 2. The `../.pipeline/lib` task will utilize the `pipeline-cli` tool and the `../openshift/` configs to configure and run OpenShift pods/commands diff --git a/api/.pipeline/api.build.js b/api/.pipeline/api.build.js deleted file mode 100644 index 323d9a9b24..0000000000 --- a/api/.pipeline/api.build.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict'; - -const apiBuildTask = require('./lib/api.build.js'); -const config = require('./config.js'); - -const settings = { ...config, phase: 'build' }; - -// Builds the api image -apiBuildTask(settings); diff --git a/api/.pipeline/api.deploy.js b/api/.pipeline/api.deploy.js deleted file mode 100644 index c9e065a614..0000000000 --- a/api/.pipeline/api.deploy.js +++ /dev/null @@ -1,9 +0,0 @@ -'use strict'; - -const config = require('./config.js'); -const apiDeployTask = require('./lib/api.deploy.js'); - -const settings = { ...config, phase: config.options.env }; - -// Deploys the api image -apiDeployTask(settings); diff --git a/api/.pipeline/config.js b/api/.pipeline/config.js index 5aced03a1f..b4a71f0bb5 100644 --- a/api/.pipeline/config.js +++ b/api/.pipeline/config.js @@ -1,29 +1,30 @@ 'use strict'; + +let process = require('process'); + let options = require('pipeline-cli').Util.parseArguments(); // The root config for common values const config = require('../../.config/config.json'); -const defaultHost = 'biohubbc-af2668-api.apps.silver.devops.gov.bc.ca'; -const defaultHostAPP = 'biohubbc-af2668-dev.apps.silver.devops.gov.bc.ca'; +const appName = config.module.app; +const name = config.module.api; +const dbName = config.module.db; -const appName = (config.module && config.module['app']) || 'biohubbc-app'; -const name = (config.module && config.module['api']) || 'biohubbc-api'; -const dbName = (config.module && config.module['db']) || 'biohubbc-db'; +const version = config.version; -const changeId = options.pr || `${Math.floor(Date.now() * 1000) / 60.0}`; // aka pull-request or branch -const version = config.version || '1.0.0'; +const changeId = options.pr; // pull-request number or branch name // A static deployment is when the deployment is updating dev, test, or prod (rather than a temporary PR) +// See `--type=static` in the `deployStatic.yml` git workflow const isStaticDeployment = options.type === 'static'; const deployChangeId = (isStaticDeployment && 'deploy') || changeId; const branch = (isStaticDeployment && options.branch) || null; const tag = (branch && `build-${version}-${changeId}-${branch}`) || `build-${version}-${changeId}`; -const staticBranches = config.staticBranches || []; -const staticUrlsAPI = config.staticUrlsAPI || {}; -const staticUrls = config.staticUrls || {}; +const staticUrlsAPI = config.staticUrlsAPI; +const staticUrls = config.staticUrls; const processOptions = (options) => { const result = { ...options }; @@ -61,10 +62,12 @@ const phases = { version: `${version}-${changeId}`, tag: tag, env: 'build', - elasticsearchURL: 'https://elasticsearch-af2668-dev.apps.silver.devops.gov.bc.ca', tz: config.timezone.api, branch: branch, - logLevel: (isStaticDeployment && 'info') || 'debug' + cpuRequest: '100m', + cpuLimit: '1250m', + memoryRequest: '512Mi', + memoryLimit: '3Gi' }, dev: { namespace: 'af2668-dev', @@ -76,20 +79,23 @@ const phases = { instance: `${name}-dev-${deployChangeId}`, version: `${deployChangeId}-${changeId}`, tag: `dev-${version}-${deployChangeId}`, - host: - (isStaticDeployment && (staticUrlsAPI.dev || defaultHost)) || - `${name}-${changeId}-af2668-dev.apps.silver.devops.gov.bc.ca`, - appHost: - (isStaticDeployment && (staticUrls.dev || defaultHostAPP)) || - `${appName}-${changeId}-af2668-dev.apps.silver.devops.gov.bc.ca`, + host: (isStaticDeployment && staticUrlsAPI.dev) || `${name}-${changeId}-af2668-dev.apps.silver.devops.gov.bc.ca`, + appHost: (isStaticDeployment && staticUrls.dev) || `${appName}-${changeId}-af2668-dev.apps.silver.devops.gov.bc.ca`, backboneApiHost: 'https://api-dev-biohub-platform.apps.silver.devops.gov.bc.ca', + backboneIntakePath: '/api/dwc/submission/intake', + backboneIntakeEnabled: true, env: 'dev', - elasticsearchURL: 'https://elasticsearch-af2668-dev.apps.silver.devops.gov.bc.ca', + elasticsearchURL: 'http://es01:9200', + elasticsearchTaxonomyIndex: 'taxonomy_2.0.0', tz: config.timezone.api, - certificateURL: config.certificateURL.dev, - replicas: 1, - maxReplicas: 1, - logLevel: (isStaticDeployment && 'info') || 'debug' + sso: config.sso.dev, + logLevel: 'debug', + cpuRequest: '100m', + cpuLimit: '500m', + memoryRequest: '512Mi', + memoryLimit: '2Gi', + replicas: (isStaticDeployment && '2') || '1', + replicasMax: (isStaticDeployment && '3') || '1' }, test: { namespace: 'af2668-test', @@ -104,13 +110,20 @@ const phases = { host: staticUrlsAPI.test, appHost: staticUrls.test, backboneApiHost: 'https://api-test-biohub-platform.apps.silver.devops.gov.bc.ca', + backboneIntakePath: '/api/dwc/submission/intake', + backboneIntakeEnabled: false, env: 'test', elasticsearchURL: 'http://es01:9200', + elasticsearchTaxonomyIndex: 'taxonomy_2.0.0', tz: config.timezone.api, - certificateURL: config.certificateURL.test, - replicas: 3, - maxReplicas: 5, - logLevel: 'info' + sso: config.sso.test, + logLevel: 'info', + cpuRequest: '200m', + cpuLimit: '1000m', + memoryRequest: '512Mi', + memoryLimit: '3Gi', + replicas: '3', + replicasMax: '5' }, prod: { namespace: 'af2668-prod', @@ -125,20 +138,27 @@ const phases = { host: staticUrlsAPI.prod, appHost: staticUrls.prod, backboneApiHost: 'https://api-biohub-platform.apps.silver.devops.gov.bc.ca', + backboneIntakePath: '/api/dwc/submission/intake', + backboneIntakeEnabled: false, env: 'prod', elasticsearchURL: 'http://es01:9200', + elasticsearchTaxonomyIndex: 'taxonomy_2.0.0', tz: config.timezone.api, - certificateURL: config.certificateURL.prod, - replicas: 3, - maxReplicas: 6, - logLevel: 'info' + sso: config.sso.prod, + logLevel: 'info', + cpuRequest: '200m', + cpuLimit: '1000m', + memoryRequest: '512Mi', + memoryLimit: '3Gi', + replicas: '5', + replicasMax: '8' } }; // This callback forces the node process to exit as failure. -process.on('unhandledRejection', (reason) => { - console.log(reason); +process.on('unhandledRejection', (reason, promise) => { + console.log('Unhandled Rejection at:', promise, 'reason:', reason); process.exit(1); }); -module.exports = exports = { phases, options, staticBranches }; +module.exports = exports = { phases, options }; diff --git a/api/.pipeline/lib/api.build.js b/api/.pipeline/lib/api.build.js index bc6d9580b2..1878e92b86 100644 --- a/api/.pipeline/lib/api.build.js +++ b/api/.pipeline/lib/api.build.js @@ -1,4 +1,5 @@ 'use strict'; + const { OpenShiftClientX } = require('pipeline-cli'); const path = require('path'); @@ -7,14 +8,14 @@ const path = require('path'); * * @param {*} settings */ -module.exports = (settings) => { +const apiBuild = (settings) => { const phases = settings.phases; const options = settings.options; const phase = 'build'; - const oc = new OpenShiftClientX(Object.assign({ namespace: phases.build.namespace }, options)); + const oc = new OpenShiftClientX(Object.assign({ namespace: phases[phase].namespace }, options)); - const templatesLocalBaseUrl = oc.toFileUrl(path.resolve(__dirname, '../../openshift')); + const templatesLocalBaseUrl = oc.toFileUrl(path.resolve(__dirname, '../templates')); const name = `${phases[phase].name}`; @@ -27,7 +28,11 @@ module.exports = (settings) => { SUFFIX: phases[phase].suffix, VERSION: phases[phase].tag, SOURCE_REPOSITORY_URL: oc.git.http_url, - SOURCE_REPOSITORY_REF: phases[phase].branch || oc.git.ref + SOURCE_REPOSITORY_REF: phases[phase].branch || oc.git.ref, + CPU_REQUEST: phases[phase].cpuRequest, + CPU_LIMIT: phases[phase].cpuLimit, + MEMORY_REQUEST: phases[phase].memoryRequest, + MEMORY_LIMIT: phases[phase].memoryLimit } }) ); @@ -35,3 +40,5 @@ module.exports = (settings) => { oc.applyRecommendedLabels(objects, name, phase, phases[phase].changeId, phases[phase].instance); oc.applyAndBuild(objects); }; + +module.exports = { apiBuild }; diff --git a/api/.pipeline/lib/api.deploy.js b/api/.pipeline/lib/api.deploy.js index 0f6b0b867b..43aed5b9da 100644 --- a/api/.pipeline/lib/api.deploy.js +++ b/api/.pipeline/lib/api.deploy.js @@ -1,4 +1,5 @@ 'use strict'; + const { OpenShiftClientX } = require('pipeline-cli'); const path = require('path'); @@ -8,14 +9,14 @@ const path = require('path'); * @param {*} settings * @returns */ -module.exports = (settings) => { +const apiDeploy = (settings) => { const phases = settings.phases; const options = settings.options; const phase = options.env; const oc = new OpenShiftClientX(Object.assign({ namespace: phases[phase].namespace }, options)); - const templatesLocalBaseUrl = oc.toFileUrl(path.resolve(__dirname, '../../openshift')); + const templatesLocalBaseUrl = oc.toFileUrl(path.resolve(__dirname, '../templates')); const changeId = phases[phase].changeId; @@ -31,17 +32,32 @@ module.exports = (settings) => { CHANGE_ID: phases.build.changeId || changeId, APP_HOST: phases[phase].appHost, BACKBONE_API_HOST: phases[phase].backboneApiHost, + BACKBONE_INTAKE_PATH: phases[phase].backboneIntakePath, + BACKBONE_INTAKE_ENABLED: phases[phase].backboneIntakeEnabled, NODE_ENV: phases[phase].env || 'dev', ELASTICSEARCH_URL: phases[phase].elasticsearchURL, + ELASTICSEARCH_TAXONOMY_INDEX: phases[phase].elasticsearchTaxonomyIndex, TZ: phases[phase].tz, - KEYCLOAK_ADMIN_USERNAME: 'sims-svc', - KEYCLOAK_SECRET: 'keycloak-admin-password', - KEYCLOAK_SECRET_ADMIN_PASSWORD: 'keycloak_admin_password', + KEYCLOAK_ADMIN_USERNAME: phases[phase].sso.adminUserName, + KEYCLOAK_SECRET: phases[phase].sso.keycloakSecret, + KEYCLOAK_SECRET_ADMIN_PASSWORD: phases[phase].sso.keycloakSecretAdminPassword, DB_SERVICE_NAME: `${phases[phase].dbName}-postgresql${phases[phase].suffix}`, - CERTIFICATE_URL: phases[phase].certificateURL, - REPLICAS: phases[phase].replicas || 1, - REPLICA_MAX: phases[phase].maxReplicas || 1, - LOG_LEVEL: phases[phase].logLevel || 'info' + KEYCLOAK_HOST: phases[phase].sso.url, + KEYCLOAK_CLIENT_ID: phases[phase].sso.clientId, + KEYCLOAK_REALM: phases[phase].sso.realm, + KEYCLOAK_INTEGRATION_ID: phases[phase].sso.integrationId, + KEYCLOAK_ADMIN_HOST: phases[phase].sso.adminHost, + KEYCLOAK_API_HOST: phases[phase].sso.apiHost, + KEYCLOAK_ADMIN_USERNAME: phases[phase].sso.adminUserName, + KEYCLOAK_SECRET: phases[phase].sso.keycloakSecret, + KEYCLOAK_SECRET_ADMIN_PASSWORD: phases[phase].sso.keycloakSecretAdminPassword, + LOG_LEVEL: phases[phase].logLevel || 'info', + CPU_REQUEST: phases[phase].cpuRequest, + CPU_LIMIT: phases[phase].cpuLimit, + MEMORY_REQUEST: phases[phase].memoryRequest, + MEMORY_LIMIT: phases[phase].memoryLimit, + REPLICAS: phases[phase].replicas, + REPLICAS_MAX: phases[phase].replicasMax } }) ); @@ -51,3 +67,5 @@ module.exports = (settings) => { oc.applyAndDeploy(objects, phases[phase].instance); }; + +module.exports = { apiDeploy }; diff --git a/api/.pipeline/lib/clean.js b/api/.pipeline/lib/clean.js index 3cd5da7ebc..1b421124a8 100644 --- a/api/.pipeline/lib/clean.js +++ b/api/.pipeline/lib/clean.js @@ -1,13 +1,13 @@ 'use strict'; + const { OpenShiftClientX } = require('pipeline-cli'); -const checkAndClean = require('../utils/checkAndClean'); /** * Run OC commands to clean all build and deployment artifacts (pods, imagestreams, builds/deployment configs, etc). * * @param {*} settings */ -module.exports = (settings) => { +const clean = (settings) => { const phases = settings.phases; const options = settings.options; const target_phase = options.env; @@ -15,7 +15,7 @@ module.exports = (settings) => { const oc = new OpenShiftClientX(Object.assign({ namespace: phases.build.namespace }, options)); for (let phaseKey in phases) { - if (!phases.hasOwnProperty(phaseKey)) { + if (!Object.prototype.hasOwnProperty.call(phases, phaseKey)) { continue; } @@ -61,16 +61,6 @@ module.exports = (settings) => { }); }); - // Extra cleaning for any disposable 'build' items (database migration/seeding pods, test pods, etc) - // This should include anything that is only run/used once, and can be deleted afterwards. - if (phaseKey !== 'build') { - const newOC = new OpenShiftClientX(Object.assign({ namespace: phases[phaseKey].namespace }, options)); - const setupPod = `${phases[phaseKey].name}${phases[phaseKey].suffix}-setup`; - // const testPod = `${phases[phaseKey].name}${phases[phaseKey].suffix}-test`; - checkAndClean(`pod/${setupPod}`, newOC); - // checkAndClean(`pod/${testPod}`, newOC); - } - oc.raw('delete', ['all'], { selector: `app=${phaseObj.instance},env-id=${phaseObj.changeId},!shared,github-repo=${oc.git.repository},github-owner=${oc.git.owner}`, wait: 'true', @@ -84,3 +74,5 @@ module.exports = (settings) => { }); } }; + +module.exports = { clean }; diff --git a/api/.pipeline/npmw b/api/.pipeline/npmw deleted file mode 100644 index 1eed7c9536..0000000000 --- a/api/.pipeline/npmw +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -set +x -type -t nvm && nvm deactivate -export NVM_DIR="$(git rev-parse --show-toplevel)/.nvm" -if [ ! -f "$NVM_DIR/nvm.sh" ]; then - mkdir -p "${NVM_DIR}" - curl -sSL -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash &>/dev/null -fi -source "$NVM_DIR/nvm.sh" &>/dev/null -METHOD=script nvm install --no-progress &>/dev/null -nvm use &>/dev/null -exec npm "$@" diff --git a/api/.pipeline/package-lock.json b/api/.pipeline/package-lock.json index 4438b4d725..a31f39821e 100644 --- a/api/.pipeline/package-lock.json +++ b/api/.pipeline/package-lock.json @@ -5,9 +5,9 @@ "requires": true, "dependencies": { "debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "requires": { "ms": "2.1.2" } @@ -15,7 +15,7 @@ "lodash.isempty": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz", - "integrity": "sha1-b4bL7di+TsmHvpqvM8loTbGzHn4=" + "integrity": "sha512-oKMuF3xEeqDltrGMfDxAPGIVMSSRv8tbRSODbrs4KGsRRLEhrW8N8Rd4DRgB2+621hY8A8XwwrTVhXWpxFvMzg==" }, "lodash.isfunction": { "version": "3.0.9", @@ -25,12 +25,12 @@ "lodash.isplainobject": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", - "integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=" + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" }, "lodash.isstring": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", - "integrity": "sha1-1SfftUVuynzJu5XV2ur4i6VKVFE=" + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==" }, "ms": { "version": "2.1.2", @@ -38,8 +38,8 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "pipeline-cli": { - "version": "git+https://github.com/BCDevOps/pipeline-cli.git#256319700bc36145fea8511aa9a66f8f8f577926", - "from": "git+https://github.com/BCDevOps/pipeline-cli.git", + "version": "github:NickPhura/pipeline-cli#ef145da35393eb92f71f19aef6aad768f3534a5e", + "from": "github:NickPhura/pipeline-cli#master", "requires": { "debug": "^4.2.0", "lodash.isempty": "^4.0.1", @@ -47,6 +47,12 @@ "lodash.isplainobject": "^4.0.6", "lodash.isstring": "^4.0.1" } + }, + "prettier": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.3.2.tgz", + "integrity": "sha512-lnJzDfJ66zkMy58OL5/NY5zp70S7Nz6KqcKkXYzn2tMVrNxvbqaBpg7H3qHaLxCJ5lNMsGuM8+ohS7cZrthdLQ==", + "dev": true } } } diff --git a/api/.pipeline/package.json b/api/.pipeline/package.json index 71e7493dcd..cd5f6c1896 100644 --- a/api/.pipeline/package.json +++ b/api/.pipeline/package.json @@ -12,16 +12,23 @@ "url": "git+https://github.com/bcgov/ocp-sso.git" }, "scripts": { - "build": "node api.build.js", - "deploy": "node api.deploy.js", - "clean": "node clean.js" + "build": "node scripts/api.build.js", + "deploy": "node scripts/api.deploy.js", + "clean": "node scripts/clean.js", + "lint": "eslint . --ignore-pattern 'node_modules' --ext .js,.ts", + "lint-fix": "eslint . --fix --ignore-pattern 'node_modules' --ext .js,.ts", + "format": "prettier --check \"./**/*.{js,jsx,ts,tsx,css,scss}\"", + "format-fix": "prettier --write \"./**/*.{js,jsx,ts,tsx,json,css,scss}\"" }, "dependencies": { - "pipeline-cli": "git+https://github.com/BCDevOps/pipeline-cli.git", "debug": "^4.2.0", "lodash.isempty": "^4.0.1", "lodash.isfunction": "^3.0.9", "lodash.isplainobject": "^4.0.6", - "lodash.isstring": "^4.0.1" + "lodash.isstring": "^4.0.1", + "pipeline-cli": "NickPhura/pipeline-cli#master" + }, + "devDependencies": { + "prettier": "~2.3.2" } } diff --git a/api/.pipeline/scripts/api.build.js b/api/.pipeline/scripts/api.build.js new file mode 100644 index 0000000000..50ba782a41 --- /dev/null +++ b/api/.pipeline/scripts/api.build.js @@ -0,0 +1,9 @@ +'use strict'; + +const { apiBuild } = require('../lib/api.build.js'); +const config = require('../config.js'); + +const settings = { ...config, phase: 'build' }; + +// Builds the api image +apiBuild(settings); diff --git a/api/.pipeline/scripts/api.deploy.js b/api/.pipeline/scripts/api.deploy.js new file mode 100644 index 0000000000..db035f7986 --- /dev/null +++ b/api/.pipeline/scripts/api.deploy.js @@ -0,0 +1,9 @@ +'use strict'; + +const { apiDeploy } = require('../lib/api.deploy.js'); +const config = require('../config.js'); + +const settings = { ...config, phase: config.options.env }; + +// Deploys the api image +apiDeploy(settings); diff --git a/api/.pipeline/clean.js b/api/.pipeline/scripts/clean.js similarity index 55% rename from api/.pipeline/clean.js rename to api/.pipeline/scripts/clean.js index 44d75f9972..62c6a35451 100644 --- a/api/.pipeline/clean.js +++ b/api/.pipeline/scripts/clean.js @@ -1,9 +1,9 @@ 'use strict'; -const config = require('./config.js'); -const cleanTask = require('./lib/clean.js'); +const { clean } = require('../lib/clean.js'); +const config = require('../config.js'); const settings = { ...config, phase: config.options.env }; // Cleans all build and deployment artifacts (pods, etc) -cleanTask(settings); +clean(settings); diff --git a/api/.pipeline/templates/README.md b/api/.pipeline/templates/README.md new file mode 100644 index 0000000000..5053b7343d --- /dev/null +++ b/api/.pipeline/templates/README.md @@ -0,0 +1,12 @@ +# OpenShift Templates + +This folder contains yaml templates for the api builds, deployments, etc. + +## Prerequisites For Deploying On OpenShift + +The pipeline code builds and deploys all pods/images/storage/etc needed to deploy the application. However, there are some secrets that cannot be automatically deployed (as they cannot be committed to GitHub). You must manually create and populate these secrets. + +- Create Database Secret +- Create ObjectStore Secret + +The included templates under `prereqs` can be imported via the "Import YAML" page in OpenShift. diff --git a/api/openshift/api.bc.yaml b/api/.pipeline/templates/api.bc.yaml similarity index 89% rename from api/openshift/api.bc.yaml rename to api/.pipeline/templates/api.bc.yaml index 401257d696..24fec955d1 100644 --- a/api/openshift/api.bc.yaml +++ b/api/.pipeline/templates/api.bc.yaml @@ -1,10 +1,9 @@ apiVersion: template.openshift.io/v1 kind: Template metadata: - creationTimestamp: null + name: biohubbc-api-bc labels: build: biohubbc-api - name: biohubbc-api parameters: - name: NAME displayName: Name @@ -37,6 +36,14 @@ parameters: - name: SOURCE_IMAGE_TAG required: true value: 14-ubi8 + - name: CPU_REQUEST + value: '100m' + - name: CPU_LIMIT + value: '1250m' + - name: MEMORY_REQUEST + value: '512Mi' + - name: MEMORY_LIMIT + value: '3Gi' objects: - kind: ImageStream apiVersion: image.openshift.io/v1 @@ -87,12 +94,12 @@ objects: name: '${NAME}:${VERSION}' postCommit: {} resources: - limits: - cpu: 1250m - memory: 3Gi requests: - cpu: 100m - memory: 512Mi + cpu: ${CPU_REQUEST} + memory: ${MEMORY_REQUEST} + limits: + cpu: ${CPU_LIMIT} + memory: ${MEMORY_LIMIT} runPolicy: SerialLatestOnly source: contextDir: '${SOURCE_CONTEXT_DIR}' @@ -110,5 +117,3 @@ objects: triggers: - type: ConfigChange - type: ImageChange - status: - lastVersion: 7 diff --git a/api/openshift/api.dc.yaml b/api/.pipeline/templates/api.dc.yaml similarity index 78% rename from api/openshift/api.dc.yaml rename to api/.pipeline/templates/api.dc.yaml index 252f392236..88083d19e3 100644 --- a/api/openshift/api.dc.yaml +++ b/api/.pipeline/templates/api.dc.yaml @@ -1,9 +1,9 @@ apiVersion: template.openshift.io/v1 kind: Template metadata: - resourceVersion: '' - selfLink: '' name: biohubbc-api-dc + labels: + build: biohubbc-api parameters: - name: ENABLE_FILE_VIRUS_SCAN value: 'true' @@ -25,7 +25,14 @@ parameters: description: APP host for application frontend value: '' - name: BACKBONE_API_HOST - description: API host for BioHub Platform Backbone + required: true + description: API host for BioHub Platform Backbone. Example "https://platform.com". + - name: BACKBONE_INTAKE_PATH + required: true + description: API path for BioHub Platform Backbone DwCA submission intake endpoint. Example "/api/path/to/intake". + - name: BACKBONE_INTAKE_ENABLED + required: true + description: Controls whether or not SIMS will submit DwCA datasets to the BioHub Platform Backbone. Set to "true" to enable it, will be disabled by default otherwise. - name: CHANGE_ID description: Change id of the project. This will help to pull image stream required: true @@ -40,27 +47,41 @@ parameters: - name: ELASTICSEARCH_URL description: Platform Elasticsearch URL required: true - value: 'https://elasticsearch-af2668-dev.apps.silver.devops.gov.bc.ca' + value: 'http://es01:9200' + - name: ELASTICSEARCH_TAXONOMY_INDEX + description: Platform Elasticsearch Taxonomy Index + required: true - name: TZ description: Application timezone required: false value: 'America/Vancouver' - - name: CERTIFICATE_URL - description: Authentication certificate urls - required: true - value: 'https://oidc.gov.bc.ca/auth/realms/35r1iman/protocol/openid-connect/certs' - name: KEYCLOAK_HOST - description: keycloak host url - value: https://dev.oidc.gov.bc.ca + description: Key clock login url + required: true - name: KEYCLOAK_REALM - description: keycloak realm definition - value: 35r1iman + description: Realm identifier or name + required: true + - name: KEYCLOAK_INTEGRATION_ID + description: keycloak integration id + required: true + - name: KEYCLOAK_API_HOST + description: keycloak API host + required: true + - name: KEYCLOAK_CLIENT_ID + description: Client Id for application + required: true + - name: KEYCLOAK_ADMIN_HOST + description: Keycloak admin host + required: true - name: KEYCLOAK_ADMIN_USERNAME description: keycloak host admin username + required: true - name: KEYCLOAK_SECRET description: The name of the keycloak secret + required: true - name: KEYCLOAK_SECRET_ADMIN_PASSWORD description: The key of the admin password in the keycloak secret + required: true - name: API_PORT_DEFAULT value: '6100' - name: API_PORT_DEFAULT_NAME @@ -97,8 +118,7 @@ parameters: value: '2Gi' - name: REPLICAS value: '1' - - name: REPLICA_MAX - required: true + - name: REPLICAS_MAX value: '1' objects: - apiVersion: image.openshift.io/v1 @@ -116,8 +136,9 @@ objects: local: false status: dockerImageRepository: null - - apiVersion: apps.openshift.io/v1 - kind: DeploymentConfig + + - kind: DeploymentConfig + apiVersion: apps.openshift.io/v1 metadata: annotations: openshift.io/generated-by: OpenShiftWebConsole @@ -127,7 +148,7 @@ objects: role: api name: ${NAME}${SUFFIX} spec: - replicas: '${{REPLICAS}}' + replicas: ${{REPLICAS}} revisionHistoryLimit: 10 selector: deploymentConfig: ${NAME}${SUFFIX} @@ -136,12 +157,12 @@ objects: recreateParams: timeoutSeconds: 600 resources: - limits: - cpu: ${CPU_LIMIT} - memory: ${MEMORY_LIMIT} requests: cpu: ${CPU_REQUEST} memory: ${MEMORY_REQUEST} + limits: + cpu: ${CPU_LIMIT} + memory: ${MEMORY_LIMIT} type: Rolling template: metadata: @@ -161,6 +182,10 @@ objects: value: ${APP_HOST} - name: BACKBONE_API_HOST value: ${BACKBONE_API_HOST} + - name: BACKBONE_INTAKE_PATH + value: ${BACKBONE_INTAKE_PATH} + - name: BACKBONE_INTAKE_ENABLED + value: ${BACKBONE_INTAKE_ENABLED} - name: ENABLE_FILE_VIRUS_SCAN value: ${ENABLE_FILE_VIRUS_SCAN} - name: CLAMAV_HOST @@ -186,12 +211,18 @@ objects: name: ${DB_SERVICE_NAME} - name: DB_PORT value: '5432' - - name: KEYCLOAK_URL - value: ${CERTIFICATE_URL} - name: KEYCLOAK_HOST value: ${KEYCLOAK_HOST} + - name: KEYCLOAK_API_HOST + value: ${KEYCLOAK_API_HOST} - name: KEYCLOAK_REALM value: ${KEYCLOAK_REALM} + - name: KEYCLOAK_CLIENT_ID + value: ${KEYCLOAK_CLIENT_ID} + - name: KEYCLOAK_INTEGRATION_ID + value: ${KEYCLOAK_INTEGRATION_ID} + - name: KEYCLOAK_ADMIN_HOST + value: ${KEYCLOAK_ADMIN_HOST} - name: KEYCLOAK_ADMIN_USERNAME value: ${KEYCLOAK_ADMIN_USERNAME} - name: KEYCLOAK_ADMIN_PASSWORD @@ -205,6 +236,8 @@ objects: value: ${NODE_ENV} - name: ELASTICSEARCH_URL value: ${ELASTICSEARCH_URL} + - name: ELASTICSEARCH_TAXONOMY_INDEX + value: ${ELASTICSEARCH_TAXONOMY_INDEX} - name: TZ value: ${TZ} - name: VERSION @@ -253,32 +286,32 @@ objects: - containerPort: ${{API_PORT_DEFAULT}} protocol: TCP resources: - limits: - cpu: ${CPU_LIMIT} - memory: ${MEMORY_LIMIT} requests: cpu: ${CPU_REQUEST} memory: ${MEMORY_REQUEST} + limits: + cpu: ${CPU_LIMIT} + memory: ${MEMORY_LIMIT} readinessProbe: - failureThreshold: 10 httpGet: path: /api/version port: ${{API_PORT_DEFAULT}} scheme: HTTP - initialDelaySeconds: 100 + initialDelaySeconds: 60 periodSeconds: 30 + timeoutSeconds: 15 successThreshold: 1 - timeoutSeconds: 20 + failureThreshold: 20 livenessProbe: - failureThreshold: 10 httpGet: path: /api/version port: ${{API_PORT_DEFAULT}} scheme: HTTP - initialDelaySeconds: 80 + initialDelaySeconds: 60 periodSeconds: 30 + timeoutSeconds: 15 successThreshold: 1 - timeoutSeconds: 20 + failureThreshold: 20 terminationMessagePath: /dev/termination-log terminationMessagePolicy: File volumeMounts: @@ -303,18 +336,27 @@ objects: name: ${NAME}:${VERSION} type: ImageChange - type: ConfigChange - - apiVersion: v1 + status: + availableReplicas: 0 + latestVersion: 0 + observedGeneration: 0 + replicas: 0 + unavailableReplicas: 0 + updatedReplicas: 0 + + - kind: Secret + apiVersion: v1 stringData: database-name: '' database-user-api-password: '' database-user-api: '' - kind: Secret metadata: annotations: as-copy-of: ${DB_SERVICE_NAME} creationTimestamp: null name: ${NAME}${SUFFIX} type: Opaque + - apiVersion: v1 kind: Service metadata: @@ -334,10 +376,15 @@ objects: type: ClusterIP status: loadBalancer: {} - - apiVersion: route.openshift.io/v1 - kind: Route + + - kind: Route + apiVersion: route.openshift.io/v1 metadata: - annotations: {} + annotations: + # These 2 annotations (haproxy.router.openshift.io/balance and haproxy.router.openshift.io/disable_cookies) are set in order to prevent 'sticky sessions' + # 'sticky sessions': If an API pod is busy fullfilling a request, subsequent requests will go to other API pods rather than wait for the same pod (when pod replicas is > 1) + haproxy.router.openshift.io/balance: 'leastconn' + haproxy.router.openshift.io/disable_cookies: 'true' creationTimestamp: null labels: {} name: ${NAME}${SUFFIX} @@ -355,16 +402,17 @@ objects: wildcardPolicy: None status: ingress: null - - apiVersion: autoscaling/v2beta2 - kind: HorizontalPodAutoscaler + + - kind: HorizontalPodAutoscaler + apiVersion: autoscaling/v2beta2 metadata: annotations: {} creationTimestamp: null labels: {} name: ${NAME}${SUFFIX} spec: - maxReplicas: '${{REPLICA_MAX}}' - minReplicas: '${{REPLICAS}}' + minReplicas: ${{REPLICAS}} + maxReplicas: ${{REPLICAS_MAX}} scaleTargetRef: apiVersion: apps.openshift.io/v1 kind: DeploymentConfig diff --git a/api/openshift/prereqs/biohubbc-db.yaml b/api/.pipeline/templates/prereqs/biohubbc-db.yaml similarity index 90% rename from api/openshift/prereqs/biohubbc-db.yaml rename to api/.pipeline/templates/prereqs/biohubbc-db.yaml index 3b4eaef8cb..82f58c2ff8 100644 --- a/api/openshift/prereqs/biohubbc-db.yaml +++ b/api/.pipeline/templates/prereqs/biohubbc-db.yaml @@ -1,7 +1,7 @@ kind: Secret apiVersion: v1 metadata: - name: biohubbc-db + name: biohubbc-db-secret data: database-admin-password: database-admin-user: diff --git a/api/openshift/prereqs/biohubbc-object-store.yaml b/api/.pipeline/templates/prereqs/biohubbc-object-store.yaml similarity index 86% rename from api/openshift/prereqs/biohubbc-object-store.yaml rename to api/.pipeline/templates/prereqs/biohubbc-object-store.yaml index d8ebf0bdde..e6ffdaf504 100644 --- a/api/openshift/prereqs/biohubbc-object-store.yaml +++ b/api/.pipeline/templates/prereqs/biohubbc-object-store.yaml @@ -1,7 +1,7 @@ kind: Secret apiVersion: v1 metadata: - name: biohubbc-object-store + name: biohubbc-object-store-secret data: object_store_access_key_id: object_store_bucket_name: diff --git a/api/.pipeline/utils/checkAndClean.js b/api/.pipeline/utils/checkAndClean.js deleted file mode 100644 index 5832b64e0c..0000000000 --- a/api/.pipeline/utils/checkAndClean.js +++ /dev/null @@ -1,17 +0,0 @@ -/** - * @description Check and delete existing resource - */ -module.exports = (resourceName, oc) => { - try { - const list = oc.get(resourceName) || []; - if (list.length === 0) { - console.log(`checkAndClean: No resource available with resource name: ${resourceName}`); - } else { - console.log(`checkAndClean: Cleaning resource => ${resourceName}`); - oc.delete([resourceName], { 'ignore-not-found': 'true', wait: 'true' }); - } - } catch (excp) { - console.log(`Resource ${resourceName} not available [${excp}]`); - } -}; -// --------------------------------------- diff --git a/api/.pipeline/utils/wait.js b/api/.pipeline/utils/wait.js deleted file mode 100644 index 93626412f0..0000000000 --- a/api/.pipeline/utils/wait.js +++ /dev/null @@ -1,111 +0,0 @@ -'use strict'; -const { OpenShiftClientX } = require('pipeline-cli'); - -module.exports = (resourceName, settings, countArg, timeoutArg) => { - const phases = settings.phases; - const options = settings.options; - const phase = options.env; - - const oc = new OpenShiftClientX(Object.assign({ namespace: phases[phase].namespace }, options)); - - const timeout = timeoutArg || 20000; - let count = countArg || 20; - - const check = () => { - try { - console.log(`Getting resource ${resourceName}`); - const list = oc.get(resourceName) || []; - // console.log(`${list.length}:${JSON.stringify(list, null, 2)}`) - if (list.length === 0) { - console.log(`Unable to fetch API resource: ${resourceName}`); - throw new Error(`Unable to fetch API resource: ${resourceName}`); - } - // console.log(JSON.stringify(data, null, 2)); - // Get Status - console.log(`Getting POD Status: ${resourceName}`); - const data = list[0]; - const status = data.status || { conditions: [], containerStatuses: [] }; - if (status.conditions && status.conditions.length === 0) { - console.log(`Unable to fetch API resource: ${resourceName} status`); - console.log(`${JSON.stringify(data)}`); - - // Retry if count is not zero - if (count > 0) { - console.log(`Retry until count is 0: ${resourceName}`); - count = count - 1; - setTimeout(check, timeout); - } else { - throw new Error(`Unable to fetch API resource: ${resourceName} status`); - } - } - - if (!status.containerStatuses) { - console.log(`Unable to fetch API resource: ${resourceName} container state (not defined)`); - console.log(`${JSON.stringify(data)}`); - - // Retry if count is not zero - if (count > 0) { - console.log(`Retry until count is 0: ${resourceName}`); - count = count - 1; - setTimeout(check, timeout); - return; - } else { - throw new Error(`Unable to fetch API resource: ${resourceName} status`); - } - } - - // Checking Container state - if (status.containerStatuses && status.containerStatuses.length === 0) { - console.log(`Unable to fetch API resource: ${resourceName} container state`); - console.log(`${JSON.stringify(data)}`); - - // Retry if count is not zero - if (count > 0) { - console.log(`Retry until count is 0: ${resourceName}`); - count = count - 1; - setTimeout(check, timeout); - return; - } else { - throw new Error(`Unable to fetch API resource: ${resourceName} status`); - } - } - - console.log(`Checking Container State: ${resourceName}`); - const containerStatus = status.containerStatuses[0] || {}; - if (!containerStatus.state) { - console.log(`Unable to fetch API resource: ${resourceName} container state`); - console.log(`${JSON.stringify(data)}`); - throw new Error(`Unable to fetch API resource: ${resourceName} container state`); - } - const state = containerStatus.state || {}; - if (state.terminated) { - if (state.terminated.reason.toLowerCase() === 'completed') { - console.log(`${resourceName}: Finished [Successfully]`); - // console.log(`${resourceName}: Deleting`) - // Remove Pod - // oc.delete([resourceName], {'ignore-not-found':'true', 'wait':'true'}) - return; - } else { - console.log(`Unable to fetch API resource: ${resourceName} terminated with error`); - console.log(JSON.stringify(data.status, null, 2)); - throw new Error(`Unable to fetch API resource: ${resourceName} terminated with error`); - } - } else { - if (count > 0) { - console.log(`Waiting for resource: ${resourceName} to finish ... ${count}`); - count = count - 1; - setTimeout(check, timeout); - } else { - console.log(`Wait time exceed for resource: ${resourceName}`); - console.log(`${JSON.stringify(data)}`); - throw new Error(`Wait time exceed for resource: ${resourceName}`); - } - } - } catch (excp) { - console.log(`Pod (${resourceName}) Wait: Exception ${excp}`); - throw excp; - } - }; - - setTimeout(check, timeout + 10000); -}; diff --git a/api/openshift/api.is.yaml b/api/openshift/api.is.yaml deleted file mode 100644 index e8d4deb865..0000000000 --- a/api/openshift/api.is.yaml +++ /dev/null @@ -1,25 +0,0 @@ -apiVersion: template.openshift.io/v1 -parameters: - - name: NAME - value: biohubbc-api-setup -objects: - - apiVersion: image.openshift.io/v1 - kind: ImageStream - metadata: - annotations: - description: Application api base image - labels: - shared: "true" - creationTimestamp: null - generation: 0 - name: ${NAME} - spec: - lookupPolicy: - local: false - status: - dockerImageRepository: null - # End IS - # End object -kind: Template -# End Template - diff --git a/api/openshift/prereqs/README.md b/api/openshift/prereqs/README.md deleted file mode 100644 index 2d43ecf2ef..0000000000 --- a/api/openshift/prereqs/README.md +++ /dev/null @@ -1,7 +0,0 @@ -# Prerequisites for deploying on openshift - -* Create Database Secret -* Create ObjectStore Secret - -First iteration we'll create this manually, but after that we'll create a template for deployment in test and prod. -Update for triggering the pl diff --git a/api/package-lock.json b/api/package-lock.json index 6f5b522728..e1a31c5beb 100644 --- a/api/package-lock.json +++ b/api/package-lock.json @@ -1,8 +1,12500 @@ { "name": "sims-api", "version": "0.0.0", - "lockfileVersion": 1, + "lockfileVersion": 2, "requires": true, + "packages": { + "": { + "name": "sims-api", + "version": "0.0.0", + "license": "Apache-2.0", + "dependencies": { + "@elastic/elasticsearch": "~8.1.0", + "@turf/bbox": "~6.5.0", + "@turf/circle": "~6.5.0", + "@turf/helpers": "~6.5.0", + "@turf/meta": "~6.5.0", + "adm-zip": "~0.5.5", + "ajv": "~8.12.0", + "aws-sdk": "~2.742.0", + "axios": "~0.21.4", + "clamdjs": "~1.0.2", + "db-migrate": "~0.11.11", + "db-migrate-pg": "~1.2.2", + "express": "~4.17.1", + "express-openapi": "~9.3.0", + "fast-deep-equal": "~3.1.3", + "fast-json-patch": "~3.1.1", + "form-data": "~4.0.0", + "jsonpath": "~1.1.1", + "jsonpath-plus": "^7.2.0", + "jsonwebtoken": "~8.5.1", + "jwks-rsa": "~2.0.5", + "knex": "~1.0.1", + "lodash": "~4.17.21", + "mime": "~2.5.2", + "moment": "~2.29.2", + "multer": "~1.4.3", + "pg": "~8.7.1", + "qs": "~6.10.1", + "sql-template-strings": "~2.2.2", + "swagger-ui-express": "~4.3.0", + "typescript": "~4.1.6", + "utm": "^1.1.1", + "uuid": "~8.3.2", + "winston": "~3.3.3", + "xlsx": "~0.18.5", + "xml2js": "~0.4.23" + }, + "devDependencies": { + "@istanbuljs/nyc-config-typescript": "~1.0.1", + "@types/adm-zip": "~0.4.34", + "@types/chai": "~4.2.22", + "@types/express": "~4.17.13", + "@types/geojson": "~7946.0.8", + "@types/gulp": "~4.0.9", + "@types/jsonpath": "~0.2.0", + "@types/jsonwebtoken": "~8.5.5", + "@types/lodash": "~4.14.176", + "@types/mime": "~2.0.3", + "@types/mocha": "~9.0.0", + "@types/multer": "~1.4.7", + "@types/node": "~14.14.31", + "@types/pg": "~8.6.1", + "@types/sinon": "~10.0.4", + "@types/sinon-chai": "~3.2.5", + "@types/swagger-ui-express": "~4.1.3", + "@types/utm": "^1.1.1", + "@types/uuid": "~8.3.1", + "@types/xml2js": "~0.4.9", + "@types/yamljs": "~0.2.31", + "@typescript-eslint/eslint-plugin": "~4.33.0", + "@typescript-eslint/parser": "~4.33.0", + "chai": "~4.3.4", + "del": "~6.0.0", + "eslint": "~7.32.0", + "eslint-config-prettier": "~6.15.0", + "eslint-plugin-prettier": "~3.3.1", + "gulp": "~4.0.2", + "gulp-typescript": "~5.0.1", + "mocha": "~8.4.0", + "nodemon": "~2.0.14", + "npm-run-all": "~4.1.5", + "nyc": "~15.1.0", + "prettier": "~2.2.1", + "prettier-plugin-organize-imports": "~2.3.4", + "sinon": "~11.1.2", + "sinon-chai": "~3.7.0", + "ts-mocha": "~8.0.0", + "ts-node": "~10.4.0" + }, + "engines": { + "node": ">= 14.0.0", + "npm": ">= 6.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.10.4.tgz", + "integrity": "sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.10.4" + } + }, + "node_modules/@babel/core": { + "version": "7.12.10", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.12.10.tgz", + "integrity": "sha512-eTAlQKq65zHfkHZV0sIVODCPGVgoo1HdBlbSLi9CqOzuZanMv2ihzY+4paiKr1mH+XmYESMAmJ/dpZ68eN6d8w==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.10.4", + "@babel/generator": "^7.12.10", + "@babel/helper-module-transforms": "^7.12.1", + "@babel/helpers": "^7.12.5", + "@babel/parser": "^7.12.10", + "@babel/template": "^7.12.7", + "@babel/traverse": "^7.12.10", + "@babel/types": "^7.12.10", + "convert-source-map": "^1.7.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.1", + "json5": "^2.1.2", + "lodash": "^4.17.19", + "semver": "^5.4.1", + "source-map": "^0.5.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.12.11.tgz", + "integrity": "sha512-Ggg6WPOJtSi8yYQvLVjG8F/TlpWDlKx0OpS4Kt+xMQPs5OaGYWy+v1A+1TvxI6sAMGZpKWWoAQ1DaeQbImlItA==", + "dev": true, + "dependencies": { + "@babel/types": "^7.12.11", + "jsesc": "^2.5.1", + "source-map": "^0.5.0" + } + }, + "node_modules/@babel/helper-function-name": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.12.11.tgz", + "integrity": "sha512-AtQKjtYNolKNi6nNNVLQ27CP6D9oFR6bq/HPYSizlzbp7uC1M59XJe8L+0uXjbIaZaUJF99ruHqVGiKXU/7ybA==", + "dev": true, + "dependencies": { + "@babel/helper-get-function-arity": "^7.12.10", + "@babel/template": "^7.12.7", + "@babel/types": "^7.12.11" + } + }, + "node_modules/@babel/helper-get-function-arity": { + "version": "7.12.10", + "resolved": "https://registry.npmjs.org/@babel/helper-get-function-arity/-/helper-get-function-arity-7.12.10.tgz", + "integrity": "sha512-mm0n5BPjR06wh9mPQaDdXWDoll/j5UpCAPl1x8fS71GHm7HA6Ua2V4ylG1Ju8lvcTOietbPNNPaSilKj+pj+Ag==", + "dev": true, + "dependencies": { + "@babel/types": "^7.12.10" + } + }, + "node_modules/@babel/helper-member-expression-to-functions": { + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.12.7.tgz", + "integrity": "sha512-DCsuPyeWxeHgh1Dus7APn7iza42i/qXqiFPWyBDdOFtvS581JQePsc1F/nD+fHrcswhLlRc2UpYS1NwERxZhHw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.12.7" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.12.5", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.12.5.tgz", + "integrity": "sha512-SR713Ogqg6++uexFRORf/+nPXMmWIn80TALu0uaFb+iQIUoR7bOC7zBWyzBs5b3tBBJXuyD0cRu1F15GyzjOWA==", + "dev": true, + "dependencies": { + "@babel/types": "^7.12.5" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.12.1", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.12.1.tgz", + "integrity": "sha512-QQzehgFAZ2bbISiCpmVGfiGux8YVFXQ0abBic2Envhej22DVXV9nCFaS5hIQbkyo1AdGb+gNME2TSh3hYJVV/w==", + "dev": true, + "dependencies": { + "@babel/helper-module-imports": "^7.12.1", + "@babel/helper-replace-supers": "^7.12.1", + "@babel/helper-simple-access": "^7.12.1", + "@babel/helper-split-export-declaration": "^7.11.0", + "@babel/helper-validator-identifier": "^7.10.4", + "@babel/template": "^7.10.4", + "@babel/traverse": "^7.12.1", + "@babel/types": "^7.12.1", + "lodash": "^4.17.19" + } + }, + "node_modules/@babel/helper-optimise-call-expression": { + "version": "7.12.10", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.12.10.tgz", + "integrity": "sha512-4tpbU0SrSTjjt65UMWSrUOPZTsgvPgGG4S8QSTNHacKzpS51IVWGDj0yCwyeZND/i+LSN2g/O63jEXEWm49sYQ==", + "dev": true, + "dependencies": { + "@babel/types": "^7.12.10" + } + }, + "node_modules/@babel/helper-replace-supers": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.12.11.tgz", + "integrity": "sha512-q+w1cqmhL7R0FNzth/PLLp2N+scXEK/L2AHbXUyydxp828F4FEa5WcVoqui9vFRiHDQErj9Zof8azP32uGVTRA==", + "dev": true, + "dependencies": { + "@babel/helper-member-expression-to-functions": "^7.12.7", + "@babel/helper-optimise-call-expression": "^7.12.10", + "@babel/traverse": "^7.12.10", + "@babel/types": "^7.12.11" + } + }, + "node_modules/@babel/helper-simple-access": { + "version": "7.12.1", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.12.1.tgz", + "integrity": "sha512-OxBp7pMrjVewSSC8fXDFrHrBcJATOOFssZwv16F3/6Xtc138GHybBfPbm9kfiqQHKhYQrlamWILwlDCeyMFEaA==", + "dev": true, + "dependencies": { + "@babel/types": "^7.12.1" + } + }, + "node_modules/@babel/helper-split-export-declaration": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.12.11.tgz", + "integrity": "sha512-LsIVN8j48gHgwzfocYUSkO/hjYAOJqlpJEc7tGXcIm4cubjVUf8LGW6eWRyxEu7gA25q02p0rQUWoCI33HNS5g==", + "dev": true, + "dependencies": { + "@babel/types": "^7.12.11" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.4.tgz", + "integrity": "sha512-3U9y+43hz7ZM+rzG24Qe2mufW5KhvFg/NhnNph+i9mgCtdTCtMJuI1TMkrIUiK7Ix4PYlRF9I5dhqaLYA/ADXw==", + "dev": true + }, + "node_modules/@babel/helpers": { + "version": "7.12.5", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.12.5.tgz", + "integrity": "sha512-lgKGMQlKqA8meJqKsW6rUnc4MdUk35Ln0ATDqdM1a/UpARODdI4j5Y5lVfUScnSNkJcdCRAaWkspykNoFg9sJA==", + "dev": true, + "dependencies": { + "@babel/template": "^7.10.4", + "@babel/traverse": "^7.12.5", + "@babel/types": "^7.12.5" + } + }, + "node_modules/@babel/highlight": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.10.4.tgz", + "integrity": "sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.10.4", + "chalk": "^2.0.0", + "js-tokens": "^4.0.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "node_modules/@babel/parser": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.12.11.tgz", + "integrity": "sha512-N3UxG+uuF4CMYoNj8AhnbAcJF0PiuJ9KHuy1lQmkYsxTer/MAH9UBNHsBoAX/4s6NvlDD047No8mYVGGzLL4hg==", + "dev": true, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/template": { + "version": "7.12.7", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.12.7.tgz", + "integrity": "sha512-GkDzmHS6GV7ZeXfJZ0tLRBhZcMcY0/Lnb+eEbXDBfCAcZCjrZKe6p3J4we/D24O9Y8enxWAg1cWwof59yLh2ow==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.10.4", + "@babel/parser": "^7.12.7", + "@babel/types": "^7.12.7" + } + }, + "node_modules/@babel/traverse": { + "version": "7.12.12", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.12.12.tgz", + "integrity": "sha512-s88i0X0lPy45RrLM8b9mz8RPH5FqO9G9p7ti59cToE44xFm1Q+Pjh5Gq4SXBbtb88X7Uy7pexeqRIQDDMNkL0w==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.11", + "@babel/generator": "^7.12.11", + "@babel/helper-function-name": "^7.12.11", + "@babel/helper-split-export-declaration": "^7.12.11", + "@babel/parser": "^7.12.11", + "@babel/types": "^7.12.12", + "debug": "^4.1.0", + "globals": "^11.1.0", + "lodash": "^4.17.19" + } + }, + "node_modules/@babel/traverse/node_modules/@babel/code-frame": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", + "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.10.4" + } + }, + "node_modules/@babel/traverse/node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/types": { + "version": "7.12.12", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.12.12.tgz", + "integrity": "sha512-lnIX7piTxOH22xE7fDXDbSHg9MM1/6ORnafpJmov5rs0kX5g4BZxeXNJLXsMRiO0U5Rb8/FvMS6xlTnTHvxonQ==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.12.11", + "lodash": "^4.17.19", + "to-fast-properties": "^2.0.0" + } + }, + "node_modules/@babel/types/node_modules/@babel/helper-validator-identifier": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.12.11.tgz", + "integrity": "sha512-np/lG3uARFybkoHokJUmf1QfEvRVCPbmQeUQpKow5cQ3xWrV9i3rUHodKDJPQfTVX61qKi+UdYk8kik84n7XOw==", + "dev": true + }, + "node_modules/@cspotcode/source-map-consumer": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-consumer/-/source-map-consumer-0.8.0.tgz", + "integrity": "sha512-41qniHzTU8yAGbCp04ohlmSrZf8bkf/iJsl3V0dRGsQN/5GFfx+LbCSsCpp2gqrqjTVg/K6O8ycoV35JIwAzAg==", + "dev": true, + "engines": { + "node": ">= 12" + } + }, + "node_modules/@cspotcode/source-map-support": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.7.0.tgz", + "integrity": "sha512-X4xqRHqN8ACt2aHVe51OxeA2HjbcL4MqFqXkrmQszJ1NOUuUu5u6Vqx/0lZSVNku7velL5FC/s5uEAj1lsBMhA==", + "dev": true, + "dependencies": { + "@cspotcode/source-map-consumer": "0.8.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@dabh/diagnostics": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@dabh/diagnostics/-/diagnostics-2.0.2.tgz", + "integrity": "sha512-+A1YivoVDNNVCdfozHSR8v/jyuuLTMXwjWuxPFlFlUapXoGc+Gj9mDlTDDfrwl7rXCl2tNZ0kE8sIBO6YOn96Q==", + "dependencies": { + "colorspace": "1.1.x", + "enabled": "2.0.x", + "kuler": "^2.0.0" + } + }, + "node_modules/@elastic/elasticsearch": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/@elastic/elasticsearch/-/elasticsearch-8.1.0.tgz", + "integrity": "sha512-IiZ6u77C7oYYbUkx/YFgEJk6ZtP+QDI97VaUWiYD14xIdn/w9WJtmx/Y1sN8ov0nZzrWbqScB2Z7Pb8oxo7vqw==", + "dependencies": { + "@elastic/transport": "^8.0.2", + "tslib": "^2.3.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@elastic/elasticsearch/node_modules/tslib": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", + "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" + }, + "node_modules/@elastic/transport": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@elastic/transport/-/transport-8.0.2.tgz", + "integrity": "sha512-OlDz3WO3pKE9vSxW4wV/mn7rYCtBmSsDwxr64h/S1Uc/zrIBXb0iUsRMSkiybXugXhjwyjqG2n1Wc7jjFxrskQ==", + "dependencies": { + "debug": "^4.3.2", + "hpagent": "^0.1.2", + "ms": "^2.1.3", + "secure-json-parse": "^2.4.0", + "tslib": "^2.3.0", + "undici": "^4.14.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@elastic/transport/node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@elastic/transport/node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/@elastic/transport/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/@elastic/transport/node_modules/tslib": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.3.1.tgz", + "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" + }, + "node_modules/@eslint/eslintrc": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-0.4.3.tgz", + "integrity": "sha512-J6KFFz5QCYUJq3pf0mjEcCJVERbzv71PUIDczuh9JkwGEzced6CO5ADLHB1rbf/+oPBtoPfMYNOpGDzCANlbXw==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.1.1", + "espree": "^7.3.0", + "globals": "^13.9.0", + "ignore": "^4.0.6", + "import-fresh": "^3.2.1", + "js-yaml": "^3.13.1", + "minimatch": "^3.0.4", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/@eslint/eslintrc/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@eslint/eslintrc/node_modules/ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/@eslint/eslintrc/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/@eslint/eslintrc/node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@humanwhocodes/config-array": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.5.0.tgz", + "integrity": "sha512-FagtKFz74XrTl7y6HCzQpwDfXP0yhxe9lHLD1UZxjvZIcbyRz8zTFF/yYNfSfzU414eDwZ1SrO0Qvtyf+wFMQg==", + "dev": true, + "dependencies": { + "@humanwhocodes/object-schema": "^1.2.0", + "debug": "^4.1.1", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=10.10.0" + } + }, + "node_modules/@humanwhocodes/object-schema": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz", + "integrity": "sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==", + "dev": true + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/nyc-config-typescript": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@istanbuljs/nyc-config-typescript/-/nyc-config-typescript-1.0.1.tgz", + "integrity": "sha512-/gz6LgVpky205LuoOfwEZmnUtaSmdk0QIMcNFj9OvxhiMhPpKftMgZmGN7jNj7jR+lr8IB1Yks3QSSSNSxfoaQ==", + "dev": true, + "dependencies": { + "@istanbuljs/schema": "^0.1.2" + }, + "engines": { + "node": ">=8" + }, + "peerDependencies": { + "nyc": ">=15", + "source-map-support": "*", + "ts-node": "*" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.2.tgz", + "integrity": "sha512-tsAQNx32a8CoFhjhijUIhI4kccIAgmGhy8LZMZgGfmXcpMbPRUqn5LWmgRttILi6yeGmBJd2xsPkFMs0PzgPCw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz", + "integrity": "sha512-33g3pMJk3bg5nXbL/+CY6I2eJDzZAni49PfJnL5fghPTggPvBd/pFNSgJsdAgWptuFu7qq/ERvOYFlhvsLTCKA==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.4", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.4.tgz", + "integrity": "sha512-IYlHJA0clt2+Vg7bccq+TzRdJvv19c2INqBSsoOLp1je7xjtr7J26+WXR72MCdvU9q1qTzIWDfhMf+DRvQJK4Q==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.6.tgz", + "integrity": "sha512-8Broas6vTtW4GIXTAHDoE32hnN2M5ykgCpWGbuXHQ15vEMqr23pB76e/GZcYsZCHALv50ktd24qhEyKr6wBtow==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.4", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@panva/asn1.js": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@panva/asn1.js/-/asn1.js-1.0.0.tgz", + "integrity": "sha512-UdkG3mLEqXgnlKsWanWcgb6dOjUzJ+XC5f+aWw30qrtjxeNUSfKX1cd5FBzOaXQumoe9nIqeZUvrRJS03HCCtw==", + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/@sindresorhus/is": { + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.14.0.tgz", + "integrity": "sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/@sinonjs/commons": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-1.8.3.tgz", + "integrity": "sha512-xkNcLAn/wZaX14RPlwizcKicDk9G3F8m2nU3L7Ukm5zBgTwiT0wsoFAHx9Jq56fJA1z/7uKGtCRu16sOUCLIHQ==", + "dev": true, + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-7.1.2.tgz", + "integrity": "sha512-iQADsW4LBMISqZ6Ci1dupJL9pprqwcVFTcOsEmQOEhW+KLCVn/Y4Jrvg2k19fIHCp+iFprriYPTdRcQR8NbUPg==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.0" + } + }, + "node_modules/@sinonjs/samsam": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/@sinonjs/samsam/-/samsam-6.0.2.tgz", + "integrity": "sha512-jxPRPp9n93ci7b8hMfJOFDPRLFYadN6FSpeROFTR4UNF4i5b+EK6m4QXPO46BDhFgRy1JuS87zAnFOzCUwMJcQ==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.6.0", + "lodash.get": "^4.4.2", + "type-detect": "^4.0.8" + } + }, + "node_modules/@sinonjs/text-encoding": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz", + "integrity": "sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ==", + "dev": true + }, + "node_modules/@szmarczak/http-timer": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-1.1.2.tgz", + "integrity": "sha512-XIB2XbzHTN6ieIjfIMV9hlVcfPU26s2vafYWQcZHWXHOxiaRZYEDKEwdl129Zyg50+foYV2jCgtrqSA6qNuNSA==", + "dev": true, + "dependencies": { + "defer-to-connect": "^1.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/@tsconfig/node10": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.8.tgz", + "integrity": "sha512-6XFfSQmMgq0CFLY1MslA/CPUfhIL919M1rMsa5lP2P097N2Wd1sSX0tx1u4olM16fLNhtHZpRhedZJphNJqmZg==", + "dev": true + }, + "node_modules/@tsconfig/node12": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.9.tgz", + "integrity": "sha512-/yBMcem+fbvhSREH+s14YJi18sp7J9jpuhYByADT2rypfajMZZN4WQ6zBGgBKp53NKmqI36wFYDb3yaMPurITw==", + "dev": true + }, + "node_modules/@tsconfig/node14": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.1.tgz", + "integrity": "sha512-509r2+yARFfHHE7T6Puu2jjkoycftovhXRqW328PDXTVGKihlb1P8Z9mMZH04ebyajfRY7dedfGynlrFHJUQCg==", + "dev": true + }, + "node_modules/@tsconfig/node16": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.2.tgz", + "integrity": "sha512-eZxlbI8GZscaGS7kkc/trHTT5xgrjH3/1n2JDwusC9iahPKWMRvRjJSAN5mCXviuTGQ/lHnhvv8Q1YTpnfz9gA==", + "dev": true + }, + "node_modules/@turf/bbox": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@turf/bbox/-/bbox-6.5.0.tgz", + "integrity": "sha512-RBbLaao5hXTYyyg577iuMtDB8ehxMlUqHEJiMs8jT1GHkFhr6sYre3lmLsPeYEi/ZKj5TP5tt7fkzNdJ4GIVyw==", + "dependencies": { + "@turf/helpers": "^6.5.0", + "@turf/meta": "^6.5.0" + }, + "funding": { + "url": "https://opencollective.com/turf" + } + }, + "node_modules/@turf/circle": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@turf/circle/-/circle-6.5.0.tgz", + "integrity": "sha512-oU1+Kq9DgRnoSbWFHKnnUdTmtcRUMmHoV9DjTXu9vOLNV5OWtAAh1VZ+mzsioGGzoDNT/V5igbFOkMfBQc0B6A==", + "dependencies": { + "@turf/destination": "^6.5.0", + "@turf/helpers": "^6.5.0" + }, + "funding": { + "url": "https://opencollective.com/turf" + } + }, + "node_modules/@turf/destination": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@turf/destination/-/destination-6.5.0.tgz", + "integrity": "sha512-4cnWQlNC8d1tItOz9B4pmJdWpXqS0vEvv65bI/Pj/genJnsL7evI0/Xw42RvEGROS481MPiU80xzvwxEvhQiMQ==", + "dependencies": { + "@turf/helpers": "^6.5.0", + "@turf/invariant": "^6.5.0" + }, + "funding": { + "url": "https://opencollective.com/turf" + } + }, + "node_modules/@turf/helpers": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@turf/helpers/-/helpers-6.5.0.tgz", + "integrity": "sha512-VbI1dV5bLFzohYYdgqwikdMVpe7pJ9X3E+dlr425wa2/sMJqYDhTO++ec38/pcPvPE6oD9WEEeU3Xu3gza+VPw==", + "funding": { + "url": "https://opencollective.com/turf" + } + }, + "node_modules/@turf/invariant": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@turf/invariant/-/invariant-6.5.0.tgz", + "integrity": "sha512-Wv8PRNCtPD31UVbdJE/KVAWKe7l6US+lJItRR/HOEW3eh+U/JwRCSUl/KZ7bmjM/C+zLNoreM2TU6OoLACs4eg==", + "dependencies": { + "@turf/helpers": "^6.5.0" + }, + "funding": { + "url": "https://opencollective.com/turf" + } + }, + "node_modules/@turf/meta": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/@turf/meta/-/meta-6.5.0.tgz", + "integrity": "sha512-RrArvtsV0vdsCBegoBtOalgdSOfkBrTJ07VkpiCnq/491W67hnMWmDu7e6Ztw0C3WldRYTXkg3SumfdzZxLBHA==", + "dependencies": { + "@turf/helpers": "^6.5.0" + }, + "funding": { + "url": "https://opencollective.com/turf" + } + }, + "node_modules/@types/adm-zip": { + "version": "0.4.34", + "resolved": "https://registry.npmjs.org/@types/adm-zip/-/adm-zip-0.4.34.tgz", + "integrity": "sha512-8ToYLLAYhkRfcmmljrKi22gT2pqu7hGMDtORP1emwIEGmgUTZOsaDjzWFzW5N2frcFRz/50CWt4zA1CxJ73pmQ==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/body-parser": { + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.1.tgz", + "integrity": "sha512-a6bTJ21vFOGIkwM0kzh9Yr89ziVxq4vYH2fQ6N8AeipEzai/cFK6aGMArIkUeIdRIgpwQa+2bXiLuUJCpSf2Cg==", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/chai": { + "version": "4.2.22", + "resolved": "https://registry.npmjs.org/@types/chai/-/chai-4.2.22.tgz", + "integrity": "sha512-tFfcE+DSTzWAgifkjik9AySNqIyNoYwmR+uecPwwD/XRNfvOjmC/FjCxpiUGDkDVDphPfCUecSQVFw+lN3M3kQ==", + "dev": true + }, + "node_modules/@types/connect": { + "version": "3.4.35", + "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.35.tgz", + "integrity": "sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/expect": { + "version": "1.20.4", + "resolved": "https://registry.npmjs.org/@types/expect/-/expect-1.20.4.tgz", + "integrity": "sha512-Q5Vn3yjTDyCMV50TB6VRIbQNxSE4OmZR86VSbGaNpfUolm0iePBB4KdEEHmxoY5sT2+2DIvXW0rvMDP2nHZ4Mg==", + "dev": true + }, + "node_modules/@types/express": { + "version": "4.17.13", + "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz", + "integrity": "sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA==", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.18", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-jwt": { + "version": "0.0.42", + "resolved": "https://registry.npmjs.org/@types/express-jwt/-/express-jwt-0.0.42.tgz", + "integrity": "sha512-WszgUddvM1t5dPpJ3LhWNH8kfNN8GPIBrAGxgIYXVCEGx6Bx4A036aAuf/r5WH9DIEdlmp7gHOYvSM6U87B0ag==", + "dependencies": { + "@types/express": "*", + "@types/express-unless": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "4.17.24", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.24.tgz", + "integrity": "sha512-3UJuW+Qxhzwjq3xhwXm2onQcFHn76frIYVbTu+kn24LFxI+dEhdfISDFovPB8VpEgW8oQCTpRuCe+0zJxB7NEA==", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*" + } + }, + "node_modules/@types/express-unless": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/@types/express-unless/-/express-unless-0.5.2.tgz", + "integrity": "sha512-Q74UyYRX/zIgl1HSp9tUX2PlG8glkVm+59r7aK4KGKzC5jqKIOX6rrVLRQrzpZUQ84VukHtRoeAuon2nIssHPQ==", + "dependencies": { + "@types/express": "*" + } + }, + "node_modules/@types/geojson": { + "version": "7946.0.8", + "resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.8.tgz", + "integrity": "sha512-1rkryxURpr6aWP7R786/UQOkJ3PcpQiWkAXBmdWc7ryFWqN6a4xfK7BtjXvFBKO9LjQ+MWQSWxYeZX1OApnArA==", + "dev": true + }, + "node_modules/@types/glob": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/@types/glob/-/glob-7.2.0.tgz", + "integrity": "sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA==", + "dev": true, + "dependencies": { + "@types/minimatch": "*", + "@types/node": "*" + } + }, + "node_modules/@types/glob-stream": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/@types/glob-stream/-/glob-stream-6.1.1.tgz", + "integrity": "sha512-AGOUTsTdbPkRS0qDeyeS+6KypmfVpbT5j23SN8UPG63qjKXNKjXn6V9wZUr8Fin0m9l8oGYaPK8b2WUMF8xI1A==", + "dev": true, + "dependencies": { + "@types/glob": "*", + "@types/node": "*" + } + }, + "node_modules/@types/gulp": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/gulp/-/gulp-4.0.9.tgz", + "integrity": "sha512-zzT+wfQ8uwoXjDhRK9Zkmmk09/fbLLmN/yDHFizJiEKIve85qutOnXcP/TM2sKPBTU+Jc16vfPbOMkORMUBN7Q==", + "dev": true, + "dependencies": { + "@types/undertaker": "*", + "@types/vinyl-fs": "*", + "chokidar": "^3.3.1" + } + }, + "node_modules/@types/json-schema": { + "version": "7.0.9", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.9.tgz", + "integrity": "sha512-qcUXuemtEu+E5wZSJHNxUXeCZhAfXKQ41D+duX+VYPde7xyEVZci+/oXKJL13tnRs9lR2pr4fod59GT6/X1/yQ==", + "dev": true + }, + "node_modules/@types/json5": { + "version": "0.0.29", + "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz", + "integrity": "sha1-7ihweulOEdK4J7y+UnC86n8+ce4=", + "dev": true, + "optional": true + }, + "node_modules/@types/jsonpath": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/@types/jsonpath/-/jsonpath-0.2.0.tgz", + "integrity": "sha512-v7qlPA0VpKUlEdhghbDqRoKMxFB3h3Ch688TApBJ6v+XLDdvWCGLJIYiPKGZnS6MAOie+IorCfNYVHOPIHSWwQ==", + "dev": true + }, + "node_modules/@types/jsonwebtoken": { + "version": "8.5.5", + "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-8.5.5.tgz", + "integrity": "sha512-OGqtHQ7N5/Ap/TUwO6IgHDuLiAoTmHhGpNvgkCm/F4N6pKzx/RBSfr2OXZSwC6vkfnsEdb6+7DNZVtiXiwdwFw==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/lodash": { + "version": "4.14.176", + "resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.14.176.tgz", + "integrity": "sha512-xZmuPTa3rlZoIbtDUyJKZQimJV3bxCmzMIO2c9Pz9afyDro6kr7R79GwcB6mRhuoPmV2p1Vb66WOJH7F886WKQ==", + "dev": true + }, + "node_modules/@types/mime": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-2.0.3.tgz", + "integrity": "sha512-Jus9s4CDbqwocc5pOAnh8ShfrnMcPHuJYzVcSUU7lrh8Ni5HuIqX3oilL86p3dlTrk0LzHRCgA/GQ7uNCw6l2Q==", + "dev": true + }, + "node_modules/@types/minimatch": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@types/minimatch/-/minimatch-3.0.5.tgz", + "integrity": "sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ==", + "dev": true + }, + "node_modules/@types/mocha": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/@types/mocha/-/mocha-9.0.0.tgz", + "integrity": "sha512-scN0hAWyLVAvLR9AyW7HoFF5sJZglyBsbPuHO4fv7JRvfmPBMfp1ozWqOf/e4wwPNxezBZXRfWzMb6iFLgEVRA==", + "dev": true + }, + "node_modules/@types/multer": { + "version": "1.4.7", + "resolved": "https://registry.npmjs.org/@types/multer/-/multer-1.4.7.tgz", + "integrity": "sha512-/SNsDidUFCvqqcWDwxv2feww/yqhNeTRL5CVoL3jU4Goc4kKEL10T7Eye65ZqPNi4HRx8sAEX59pV1aEH7drNA==", + "dev": true, + "dependencies": { + "@types/express": "*" + } + }, + "node_modules/@types/node": { + "version": "14.14.45", + "resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.45.tgz", + "integrity": "sha512-DssMqTV9UnnoxDWu959sDLZzfvqCF0qDNRjaWeYSui9xkFe61kKo4l1TWNTQONpuXEm+gLMRvdlzvNHBamzmEw==" + }, + "node_modules/@types/pg": { + "version": "8.6.1", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.6.1.tgz", + "integrity": "sha512-1Kc4oAGzAl7uqUStZCDvaLFqZrW9qWSjXOmBfdgyBP5La7Us6Mg4GBvRlSoaZMhQF/zSj1C8CtKMBkoiT8eL8w==", + "dev": true, + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, + "node_modules/@types/qs": { + "version": "6.9.7", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.7.tgz", + "integrity": "sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==" + }, + "node_modules/@types/range-parser": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.4.tgz", + "integrity": "sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==" + }, + "node_modules/@types/serve-static": { + "version": "1.13.10", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.10.tgz", + "integrity": "sha512-nCkHGI4w7ZgAdNkrEu0bv+4xNV/XDqW+DydknebMOQwkpDGx8G+HTlj7R7ABI8i8nKxVw0wtKPi1D+lPOkh4YQ==", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/serve-static/node_modules/@types/mime": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.2.tgz", + "integrity": "sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==" + }, + "node_modules/@types/sinon": { + "version": "10.0.4", + "resolved": "https://registry.npmjs.org/@types/sinon/-/sinon-10.0.4.tgz", + "integrity": "sha512-fOYjrxQv8zJsqOY6V6ecP4eZhQBxtY80X0er1VVnUIAIZo74jHm8e1vguG5Yt4Iv8W2Wr7TgibB8MfRe32k9pA==", + "dev": true, + "dependencies": { + "@sinonjs/fake-timers": "^7.1.0" + } + }, + "node_modules/@types/sinon-chai": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/@types/sinon-chai/-/sinon-chai-3.2.5.tgz", + "integrity": "sha512-bKQqIpew7mmIGNRlxW6Zli/QVyc3zikpGzCa797B/tRnD9OtHvZ/ts8sYXV+Ilj9u3QRaUEM8xrjgd1gwm1BpQ==", + "dev": true, + "dependencies": { + "@types/chai": "*", + "@types/sinon": "*" + } + }, + "node_modules/@types/swagger-ui-express": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/@types/swagger-ui-express/-/swagger-ui-express-4.1.3.tgz", + "integrity": "sha512-jqCjGU/tGEaqIplPy3WyQg+Nrp6y80DCFnDEAvVKWkJyv0VivSSDCChkppHRHAablvInZe6pijDFMnavtN0vqA==", + "dev": true, + "dependencies": { + "@types/express": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/undertaker": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/undertaker/-/undertaker-1.2.7.tgz", + "integrity": "sha512-xuY7nBwo1zSRoY2aitp/HArHfTulFAKql2Fr4b4mWbBBP+F50n7Jm6nwISTTMaDk2xvl92O10TTejVF0Q9mInw==", + "dev": true, + "dependencies": { + "@types/node": "*", + "@types/undertaker-registry": "*", + "async-done": "~1.3.2" + } + }, + "node_modules/@types/undertaker-registry": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@types/undertaker-registry/-/undertaker-registry-1.0.1.tgz", + "integrity": "sha512-Z4TYuEKn9+RbNVk1Ll2SS4x1JeLHecolIbM/a8gveaHsW0Hr+RQMraZACwTO2VD7JvepgA6UO1A1VrbktQrIbQ==", + "dev": true + }, + "node_modules/@types/utm": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/utm/-/utm-1.1.1.tgz", + "integrity": "sha512-iIqxs3T7X2drAkeTn0DE3Xpcx5z0MHXJFnjvvWCTzZDF0CnM5D2CGn4eEFZi+uHNnCXx4j/3vA8MO6+xXpQYqA==", + "dev": true + }, + "node_modules/@types/uuid": { + "version": "8.3.1", + "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-8.3.1.tgz", + "integrity": "sha512-Y2mHTRAbqfFkpjldbkHGY8JIzRN6XqYRliG8/24FcHm2D2PwW24fl5xMRTVGdrb7iMrwCaIEbLWerGIkXuFWVg==", + "dev": true + }, + "node_modules/@types/vinyl": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/vinyl/-/vinyl-2.0.6.tgz", + "integrity": "sha512-ayJ0iOCDNHnKpKTgBG6Q6JOnHTj9zFta+3j2b8Ejza0e4cvRyMn0ZoLEmbPrTHe5YYRlDYPvPWVdV4cTaRyH7g==", + "dev": true, + "dependencies": { + "@types/expect": "^1.20.4", + "@types/node": "*" + } + }, + "node_modules/@types/vinyl-fs": { + "version": "2.4.12", + "resolved": "https://registry.npmjs.org/@types/vinyl-fs/-/vinyl-fs-2.4.12.tgz", + "integrity": "sha512-LgBpYIWuuGsihnlF+OOWWz4ovwCYlT03gd3DuLwex50cYZLmX3yrW+sFF9ndtmh7zcZpS6Ri47PrIu+fV+sbXw==", + "dev": true, + "dependencies": { + "@types/glob-stream": "*", + "@types/node": "*", + "@types/vinyl": "*" + } + }, + "node_modules/@types/xml2js": { + "version": "0.4.9", + "resolved": "https://registry.npmjs.org/@types/xml2js/-/xml2js-0.4.9.tgz", + "integrity": "sha512-CHiCKIihl1pychwR2RNX5mAYmJDACgFVCMT5OArMaO3erzwXVcBqPcusr+Vl8yeeXukxZqtF8mZioqX+mpjjdw==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/yamljs": { + "version": "0.2.31", + "resolved": "https://registry.npmjs.org/@types/yamljs/-/yamljs-0.2.31.tgz", + "integrity": "sha512-QcJ5ZczaXAqbVD3o8mw/mEBhRvO5UAdTtbvgwL/OgoWubvNBh6/MxLBAigtcgIFaq3shon9m3POIxQaLQt4fxQ==", + "dev": true + }, + "node_modules/@typescript-eslint/eslint-plugin": { + "version": "4.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.33.0.tgz", + "integrity": "sha512-aINiAxGVdOl1eJyVjaWn/YcVAq4Gi/Yo35qHGCnqbWVz61g39D0h23veY/MA0rFFGfxK7TySg2uwDeNv+JgVpg==", + "dev": true, + "dependencies": { + "@typescript-eslint/experimental-utils": "4.33.0", + "@typescript-eslint/scope-manager": "4.33.0", + "debug": "^4.3.1", + "functional-red-black-tree": "^1.0.1", + "ignore": "^5.1.8", + "regexpp": "^3.1.0", + "semver": "^7.3.5", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "@typescript-eslint/parser": "^4.0.0", + "eslint": "^5.0.0 || ^6.0.0 || ^7.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/debug": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", + "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/eslint-plugin/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/@typescript-eslint/experimental-utils": { + "version": "4.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-4.33.0.tgz", + "integrity": "sha512-zeQjOoES5JFjTnAhI5QY7ZviczMzDptls15GFsI6jyUOq0kOf9+WonkhtlIhh0RgHRnqj5gdNxW5j1EvAyYg6Q==", + "dev": true, + "dependencies": { + "@types/json-schema": "^7.0.7", + "@typescript-eslint/scope-manager": "4.33.0", + "@typescript-eslint/types": "4.33.0", + "@typescript-eslint/typescript-estree": "4.33.0", + "eslint-scope": "^5.1.1", + "eslint-utils": "^3.0.0" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "*" + } + }, + "node_modules/@typescript-eslint/parser": { + "version": "4.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-4.33.0.tgz", + "integrity": "sha512-ZohdsbXadjGBSK0/r+d87X0SBmKzOq4/S5nzK6SBgJspFo9/CUDJ7hjayuze+JK7CZQLDMroqytp7pOcFKTxZA==", + "dev": true, + "dependencies": { + "@typescript-eslint/scope-manager": "4.33.0", + "@typescript-eslint/types": "4.33.0", + "@typescript-eslint/typescript-estree": "4.33.0", + "debug": "^4.3.1" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependencies": { + "eslint": "^5.0.0 || ^6.0.0 || ^7.0.0" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/debug": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", + "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/parser/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/@typescript-eslint/scope-manager": { + "version": "4.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-4.33.0.tgz", + "integrity": "sha512-5IfJHpgTsTZuONKbODctL4kKuQje/bzBRkwHE8UOZ4f89Zeddg+EGZs8PD8NcN4LdM3ygHWYB3ukPAYjvl/qbQ==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "4.33.0", + "@typescript-eslint/visitor-keys": "4.33.0" + }, + "engines": { + "node": "^8.10.0 || ^10.13.0 || >=11.10.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/types": { + "version": "4.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-4.33.0.tgz", + "integrity": "sha512-zKp7CjQzLQImXEpLt2BUw1tvOMPfNoTAfb8l51evhYbOEEzdWyQNmHWWGPR6hwKJDAi+1VXSBmnhL9kyVTTOuQ==", + "dev": true, + "engines": { + "node": "^8.10.0 || ^10.13.0 || >=11.10.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@typescript-eslint/typescript-estree": { + "version": "4.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-4.33.0.tgz", + "integrity": "sha512-rkWRY1MPFzjwnEVHsxGemDzqqddw2QbTJlICPD9p9I9LfsO8fdmfQPOX3uKfUaGRDFJbfrtm/sXhVXN4E+bzCA==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "4.33.0", + "@typescript-eslint/visitor-keys": "4.33.0", + "debug": "^4.3.1", + "globby": "^11.0.3", + "is-glob": "^4.0.1", + "semver": "^7.3.5", + "tsutils": "^3.21.0" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/debug": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", + "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/fast-glob": { + "version": "3.2.11", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.11.tgz", + "integrity": "sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/globby": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz", + "integrity": "sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.2.9", + "ignore": "^5.2.0", + "merge2": "^1.4.1", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/ignore": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.2.0.tgz", + "integrity": "sha512-CmxgYGiEPCLhfLnpPp1MoRmifwEIOgjcHXxOBjv7mY96c+eWScsOP9c112ZyLdWHi0FxHjI+4uVhKYp/gcdRmQ==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/micromatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.4.tgz", + "integrity": "sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg==", + "dev": true, + "dependencies": { + "braces": "^3.0.1", + "picomatch": "^2.2.3" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/@typescript-eslint/typescript-estree/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/@typescript-eslint/visitor-keys": { + "version": "4.33.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-4.33.0.tgz", + "integrity": "sha512-uqi/2aSz9g2ftcHWf8uLPJA70rUv6yuMW5Bohw+bwcuzaxQIHaKFZCKGoGXIrc9vkTJ3+0txM73K0Hq3d5wgIg==", + "dev": true, + "dependencies": { + "@typescript-eslint/types": "4.33.0", + "eslint-visitor-keys": "^2.0.0" + }, + "engines": { + "node": "^8.10.0 || ^10.13.0 || >=11.10.1" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/typescript-eslint" + } + }, + "node_modules/@ungap/promise-all-settled": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz", + "integrity": "sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q==", + "dev": true + }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "dev": true + }, + "node_modules/accepts": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.7.tgz", + "integrity": "sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA==", + "dependencies": { + "mime-types": "~2.1.24", + "negotiator": "0.6.2" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/acorn": { + "version": "7.4.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.4.1.tgz", + "integrity": "sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/acorn-walk": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.2.0.tgz", + "integrity": "sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/adler-32": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/adler-32/-/adler-32-1.3.1.tgz", + "integrity": "sha512-ynZ4w/nUUv5rrsR8UUGoe1VC9hZj6V5hU9Qw1HlMDJGEJw5S7TfTErWTjMys6M7vr0YWcPqs3qAr4ss0nDfP+A==", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/adm-zip": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.5.tgz", + "integrity": "sha512-IWwXKnCbirdbyXSfUDvCCrmYrOHANRZcc8NcRrvTlIApdl7PwE9oGcsYvNeJPAVY1M+70b4PxXGKIf8AEuiQ6w==", + "engines": { + "node": ">=6.0" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", + "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", + "dev": true, + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-formats": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz", + "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==", + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/ansi-align": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/ansi-align/-/ansi-align-3.0.1.tgz", + "integrity": "sha512-IOfwwBF5iczOjp/WeY4YxyjqAFMQoZufdQWDd19SEExbVLNXqvpzSJ/M7Za4/sCPmQ0+GRquoA7bGcINcxew6w==", + "dev": true, + "dependencies": { + "string-width": "^4.1.0" + } + }, + "node_modules/ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-gray": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/ansi-gray/-/ansi-gray-0.1.1.tgz", + "integrity": "sha1-KWLPVOyXksSFEKPetSRDaGHvclE=", + "dev": true, + "dependencies": { + "ansi-wrap": "0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/ansi-wrap": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/ansi-wrap/-/ansi-wrap-0.1.0.tgz", + "integrity": "sha1-qCJQ3bABXponyoLoLqYDu/pF768=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/anymatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/append-buffer": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/append-buffer/-/append-buffer-1.0.2.tgz", + "integrity": "sha1-2CIM9GYIFSXv6lBhTz3mUU36WPE=", + "dev": true, + "dependencies": { + "buffer-equal": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/append-field": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/append-field/-/append-field-1.0.0.tgz", + "integrity": "sha1-HjRA6RXwsSA9I3SOeO3XubW0PlY=" + }, + "node_modules/append-transform": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz", + "integrity": "sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==", + "dev": true, + "dependencies": { + "default-require-extensions": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/archy": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz", + "integrity": "sha1-+cjBN1fMHde8N5rHeyxipcKGjEA=", + "dev": true + }, + "node_modules/arg": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", + "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==", + "dev": true + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/arr-diff": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", + "integrity": "sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/arr-filter": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/arr-filter/-/arr-filter-1.1.2.tgz", + "integrity": "sha1-Q/3d0JHo7xGqTEXZzcGOLf8XEe4=", + "dev": true, + "dependencies": { + "make-iterator": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/arr-flatten": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/arr-flatten/-/arr-flatten-1.1.0.tgz", + "integrity": "sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/arr-map": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/arr-map/-/arr-map-2.0.2.tgz", + "integrity": "sha1-Onc0X/wc814qkYJWAfnljy4kysQ=", + "dev": true, + "dependencies": { + "make-iterator": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/arr-union": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/arr-union/-/arr-union-3.1.0.tgz", + "integrity": "sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-each": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-each/-/array-each-1.0.1.tgz", + "integrity": "sha1-p5SvDAWrF1KEbudTofIRoFugxE8=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", + "integrity": "sha1-ml9pkFGx5wczKPKgCJaLZOopVdI=" + }, + "node_modules/array-initial": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/array-initial/-/array-initial-1.1.0.tgz", + "integrity": "sha1-L6dLJnOTccOUe9enrcc74zSz15U=", + "dev": true, + "dependencies": { + "array-slice": "^1.0.0", + "is-number": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-initial/node_modules/is-number": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-4.0.0.tgz", + "integrity": "sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-last": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/array-last/-/array-last-1.3.0.tgz", + "integrity": "sha512-eOCut5rXlI6aCOS7Z7kCplKRKyiFQ6dHFBem4PwlwKeNFk2/XxTrhRh5T9PyaEWGy/NHTZWbY+nsZlNFJu9rYg==", + "dev": true, + "dependencies": { + "is-number": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-last/node_modules/is-number": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-4.0.0.tgz", + "integrity": "sha512-rSklcAIlf1OmFdyAqbnWTLVelsQ58uvZ66S/ZyawjWqIviTWCjg2PzVGw8WUA+nNuPTqb4wgA+NszrJ+08LlgQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-slice": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/array-slice/-/array-slice-1.1.0.tgz", + "integrity": "sha512-B1qMD3RBP7O8o0H2KbrXDyB0IccejMF15+87Lvlor12ONPRHP6gTjXMNkt/d3ZuOGbAe66hFmaCfECI24Ufp6w==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-sort": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/array-sort/-/array-sort-1.0.0.tgz", + "integrity": "sha512-ihLeJkonmdiAsD7vpgN3CRcx2J2S0TiYW+IS/5zHBI7mKUq3ySvBdzzBfD236ubDBQFiiyG3SWCPc+msQ9KoYg==", + "dev": true, + "dependencies": { + "default-compare": "^1.0.0", + "get-value": "^2.0.6", + "kind-of": "^5.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-sort/node_modules/kind-of": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", + "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/array-union": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/array-union/-/array-union-2.1.0.tgz", + "integrity": "sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/array-unique": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", + "integrity": "sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/arrify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz", + "integrity": "sha1-iYUI2iIm84DfkEcoRWhJwVAaSw0=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/asn1": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", + "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", + "dependencies": { + "safer-buffer": "~2.1.0" + } + }, + "node_modules/assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/assign-symbols": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", + "integrity": "sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/astral-regex": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/astral-regex/-/astral-regex-2.0.0.tgz", + "integrity": "sha512-Z7tMw1ytTXt5jqMcOP+OQteU1VuNK9Y02uuJtKQ1Sv69jXQKKg5cibLwGJow8yzZP+eAc18EmLGPal0bp36rvQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/async": { + "version": "0.9.2", + "resolved": "https://registry.npmjs.org/async/-/async-0.9.2.tgz", + "integrity": "sha1-rqdNXmHB+JlhO/ZL2mbUx48v0X0=" + }, + "node_modules/async-done": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/async-done/-/async-done-1.3.2.tgz", + "integrity": "sha512-uYkTP8dw2og1tu1nmza1n1CMW0qb8gWWlwqMmLb7MhBVs4BXrFziT6HXUd+/RlRA/i4H9AkofYloUbs1fwMqlw==", + "dev": true, + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.2", + "process-nextick-args": "^2.0.0", + "stream-exhaust": "^1.0.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/async-each": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/async-each/-/async-each-1.0.3.tgz", + "integrity": "sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ==", + "dev": true + }, + "node_modules/async-settle": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async-settle/-/async-settle-1.0.0.tgz", + "integrity": "sha1-HQqRS7Aldb7IqPOnTlCA9yssDGs=", + "dev": true, + "dependencies": { + "async-done": "^1.2.2" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, + "node_modules/atob": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/atob/-/atob-2.1.2.tgz", + "integrity": "sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg==", + "dev": true, + "bin": { + "atob": "bin/atob.js" + }, + "engines": { + "node": ">= 4.5.0" + } + }, + "node_modules/aws-sdk": { + "version": "2.742.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.742.0.tgz", + "integrity": "sha512-zntDB0BpMn/y+B4RQvXuqY8DmJDYPkeFjZ6BbZ6vdNrsdB5TRz8p53ats4D3mLG068RB4M4AmVioFnU69nDXyQ==", + "dependencies": { + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.15.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "uuid": "3.3.2", + "xml2js": "0.4.19" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/aws-sdk/node_modules/uuid": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", + "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/aws-sdk/node_modules/xml2js": { + "version": "0.4.19", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.19.tgz", + "integrity": "sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~9.0.1" + } + }, + "node_modules/aws-sdk/node_modules/xmlbuilder": { + "version": "9.0.7", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-9.0.7.tgz", + "integrity": "sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0=", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/axios": { + "version": "0.21.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.4.tgz", + "integrity": "sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==", + "dependencies": { + "follow-redirects": "^1.14.0" + } + }, + "node_modules/bach": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/bach/-/bach-1.2.0.tgz", + "integrity": "sha1-Szzpa/JxNPeaG0FKUcFONMO9mIA=", + "dev": true, + "dependencies": { + "arr-filter": "^1.1.1", + "arr-flatten": "^1.0.1", + "arr-map": "^2.0.0", + "array-each": "^1.0.0", + "array-initial": "^1.0.0", + "array-last": "^1.1.1", + "async-done": "^1.2.2", + "async-settle": "^1.0.0", + "now-and-later": "^2.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/balanced-match": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz", + "integrity": "sha1-ibTRmasr7kneFk6gK4nORi1xt2c=" + }, + "node_modules/base": { + "version": "0.11.2", + "resolved": "https://registry.npmjs.org/base/-/base-0.11.2.tgz", + "integrity": "sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg==", + "dev": true, + "dependencies": { + "cache-base": "^1.0.1", + "class-utils": "^0.3.5", + "component-emitter": "^1.2.1", + "define-property": "^1.0.0", + "isobject": "^3.0.1", + "mixin-deep": "^1.2.0", + "pascalcase": "^0.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/base/node_modules/define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", + "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", + "dev": true, + "dependencies": { + "is-descriptor": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/base/node_modules/is-accessor-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", + "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "dev": true, + "dependencies": { + "kind-of": "^6.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/base/node_modules/is-data-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", + "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "dev": true, + "dependencies": { + "kind-of": "^6.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/base/node_modules/is-descriptor": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", + "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "dev": true, + "dependencies": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "dev": true, + "optional": true, + "dependencies": { + "file-uri-to-path": "1.0.0" + } + }, + "node_modules/bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==" + }, + "node_modules/body-parser": { + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.19.0.tgz", + "integrity": "sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw==", + "dependencies": { + "bytes": "3.1.0", + "content-type": "~1.0.4", + "debug": "2.6.9", + "depd": "~1.1.2", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "on-finished": "~2.3.0", + "qs": "6.7.0", + "raw-body": "2.4.0", + "type-is": "~1.6.17" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/body-parser/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/body-parser/node_modules/qs": { + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", + "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/boxen": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/boxen/-/boxen-5.1.2.tgz", + "integrity": "sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==", + "dev": true, + "dependencies": { + "ansi-align": "^3.0.0", + "camelcase": "^6.2.0", + "chalk": "^4.1.0", + "cli-boxes": "^2.2.1", + "string-width": "^4.2.2", + "type-fest": "^0.20.2", + "widest-line": "^3.1.0", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/boxen/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/boxen/node_modules/camelcase": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", + "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/boxen/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/boxen/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/boxen/node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/boxen/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-2.3.2.tgz", + "integrity": "sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w==", + "dev": true, + "dependencies": { + "arr-flatten": "^1.1.0", + "array-unique": "^0.3.2", + "extend-shallow": "^2.0.1", + "fill-range": "^4.0.0", + "isobject": "^3.0.1", + "repeat-element": "^1.1.2", + "snapdragon": "^0.8.1", + "snapdragon-node": "^2.0.1", + "split-string": "^3.0.2", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/braces/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "node_modules/buffer": { + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", + "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", + "dependencies": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4", + "isarray": "^1.0.0" + } + }, + "node_modules/buffer-equal": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/buffer-equal/-/buffer-equal-1.0.0.tgz", + "integrity": "sha1-WWFrSYME1Var1GaWayLu2j7KX74=", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/buffer-equal-constant-time": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz", + "integrity": "sha1-+OcRMvf/5uAaXJaXpMbz5I1cyBk=" + }, + "node_modules/buffer-from": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.1.tgz", + "integrity": "sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==" + }, + "node_modules/buffer-writer": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-2.0.0.tgz", + "integrity": "sha512-a7ZpuTZU1TRtnwyCNW3I5dc0wWNC3VR9S++Ewyk2HHZdrO3CQJqSpd+95Us590V6AL7JqUAH2IwZ/398PmNFgw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/busboy": { + "version": "0.2.14", + "resolved": "https://registry.npmjs.org/busboy/-/busboy-0.2.14.tgz", + "integrity": "sha1-bCpiLvz0fFe7vh4qnDetNseSVFM=", + "dependencies": { + "dicer": "0.2.5", + "readable-stream": "1.1.x" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/busboy/node_modules/isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "node_modules/busboy/node_modules/readable-stream": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/busboy/node_modules/string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + }, + "node_modules/bytes": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", + "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/cache-base": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/cache-base/-/cache-base-1.0.1.tgz", + "integrity": "sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ==", + "dev": true, + "dependencies": { + "collection-visit": "^1.0.0", + "component-emitter": "^1.2.1", + "get-value": "^2.0.6", + "has-value": "^1.0.0", + "isobject": "^3.0.1", + "set-value": "^2.0.0", + "to-object-path": "^0.3.0", + "union-value": "^1.0.0", + "unset-value": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/cacheable-request": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-6.1.0.tgz", + "integrity": "sha512-Oj3cAGPCqOZX7Rz64Uny2GYAZNliQSqfbePrgAQ1wKAihYmCUnraBtJtKcGR4xz7wF+LoJC+ssFZvv5BgF9Igg==", + "dev": true, + "dependencies": { + "clone-response": "^1.0.2", + "get-stream": "^5.1.0", + "http-cache-semantics": "^4.0.0", + "keyv": "^3.0.0", + "lowercase-keys": "^2.0.0", + "normalize-url": "^4.1.0", + "responselike": "^1.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cacheable-request/node_modules/get-stream": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", + "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", + "dev": true, + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cacheable-request/node_modules/lowercase-keys": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", + "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cacheable-request/node_modules/pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/caching-transform": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz", + "integrity": "sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==", + "dev": true, + "dependencies": { + "hasha": "^5.0.0", + "make-dir": "^3.0.0", + "package-hash": "^4.0.0", + "write-file-atomic": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/call-bind": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.0.tgz", + "integrity": "sha512-AEXsYIyyDY3MCzbwdhzG3Jx1R0J2wetQyUynn6dYHAO+bg8l1k7jwZtRv4ryryFs7EP+NDlikJlVe59jr0cM2w==", + "dependencies": { + "function-bind": "^1.1.1", + "get-intrinsic": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "engines": { + "node": ">=6" + } + }, + "node_modules/cfb": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/cfb/-/cfb-1.2.1.tgz", + "integrity": "sha512-wT2ScPAFGSVy7CY+aauMezZBnNrfnaLSrxHUHdea+Td/86vrk6ZquggV+ssBR88zNs0OnBkL2+lf9q0K+zVGzQ==", + "dependencies": { + "adler-32": "~1.3.0", + "crc-32": "~1.2.0", + "printj": "~1.3.0" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/cfb/node_modules/printj": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/printj/-/printj-1.3.0.tgz", + "integrity": "sha512-017o8YIaz8gLhaNxRB9eBv2mWXI2CtzhPJALnQTP+OPpuUfP0RMWqr/mHCzqVeu1AQxfzSfAtAq66vKB8y7Lzg==", + "bin": { + "printj": "bin/printj.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/chai": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.3.4.tgz", + "integrity": "sha512-yS5H68VYOCtN1cjfwumDSuzn/9c+yza4f3reKXlE5rUg7SFcCEy90gJvydNgOYtblyf4Zi6jIWRnXOgErta0KA==", + "dev": true, + "dependencies": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^3.0.1", + "get-func-name": "^2.0.0", + "pathval": "^1.1.1", + "type-detect": "^4.0.5" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chalk/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/chokidar": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.2.tgz", + "integrity": "sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ==", + "dev": true, + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/chokidar/node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/chokidar/node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/chokidar/node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ci-info": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", + "integrity": "sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ==", + "dev": true + }, + "node_modules/clamdjs": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/clamdjs/-/clamdjs-1.0.2.tgz", + "integrity": "sha512-gVnX5ySMULvwYL2ykZQnP4UK4nIK7ftG6z015drJyOFgWpsqXt1Hcq4fMyPwM8LLsxfgfYKLiZi288xuTfmZBQ==" + }, + "node_modules/class-utils": { + "version": "0.3.6", + "resolved": "https://registry.npmjs.org/class-utils/-/class-utils-0.3.6.tgz", + "integrity": "sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg==", + "dev": true, + "dependencies": { + "arr-union": "^3.1.0", + "define-property": "^0.2.5", + "isobject": "^3.0.0", + "static-extend": "^0.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/class-utils/node_modules/define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "dev": true, + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", + "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/cli-boxes": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/cli-boxes/-/cli-boxes-2.2.1.tgz", + "integrity": "sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==", + "dev": true, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cliui": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" + } + }, + "node_modules/clone": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/clone/-/clone-2.1.2.tgz", + "integrity": "sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18=", + "dev": true, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/clone-buffer": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/clone-buffer/-/clone-buffer-1.0.0.tgz", + "integrity": "sha1-4+JbIHrE5wGvch4staFnksrD3Fg=", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/clone-response": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz", + "integrity": "sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws=", + "dev": true, + "dependencies": { + "mimic-response": "^1.0.0" + } + }, + "node_modules/clone-stats": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/clone-stats/-/clone-stats-1.0.0.tgz", + "integrity": "sha1-s3gt/4u1R04Yuba/D9/ngvh3doA=", + "dev": true + }, + "node_modules/cloneable-readable": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/cloneable-readable/-/cloneable-readable-1.1.3.tgz", + "integrity": "sha512-2EF8zTQOxYq70Y4XKtorQupqF0m49MBz2/yf5Bj+MHjvpG3Hy7sImifnqD6UA+TKYxeSV+u6qqQPawN5UvnpKQ==", + "dev": true, + "dependencies": { + "inherits": "^2.0.1", + "process-nextick-args": "^2.0.0", + "readable-stream": "^2.3.5" + } + }, + "node_modules/cloneable-readable/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/cloneable-readable/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/cloneable-readable/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/code-point-at": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/code-point-at/-/code-point-at-1.1.0.tgz", + "integrity": "sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/codepage": { + "version": "1.15.0", + "resolved": "https://registry.npmjs.org/codepage/-/codepage-1.15.0.tgz", + "integrity": "sha512-3g6NUTPd/YtuuGrhMnOMRjFc+LJw/bnMp3+0r/Wcz3IXUuCosKRJvMphm5+Q+bvTVGcJJuRvVLuYba+WojaFaA==", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/collection-map": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/collection-map/-/collection-map-1.0.0.tgz", + "integrity": "sha1-rqDwb40mx4DCt1SUOFVEsiVa8Yw=", + "dev": true, + "dependencies": { + "arr-map": "^2.0.2", + "for-own": "^1.0.0", + "make-iterator": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/collection-visit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/collection-visit/-/collection-visit-1.0.0.tgz", + "integrity": "sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA=", + "dev": true, + "dependencies": { + "map-visit": "^1.0.0", + "object-visit": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/color": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/color/-/color-3.0.0.tgz", + "integrity": "sha512-jCpd5+s0s0t7p3pHQKpnJ0TpQKKdleP71LWcA0aqiljpiuAkOSUFN/dyH8ZwF0hRmFlrIuRhufds1QyEP9EB+w==", + "dependencies": { + "color-convert": "^1.9.1", + "color-string": "^1.5.2" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/color-string": { + "version": "1.5.4", + "resolved": "https://registry.npmjs.org/color-string/-/color-string-1.5.4.tgz", + "integrity": "sha512-57yF5yt8Xa3czSEW1jfQDE79Idk0+AkN/4KWad6tbdxUmAs3MvjxlWSWD4deYytcRfoZ9nhKyFl1kj5tBvidbw==", + "dependencies": { + "color-name": "^1.0.0", + "simple-swizzle": "^0.2.2" + } + }, + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "dev": true, + "bin": { + "color-support": "bin.js" + } + }, + "node_modules/color/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=" + }, + "node_modules/colorette": { + "version": "2.0.16", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz", + "integrity": "sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==" + }, + "node_modules/colors": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.4.0.tgz", + "integrity": "sha512-a+UqTh4kgZg/SlGvfbzDHpgRu7AAQOmmqRHJnxhRZICKFUT91brVhNNt58CMWU9PsBbv3PDCZUHbVxuDiH2mtA==", + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/colorspace": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/colorspace/-/colorspace-1.1.2.tgz", + "integrity": "sha512-vt+OoIP2d76xLhjwbBaucYlNSpPsrJWPlBTtwCpQKIu6/CSMutyzX93O/Do0qzpH3YoHEes8YEFXyZ797rEhzQ==", + "dependencies": { + "color": "3.0.x", + "text-hex": "1.0.x" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "engines": { + "node": ">= 12" + } + }, + "node_modules/commondir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz", + "integrity": "sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs=", + "dev": true + }, + "node_modules/component-emitter": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/component-emitter/-/component-emitter-1.3.0.tgz", + "integrity": "sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=" + }, + "node_modules/concat-stream": { + "version": "1.6.2", + "resolved": "https://registry.npmjs.org/concat-stream/-/concat-stream-1.6.2.tgz", + "integrity": "sha512-27HBghJxjiZtIk3Ycvn/4kbJk/1uZuJFfuPEns6LaEvpvG1f0hTea8lilrouyo9mVc2GWdcEZ8OLoGmSADlrCw==", + "engines": [ + "node >= 0.8" + ], + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + } + }, + "node_modules/concat-stream/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/concat-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/concat-stream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/configstore": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/configstore/-/configstore-5.0.1.tgz", + "integrity": "sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==", + "dev": true, + "dependencies": { + "dot-prop": "^5.2.0", + "graceful-fs": "^4.1.2", + "make-dir": "^3.0.0", + "unique-string": "^2.0.0", + "write-file-atomic": "^3.0.0", + "xdg-basedir": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/content-disposition": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.3.tgz", + "integrity": "sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g==", + "dependencies": { + "safe-buffer": "5.1.2" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-disposition/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/content-type": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.4.tgz", + "integrity": "sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/convert-source-map": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.7.0.tgz", + "integrity": "sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.1" + } + }, + "node_modules/convert-source-map/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/cookie": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.0.tgz", + "integrity": "sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", + "integrity": "sha1-4wOogrNCzD7oylE6eZmXNNqzriw=" + }, + "node_modules/copy-descriptor": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/copy-descriptor/-/copy-descriptor-0.1.1.tgz", + "integrity": "sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/copy-props": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/copy-props/-/copy-props-2.0.5.tgz", + "integrity": "sha512-XBlx8HSqrT0ObQwmSzM7WE5k8FxTV75h1DX1Z3n6NhQ/UYYAvInWYmG06vFt7hQZArE2fuO62aihiWIVQwh1sw==", + "dev": true, + "dependencies": { + "each-props": "^1.3.2", + "is-plain-object": "^5.0.0" + } + }, + "node_modules/copy-props/node_modules/is-plain-object": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz", + "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/core-util-is": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", + "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" + }, + "node_modules/crc-32": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", + "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==", + "bin": { + "crc32": "bin/crc32.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/create-require": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz", + "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==", + "dev": true + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/cross-spawn/node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/crypto-random-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/crypto-random-string/-/crypto-random-string-2.0.0.tgz", + "integrity": "sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cycle": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/cycle/-/cycle-1.0.3.tgz", + "integrity": "sha1-IegLK+hYD5i0aPN5QwZisEbDStI=", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/d": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/d/-/d-1.0.1.tgz", + "integrity": "sha512-m62ShEObQ39CfralilEQRjH6oAMtNCV1xJyEx5LpRYUVN+EviphDgUc/F3hnYbADmkiNs67Y+3ylmlG7Lnu+FA==", + "dev": true, + "dependencies": { + "es5-ext": "^0.10.50", + "type": "^1.0.1" + } + }, + "node_modules/db-migrate": { + "version": "0.11.11", + "resolved": "https://registry.npmjs.org/db-migrate/-/db-migrate-0.11.11.tgz", + "integrity": "sha512-GHZodjB5hXRy+76ZIb9z0OrUn0qSeGfvS0cCfyzPeFCBZ1YU9o9HUBQ8pUT+v/fJ9+a29eRz2xQsLfccXZtf8g==", + "dependencies": { + "balanced-match": "^1.0.0", + "bluebird": "^3.1.1", + "db-migrate-shared": "^1.2.0", + "deep-extend": "^0.6.0", + "dotenv": "^5.0.1", + "final-fs": "^1.6.0", + "inflection": "^1.10.0", + "mkdirp": "~0.5.0", + "parse-database-url": "~0.3.0", + "prompt": "^1.0.0", + "rc": "^1.2.8", + "resolve": "^1.1.6", + "semver": "^5.3.0", + "tunnel-ssh": "^4.0.0", + "yargs": "^15.3.1" + }, + "bin": { + "db-migrate": "bin/db-migrate" + }, + "engines": { + "node": ">=8.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/node-db-migrate" + } + }, + "node_modules/db-migrate-base": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/db-migrate-base/-/db-migrate-base-2.3.0.tgz", + "integrity": "sha512-mxaCkSe7JC2uksvI/rKs+wOQGBSZ6B87xa4b3i+QhB+XRBpGdpMzldKE6INf+EnM6kwhbIPKjyJZgyxui9xBfQ==", + "dependencies": { + "bluebird": "^3.1.1" + } + }, + "node_modules/db-migrate-pg": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/db-migrate-pg/-/db-migrate-pg-1.2.2.tgz", + "integrity": "sha512-+rgrhGNWC2SzcfweopyZqOQ1Igz1RVFMUZwUs6SviHpOUzFwb0NZWkG0pw1GaO+JxTxS7VJjckUWkOwZbVYVag==", + "dependencies": { + "bluebird": "^3.1.1", + "db-migrate-base": "^2.3.0", + "pg": "^8.0.3", + "semver": "^5.0.3" + } + }, + "node_modules/db-migrate-shared": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/db-migrate-shared/-/db-migrate-shared-1.2.0.tgz", + "integrity": "sha512-65k86bVeHaMxb2L0Gw3y5V+CgZSRwhVQMwDMydmw5MvIpHHwD6SmBciqIwHsZfzJ9yzV/yYhdRefRM6FV5/siw==" + }, + "node_modules/db-migrate/node_modules/dotenv": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-5.0.1.tgz", + "integrity": "sha512-4As8uPrjfwb7VXC+WnLCbXK7y+Ueb2B3zgNCePYfhxS1PYeaO1YTeplffTEcbfLhvFNGLAz90VvJs9yomG7bow==", + "engines": { + "node": ">=4.6.0" + } + }, + "node_modules/debug": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.2.0.tgz", + "integrity": "sha512-IX2ncY78vDTjZMFUdmsvIRFY2Cf4FnD0wRs+nQwJU8Lu99/tPFdb0VybiiMTPe3I6rQmwsqQqRBvxU+bZ/I8sg==", + "deprecated": "Debug versions >=3.2.0 <3.2.7 || >=4 <4.3.1 have a low-severity ReDos regression when used in a Node.js environment. It is recommended you upgrade to 3.2.7 or 4.3.1. (https://github.com/visionmedia/debug/issues/797)", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/decode-uri-component": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.0.tgz", + "integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=", + "dev": true, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/decompress-response": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", + "integrity": "sha1-gKTdMjdIOEv6JICDYirt7Jgq3/M=", + "dev": true, + "dependencies": { + "mimic-response": "^1.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/deep-eql": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "dev": true, + "dependencies": { + "type-detect": "^4.0.0" + }, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/deep-equal": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-0.2.2.tgz", + "integrity": "sha1-hLdFiW80xoTpjyzg5Cq69Du6AX0=" + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/deep-is": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", + "integrity": "sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ=" + }, + "node_modules/default-compare": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/default-compare/-/default-compare-1.0.0.tgz", + "integrity": "sha512-QWfXlM0EkAbqOCbD/6HjdwT19j7WCkMyiRhWilc4H9/5h/RzTF9gv5LYh1+CmDV5d1rki6KAWLtQale0xt20eQ==", + "dev": true, + "dependencies": { + "kind-of": "^5.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/default-compare/node_modules/kind-of": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", + "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/default-require-extensions": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.0.tgz", + "integrity": "sha512-ek6DpXq/SCpvjhpFsLFRVtIxJCRw6fUR42lYMVZuUMK7n8eMz4Uh5clckdBjEpLhn/gEBZo7hDJnJcwdKLKQjg==", + "dev": true, + "dependencies": { + "strip-bom": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/default-require-extensions/node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/default-resolution": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/default-resolution/-/default-resolution-2.0.0.tgz", + "integrity": "sha1-vLgrqnKtebQmp2cy8aga1t8m1oQ=", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/defer-to-connect": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-1.1.3.tgz", + "integrity": "sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ==", + "dev": true + }, + "node_modules/define-properties": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", + "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", + "dev": true, + "dependencies": { + "object-keys": "^1.0.12" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/define-property": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-2.0.2.tgz", + "integrity": "sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ==", + "dev": true, + "dependencies": { + "is-descriptor": "^1.0.2", + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/define-property/node_modules/is-accessor-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", + "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "dev": true, + "dependencies": { + "kind-of": "^6.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/define-property/node_modules/is-data-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", + "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "dev": true, + "dependencies": { + "kind-of": "^6.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/define-property/node_modules/is-descriptor": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", + "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "dev": true, + "dependencies": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/del": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/del/-/del-6.0.0.tgz", + "integrity": "sha512-1shh9DQ23L16oXSZKB2JxpL7iMy2E0S9d517ptA1P8iw0alkPtQcrKH7ru31rYtKwF499HkTu+DRzq3TCKDFRQ==", + "dev": true, + "dependencies": { + "globby": "^11.0.1", + "graceful-fs": "^4.2.4", + "is-glob": "^4.0.1", + "is-path-cwd": "^2.2.0", + "is-path-inside": "^3.0.2", + "p-map": "^4.0.0", + "rimraf": "^3.0.2", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/del/node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/depd": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz", + "integrity": "sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/destroy": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.0.4.tgz", + "integrity": "sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=" + }, + "node_modules/detect-file": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/detect-file/-/detect-file-1.0.0.tgz", + "integrity": "sha1-8NZtA2cqglyxtzvbP+YjEMjlUrc=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/dicer": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/dicer/-/dicer-0.2.5.tgz", + "integrity": "sha1-WZbAhrszIYyBLAkL3cCc0S+stw8=", + "dependencies": { + "readable-stream": "1.1.x", + "streamsearch": "0.1.2" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/dicer/node_modules/isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=" + }, + "node_modules/dicer/node_modules/readable-stream": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/dicer/node_modules/string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + }, + "node_modules/diff": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", + "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/difunc": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/difunc/-/difunc-0.0.4.tgz", + "integrity": "sha512-zBiL4ALDmviHdoLC0g0G6wVme5bwAow9WfhcZLLopXCAWgg3AEf7RYTs2xugszIGulRHzEVDF/SHl9oyQU07Pw==", + "dependencies": { + "esprima": "^4.0.0" + } + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/dir-glob/-/dir-glob-3.0.1.tgz", + "integrity": "sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==", + "dev": true, + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "dependencies": { + "esutils": "^2.0.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/dot-prop": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz", + "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==", + "dev": true, + "dependencies": { + "is-obj": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/duplexer3": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.4.tgz", + "integrity": "sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=", + "dev": true + }, + "node_modules/duplexify": { + "version": "3.7.1", + "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-3.7.1.tgz", + "integrity": "sha512-07z8uv2wMyS51kKhD1KsdXJg5WQ6t93RneqRxUHnskXVtlYYkLqM0gqStQZ3pj073g687jPCHrqNfCzawLYh5g==", + "dev": true, + "dependencies": { + "end-of-stream": "^1.0.0", + "inherits": "^2.0.1", + "readable-stream": "^2.0.0", + "stream-shift": "^1.0.0" + } + }, + "node_modules/duplexify/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/duplexify/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/duplexify/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/each-props": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/each-props/-/each-props-1.3.2.tgz", + "integrity": "sha512-vV0Hem3zAGkJAyU7JSjixeU66rwdynTAa1vofCrSA5fEln+m67Az9CcnkVD776/fsN/UjIWmBDoNRS6t6G9RfA==", + "dev": true, + "dependencies": { + "is-plain-object": "^2.0.1", + "object.defaults": "^1.1.0" + } + }, + "node_modules/ecdsa-sig-formatter": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/ecdsa-sig-formatter/-/ecdsa-sig-formatter-1.0.11.tgz", + "integrity": "sha512-nagl3RYrbNv6kQkeJIpt6NJZy8twLB/2vtz6yN9Z4vRKHN4/QZJIEbqohALSgwKdnksuY3k5Addp5lg8sVoVcQ==", + "dependencies": { + "safe-buffer": "^5.0.1" + } + }, + "node_modules/ee-first": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", + "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" + }, + "node_modules/enabled": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/enabled/-/enabled-2.0.0.tgz", + "integrity": "sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ==" + }, + "node_modules/encodeurl": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", + "integrity": "sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k=", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/end-of-stream": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", + "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", + "dev": true, + "dependencies": { + "once": "^1.4.0" + } + }, + "node_modules/enquirer": { + "version": "2.3.6", + "resolved": "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz", + "integrity": "sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg==", + "dev": true, + "dependencies": { + "ansi-colors": "^4.1.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/error-ex/node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0=", + "dev": true + }, + "node_modules/es-abstract": { + "version": "1.18.0-next.1", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.18.0-next.1.tgz", + "integrity": "sha512-I4UGspA0wpZXWENrdA0uHbnhte683t3qT/1VFH9aX2dA5PPSf6QW5HHXf5HImaqPmjXaVeVk4RGWnaylmV7uAA==", + "dev": true, + "dependencies": { + "es-to-primitive": "^1.2.1", + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1", + "is-callable": "^1.2.2", + "is-negative-zero": "^2.0.0", + "is-regex": "^1.1.1", + "object-inspect": "^1.8.0", + "object-keys": "^1.1.1", + "object.assign": "^4.1.1", + "string.prototype.trimend": "^1.0.1", + "string.prototype.trimstart": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es-to-primitive": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", + "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", + "dev": true, + "dependencies": { + "is-callable": "^1.1.4", + "is-date-object": "^1.0.1", + "is-symbol": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/es5-ext": { + "version": "0.10.53", + "resolved": "https://registry.npmjs.org/es5-ext/-/es5-ext-0.10.53.tgz", + "integrity": "sha512-Xs2Stw6NiNHWypzRTY1MtaG/uJlwCk8kH81920ma8mvN8Xq1gsfhZvpkImLQArw8AHnv8MT2I45J3c0R8slE+Q==", + "dev": true, + "dependencies": { + "es6-iterator": "~2.0.3", + "es6-symbol": "~3.1.3", + "next-tick": "~1.0.0" + } + }, + "node_modules/es6-error": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz", + "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==", + "dev": true + }, + "node_modules/es6-iterator": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es6-iterator/-/es6-iterator-2.0.3.tgz", + "integrity": "sha1-p96IkUGgWpSwhUQDstCg+/qY87c=", + "dev": true, + "dependencies": { + "d": "1", + "es5-ext": "^0.10.35", + "es6-symbol": "^3.1.1" + } + }, + "node_modules/es6-symbol": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/es6-symbol/-/es6-symbol-3.1.3.tgz", + "integrity": "sha512-NJ6Yn3FuDinBaBRWl/q5X/s4koRHBrgKAu+yGI6JCBeiu3qrcbJhwT2GeR/EXVfylRk8dpQVJoLEFhK+Mu31NA==", + "dev": true, + "dependencies": { + "d": "^1.0.1", + "ext": "^1.1.2" + } + }, + "node_modules/es6-weak-map": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es6-weak-map/-/es6-weak-map-2.0.3.tgz", + "integrity": "sha512-p5um32HOTO1kP+w7PRnB+5lQ43Z6muuMuIMffvDN8ZB4GcnjLBV6zGStpbASIMk4DCAvEaamhe2zhyCb/QXXsA==", + "dev": true, + "dependencies": { + "d": "1", + "es5-ext": "^0.10.46", + "es6-iterator": "^2.0.3", + "es6-symbol": "^3.1.1" + } + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-goat": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/escape-goat/-/escape-goat-2.1.1.tgz", + "integrity": "sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", + "integrity": "sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg=" + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ=", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/escodegen": { + "version": "1.14.3", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", + "integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==", + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^4.2.0", + "esutils": "^2.0.2", + "optionator": "^0.8.1" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=4.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/escodegen/node_modules/levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4=", + "dependencies": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/escodegen/node_modules/optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dependencies": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/escodegen/node_modules/prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/escodegen/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/escodegen/node_modules/type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha1-WITKtRLPHTVeP7eE8wgEsrUg23I=", + "dependencies": { + "prelude-ls": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/eslint": { + "version": "7.32.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-7.32.0.tgz", + "integrity": "sha512-VHZ8gX+EDfz+97jGcgyGCyRia/dPOd6Xh9yPv8Bl1+SoaIwD+a/vlrOmGRUyOYu7MwUhc7CxqeaDZU13S4+EpA==", + "dev": true, + "dependencies": { + "@babel/code-frame": "7.12.11", + "@eslint/eslintrc": "^0.4.3", + "@humanwhocodes/config-array": "^0.5.0", + "ajv": "^6.10.0", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "enquirer": "^2.3.5", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^5.1.1", + "eslint-utils": "^2.1.0", + "eslint-visitor-keys": "^2.0.0", + "espree": "^7.3.1", + "esquery": "^1.4.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^6.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^5.1.2", + "globals": "^13.6.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.4.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.0.4", + "natural-compare": "^1.4.0", + "optionator": "^0.9.1", + "progress": "^2.0.0", + "regexpp": "^3.1.0", + "semver": "^7.2.1", + "strip-ansi": "^6.0.0", + "strip-json-comments": "^3.1.0", + "table": "^6.0.9", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-config-prettier": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.15.0.tgz", + "integrity": "sha512-a1+kOYLR8wMGustcgAjdydMsQ2A/2ipRPwRKUmfYaSxc9ZPcrku080Ctl6zrZzZNs/U82MjSv+qKREkoq3bJaw==", + "dev": true, + "dependencies": { + "get-stdin": "^6.0.0" + }, + "bin": { + "eslint-config-prettier-check": "bin/cli.js" + }, + "peerDependencies": { + "eslint": ">=3.14.1" + } + }, + "node_modules/eslint-plugin-prettier": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-3.3.1.tgz", + "integrity": "sha512-Rq3jkcFY8RYeQLgk2cCwuc0P7SEFwDravPhsJZOQ5N4YI4DSg50NyqJ/9gdZHzQlHf8MvafSesbNJCcP/FF6pQ==", + "dev": true, + "dependencies": { + "prettier-linter-helpers": "^1.0.0" + }, + "engines": { + "node": ">=6.0.0" + }, + "peerDependencies": { + "eslint": ">=5.0.0", + "prettier": ">=1.13.0" + }, + "peerDependenciesMeta": { + "eslint-config-prettier": { + "optional": true + } + } + }, + "node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/eslint-utils": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-3.0.0.tgz", + "integrity": "sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^2.0.0" + }, + "engines": { + "node": "^10.0.0 || ^12.0.0 || >= 14.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + }, + "peerDependencies": { + "eslint": ">=5" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz", + "integrity": "sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint/node_modules/@babel/code-frame": { + "version": "7.12.11", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.12.11.tgz", + "integrity": "sha512-Zt1yodBx1UcyiePMSkWnU4hPqhwq7hGi2nFL1LeA3EUl+q2LQx16MISgJ0+z7dnmgvP9QtIleuETGOiOH1RcIw==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.10.4" + } + }, + "node_modules/eslint/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/eslint/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/eslint-utils": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz", + "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^1.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + } + }, + "node_modules/eslint/node_modules/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/eslint/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/eslint/node_modules/ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/eslint/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/eslint/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint/node_modules/semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/eslint/node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/esm": { + "version": "3.2.25", + "resolved": "https://registry.npmjs.org/esm/-/esm-3.2.25.tgz", + "integrity": "sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/espree": { + "version": "7.3.1", + "resolved": "https://registry.npmjs.org/espree/-/espree-7.3.1.tgz", + "integrity": "sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g==", + "dev": true, + "dependencies": { + "acorn": "^7.4.0", + "acorn-jsx": "^5.3.1", + "eslint-visitor-keys": "^1.3.0" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/espree/node_modules/eslint-visitor-keys": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz", + "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esquery": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", + "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esquery/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", + "integrity": "sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/events": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", + "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=", + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/expand-brackets": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz", + "integrity": "sha1-t3c14xXOMPa27/D4OwQVGiJEliI=", + "dev": true, + "dependencies": { + "debug": "^2.3.3", + "define-property": "^0.2.5", + "extend-shallow": "^2.0.1", + "posix-character-classes": "^0.1.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/expand-brackets/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/expand-brackets/node_modules/define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "dev": true, + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/expand-brackets/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/expand-tilde": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/expand-tilde/-/expand-tilde-2.0.2.tgz", + "integrity": "sha1-l+gBqgUt8CRU3kawK/YhZCzchQI=", + "dev": true, + "dependencies": { + "homedir-polyfill": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/express": { + "version": "4.17.1", + "resolved": "https://registry.npmjs.org/express/-/express-4.17.1.tgz", + "integrity": "sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g==", + "dependencies": { + "accepts": "~1.3.7", + "array-flatten": "1.1.1", + "body-parser": "1.19.0", + "content-disposition": "0.5.3", + "content-type": "~1.0.4", + "cookie": "0.4.0", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "~1.1.2", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "~1.1.2", + "fresh": "0.5.2", + "merge-descriptors": "1.0.1", + "methods": "~1.1.2", + "on-finished": "~2.3.0", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.7", + "proxy-addr": "~2.0.5", + "qs": "6.7.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.1.2", + "send": "0.17.1", + "serve-static": "1.14.1", + "setprototypeof": "1.1.1", + "statuses": "~1.5.0", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/express-normalize-query-params-middleware": { + "version": "0.5.1", + "resolved": "https://registry.npmjs.org/express-normalize-query-params-middleware/-/express-normalize-query-params-middleware-0.5.1.tgz", + "integrity": "sha1-2+HoE5rssjT7attcAFnHXblzPSo=" + }, + "node_modules/express-openapi": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/express-openapi/-/express-openapi-9.3.0.tgz", + "integrity": "sha512-92H8nuvO1vVMutapDqQXESOxFnaC4/tZAXSi7kJMD+xWXZwNwmuinCxbfQc7JyUY6Y3+vjFXqJ7xeTCpsUhSiA==", + "dependencies": { + "express-normalize-query-params-middleware": "^0.5.0", + "openapi-framework": "^9.3.0", + "openapi-types": "^9.3.0" + } + }, + "node_modules/express/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/express/node_modules/qs": { + "version": "6.7.0", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.7.0.tgz", + "integrity": "sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/express/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/ext": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/ext/-/ext-1.4.0.tgz", + "integrity": "sha512-Key5NIsUxdqKg3vIsdw9dSuXpPCQ297y6wBjL30edxwPgt2E44WcWBZey/ZvUc6sERLTxKdyCu4gZFmUbk1Q7A==", + "dev": true, + "dependencies": { + "type": "^2.0.0" + } + }, + "node_modules/ext/node_modules/type": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/type/-/type-2.1.0.tgz", + "integrity": "sha512-G9absDWvhAWCV2gmF1zKud3OyC61nZDwWvBL2DApaVFogI07CprggiQAOOjvp2NRjYWFzPyu7vwtDrQFq8jeSA==", + "dev": true + }, + "node_modules/extend": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", + "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==", + "dev": true + }, + "node_modules/extend-shallow": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-3.0.2.tgz", + "integrity": "sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=", + "dev": true, + "dependencies": { + "assign-symbols": "^1.0.0", + "is-extendable": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extend-shallow/node_modules/is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "dev": true, + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extglob": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", + "integrity": "sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw==", + "dev": true, + "dependencies": { + "array-unique": "^0.3.2", + "define-property": "^1.0.0", + "expand-brackets": "^2.1.4", + "extend-shallow": "^2.0.1", + "fragment-cache": "^0.2.1", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extglob/node_modules/define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", + "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", + "dev": true, + "dependencies": { + "is-descriptor": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extglob/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extglob/node_modules/is-accessor-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", + "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "dev": true, + "dependencies": { + "kind-of": "^6.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extglob/node_modules/is-data-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", + "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "dev": true, + "dependencies": { + "kind-of": "^6.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/extglob/node_modules/is-descriptor": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", + "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "dev": true, + "dependencies": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/eyes": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/eyes/-/eyes-0.1.8.tgz", + "integrity": "sha1-Ys8SAjTGg3hdkCNIqADvPgzCC8A=", + "engines": { + "node": "> 0.1.90" + } + }, + "node_modules/fancy-log": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/fancy-log/-/fancy-log-1.3.3.tgz", + "integrity": "sha512-k9oEhlyc0FrVh25qYuSELjr8oxsCoc4/LEZfg2iJJrfEk/tZL9bCoJE47gqAvI2m/AUjluCS4+3I0eTx8n3AEw==", + "dev": true, + "dependencies": { + "ansi-gray": "^0.1.1", + "color-support": "^1.1.3", + "parse-node-version": "^1.0.0", + "time-stamp": "^1.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==" + }, + "node_modules/fast-diff": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz", + "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.2.4.tgz", + "integrity": "sha512-kr/Oo6PX51265qeuCYsyGypiO5uJFgBS0jksyG7FUeCyQzNwYnzrNIMR1NXfkZXsMYXYLRAHgISHBz8gQcxKHQ==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.0", + "merge2": "^1.3.0", + "micromatch": "^4.0.2", + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fast-glob/node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fast-glob/node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fast-glob/node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/fast-glob/node_modules/micromatch": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.2.tgz", + "integrity": "sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q==", + "dev": true, + "dependencies": { + "braces": "^3.0.1", + "picomatch": "^2.0.5" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fast-glob/node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/fast-json-patch": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/fast-json-patch/-/fast-json-patch-3.1.1.tgz", + "integrity": "sha512-vf6IHUX2SBcA+5/+4883dsIjpBTqmfBjmYiWK1savxQmFk4JfBMLa7ynTYOs1Rolp/T1betJxHiGD3g1Mn8lUQ==" + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=" + }, + "node_modules/fast-safe-stringify": { + "version": "2.0.7", + "resolved": "https://registry.npmjs.org/fast-safe-stringify/-/fast-safe-stringify-2.0.7.tgz", + "integrity": "sha512-Utm6CdzT+6xsDk2m8S6uL8VHxNwI6Jub+e9NYTcAms28T84pTa25GJQV9j0CY0N1rM8hK4x6grpF2BQf+2qwVA==" + }, + "node_modules/fastq": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.10.0.tgz", + "integrity": "sha512-NL2Qc5L3iQEsyYzweq7qfgy5OtXCmGzGvhElGEd/SoFWEMOEczNh5s5ocaF01HDetxz+p8ecjNPA6cZxxIHmzA==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fecha": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.0.tgz", + "integrity": "sha512-aN3pcx/DSmtyoovUudctc8+6Hl4T+hI9GBBHLjA76jdZl7+b1sgh5g4k+u/GL3dTy1/pnYzKp69FpJ0OicE3Wg==" + }, + "node_modules/file-entry-cache": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", + "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", + "dev": true, + "dependencies": { + "flat-cache": "^3.0.4" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", + "dev": true, + "optional": true + }, + "node_modules/fill-range": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", + "integrity": "sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc=", + "dev": true, + "dependencies": { + "extend-shallow": "^2.0.1", + "is-number": "^3.0.0", + "repeat-string": "^1.6.1", + "to-regex-range": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fill-range/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/final-fs": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/final-fs/-/final-fs-1.6.1.tgz", + "integrity": "sha1-1tzZLvb+T+jAer1WjHE1YQ7eMjY=", + "dependencies": { + "node-fs": "~0.1.5", + "when": "~2.0.1" + } + }, + "node_modules/finalhandler": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", + "integrity": "sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA==", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "~2.3.0", + "parseurl": "~1.3.3", + "statuses": "~1.5.0", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/find-cache-dir": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.1.tgz", + "integrity": "sha512-t2GDMt3oGC/v+BMwzmllWDuJF/xcDtE5j/fCGbqDD7OLuJkj0cfh1YSA5VKPvwMeLFLNDBkwOKZ2X85jGLVftQ==", + "dev": true, + "dependencies": { + "commondir": "^1.0.1", + "make-dir": "^3.0.2", + "pkg-dir": "^4.1.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/avajs/find-cache-dir?sponsor=1" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/findup-sync": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-3.0.0.tgz", + "integrity": "sha512-YbffarhcicEhOrm4CtrwdKBdCuz576RLdhJDsIfvNtxUuhdRet1qZcsMjqbePtAseKdAnDyM/IyXbu7PRPRLYg==", + "dev": true, + "dependencies": { + "detect-file": "^1.0.0", + "is-glob": "^4.0.0", + "micromatch": "^3.0.4", + "resolve-dir": "^1.0.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/fined": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/fined/-/fined-1.2.0.tgz", + "integrity": "sha512-ZYDqPLGxDkDhDZBjZBb+oD1+j0rA4E0pXY50eplAAOPg2N/gUBSSk5IM1/QhPfyVo19lJ+CvXpqfvk+b2p/8Ng==", + "dev": true, + "dependencies": { + "expand-tilde": "^2.0.2", + "is-plain-object": "^2.0.3", + "object.defaults": "^1.1.0", + "object.pick": "^1.2.0", + "parse-filepath": "^1.0.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/flagged-respawn": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/flagged-respawn/-/flagged-respawn-1.0.1.tgz", + "integrity": "sha512-lNaHNVymajmk0OJMBn8fVUAU1BtDeKIqKoVhk4xAALB57aALg6b4W0MfJ/cUE0g9YBXy5XhSlPIpYIJ7HaY/3Q==", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "bin": { + "flat": "cli.js" + } + }, + "node_modules/flat-cache": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.0.4.tgz", + "integrity": "sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==", + "dev": true, + "dependencies": { + "flatted": "^3.1.0", + "rimraf": "^3.0.2" + }, + "engines": { + "node": "^10.12.0 || >=12.0.0" + } + }, + "node_modules/flat-cache/node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/flatted": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.5.tgz", + "integrity": "sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg==", + "dev": true + }, + "node_modules/flush-write-stream": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/flush-write-stream/-/flush-write-stream-1.1.1.tgz", + "integrity": "sha512-3Z4XhFZ3992uIq0XOqb9AreonueSYphE6oYbpt5+3u06JWklbsPkNv3ZKkP9Bz/r+1MWCaMoSQ28P85+1Yc77w==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "readable-stream": "^2.3.6" + } + }, + "node_modules/flush-write-stream/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/flush-write-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/flush-write-stream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/fn.name": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/fn.name/-/fn.name-1.1.0.tgz", + "integrity": "sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==" + }, + "node_modules/follow-redirects": { + "version": "1.14.8", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.8.tgz", + "integrity": "sha512-1x0S9UVJHsQprFcEC/qnNzBLcIxsjAV905f/UkQxbclCsoTWlacCNOpQa/anodLl2uaEKFhfWOvM2Qg77+15zA==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/for-in": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", + "integrity": "sha1-gQaNKVqBQuwKxybG4iAMMPttXoA=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/for-own": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/for-own/-/for-own-1.0.0.tgz", + "integrity": "sha1-xjMy9BXO3EsE2/5wz4NklMU8tEs=", + "dev": true, + "dependencies": { + "for-in": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/foreground-child": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", + "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/forwarded": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", + "integrity": "sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/frac": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/frac/-/frac-1.1.2.tgz", + "integrity": "sha512-w/XBfkibaTl3YDqASwfDUqkna4Z2p9cFSr1aHDt0WoMTECnRfBOv2WArlZILlqgWlmdIlALXGpM2AOhEk5W3IA==", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/fragment-cache": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/fragment-cache/-/fragment-cache-0.2.1.tgz", + "integrity": "sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk=", + "dev": true, + "dependencies": { + "map-cache": "^0.2.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", + "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fromentries": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz", + "integrity": "sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/fs-mkdirp-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs-mkdirp-stream/-/fs-mkdirp-stream-1.0.0.tgz", + "integrity": "sha1-C3gV/DIBxqaeFNuYzgmMFpNSWes=", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.11", + "through2": "^2.0.3" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/fs-routes": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/fs-routes/-/fs-routes-9.0.3.tgz", + "integrity": "sha512-Y5tkylY9fQ1jm11FdJoptzqIG3OyzqrOF16W5odNlIdqFqb2355IbNB3jQkE+C268mSShLmIur8ynYCgL/Yg/g==", + "peerDependencies": { + "glob": ">=7.1.6" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=" + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" + }, + "node_modules/functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", + "dev": true + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.0.1.tgz", + "integrity": "sha512-ZnWP+AmS1VUaLgTRy47+zKtjTxz+0xMpx3I52i+aalBK1QP19ggLF3Db89KJX7kjfOfP2eoa01qc++GwPgufPg==", + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-stdin": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-6.0.0.tgz", + "integrity": "sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/get-stream": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", + "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", + "dev": true, + "dependencies": { + "pump": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/get-stream/node_modules/pump": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", + "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", + "dev": true, + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/get-value": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/get-value/-/get-value-2.0.6.tgz", + "integrity": "sha1-3BXKHGcjh8p2vTesCjlbogQqLCg=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/getopts": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/getopts/-/getopts-2.3.0.tgz", + "integrity": "sha512-5eDf9fuSXwxBL6q5HX+dhDj+dslFGWzU5thZ9kNKUkcPtaPdatmUFKwHFrLb/uf/WpA4BHET+AX3Scl56cAjpA==" + }, + "node_modules/glob": { + "version": "7.1.6", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz", + "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.1.tgz", + "integrity": "sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/glob-stream": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/glob-stream/-/glob-stream-6.1.0.tgz", + "integrity": "sha1-cEXJlBOz65SIjYOrRtC0BMx73eQ=", + "dev": true, + "dependencies": { + "extend": "^3.0.0", + "glob": "^7.1.1", + "glob-parent": "^3.1.0", + "is-negated-glob": "^1.0.0", + "ordered-read-streams": "^1.0.0", + "pumpify": "^1.3.5", + "readable-stream": "^2.1.5", + "remove-trailing-separator": "^1.0.1", + "to-absolute-glob": "^2.0.0", + "unique-stream": "^2.0.2" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/glob-stream/node_modules/glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "dev": true, + "dependencies": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + } + }, + "node_modules/glob-stream/node_modules/is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/glob-stream/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/glob-stream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/glob-stream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/glob-watcher": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/glob-watcher/-/glob-watcher-5.0.5.tgz", + "integrity": "sha512-zOZgGGEHPklZNjZQaZ9f41i7F2YwE+tS5ZHrDhbBCk3stwahn5vQxnFmBJZHoYdusR6R1bLSXeGUy/BhctwKzw==", + "dev": true, + "dependencies": { + "anymatch": "^2.0.0", + "async-done": "^1.2.0", + "chokidar": "^2.0.0", + "is-negated-glob": "^1.0.0", + "just-debounce": "^1.0.0", + "normalize-path": "^3.0.0", + "object.defaults": "^1.1.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/glob-watcher/node_modules/anymatch": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-2.0.0.tgz", + "integrity": "sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw==", + "dev": true, + "dependencies": { + "micromatch": "^3.1.4", + "normalize-path": "^2.1.1" + } + }, + "node_modules/glob-watcher/node_modules/anymatch/node_modules/normalize-path": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", + "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", + "dev": true, + "dependencies": { + "remove-trailing-separator": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/glob-watcher/node_modules/binary-extensions": { + "version": "1.13.1", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-1.13.1.tgz", + "integrity": "sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/glob-watcher/node_modules/chokidar": { + "version": "2.1.8", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-2.1.8.tgz", + "integrity": "sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg==", + "deprecated": "Chokidar 2 does not receive security updates since 2019. Upgrade to chokidar 3 with 15x fewer dependencies", + "dev": true, + "dependencies": { + "anymatch": "^2.0.0", + "async-each": "^1.0.1", + "braces": "^2.3.2", + "glob-parent": "^3.1.0", + "inherits": "^2.0.3", + "is-binary-path": "^1.0.0", + "is-glob": "^4.0.0", + "normalize-path": "^3.0.0", + "path-is-absolute": "^1.0.0", + "readdirp": "^2.2.1", + "upath": "^1.1.1" + }, + "optionalDependencies": { + "fsevents": "^1.2.7" + } + }, + "node_modules/glob-watcher/node_modules/fsevents": { + "version": "1.2.13", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-1.2.13.tgz", + "integrity": "sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw==", + "deprecated": "fsevents 1 will break on node v14+ and could be using insecure binaries. Upgrade to fsevents 2.", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "dependencies": { + "bindings": "^1.5.0", + "nan": "^2.12.1" + }, + "engines": { + "node": ">= 4.0" + } + }, + "node_modules/glob-watcher/node_modules/glob-parent": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-3.1.0.tgz", + "integrity": "sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4=", + "dev": true, + "dependencies": { + "is-glob": "^3.1.0", + "path-dirname": "^1.0.0" + } + }, + "node_modules/glob-watcher/node_modules/glob-parent/node_modules/is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/glob-watcher/node_modules/is-binary-path": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-1.0.1.tgz", + "integrity": "sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg=", + "dev": true, + "dependencies": { + "binary-extensions": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/glob-watcher/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/glob-watcher/node_modules/readdirp": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-2.2.1.tgz", + "integrity": "sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.11", + "micromatch": "^3.1.10", + "readable-stream": "^2.0.2" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/glob-watcher/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/glob-watcher/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/global-dirs": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/global-dirs/-/global-dirs-3.0.0.tgz", + "integrity": "sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==", + "dev": true, + "dependencies": { + "ini": "2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/global-dirs/node_modules/ini": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-2.0.0.tgz", + "integrity": "sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/global-modules": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/global-modules/-/global-modules-1.0.0.tgz", + "integrity": "sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg==", + "dev": true, + "dependencies": { + "global-prefix": "^1.0.1", + "is-windows": "^1.0.1", + "resolve-dir": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/global-prefix": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/global-prefix/-/global-prefix-1.0.2.tgz", + "integrity": "sha1-2/dDxsFJklk8ZVVoy2btMsASLr4=", + "dev": true, + "dependencies": { + "expand-tilde": "^2.0.2", + "homedir-polyfill": "^1.0.1", + "ini": "^1.3.4", + "is-windows": "^1.0.1", + "which": "^1.2.14" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/globals": { + "version": "13.12.1", + "resolved": "https://registry.npmjs.org/globals/-/globals-13.12.1.tgz", + "integrity": "sha512-317dFlgY2pdJZ9rspXDks7073GpDmXdfbM3vYYp0HAMKGDh1FfWPleI2ljVNLQX5M5lXcAslTcPTrOrMEFOjyw==", + "dev": true, + "dependencies": { + "type-fest": "^0.20.2" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globals/node_modules/type-fest": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", + "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/globby": { + "version": "11.0.2", + "resolved": "https://registry.npmjs.org/globby/-/globby-11.0.2.tgz", + "integrity": "sha512-2ZThXDvvV8fYFRVIxnrMQBipZQDr7MxKAmQK1vujaj9/7eF0efG7BPUKJ7jP7G5SLF37xKDXvO4S/KKLj/Z0og==", + "dev": true, + "dependencies": { + "array-union": "^2.1.0", + "dir-glob": "^3.0.1", + "fast-glob": "^3.1.1", + "ignore": "^5.1.4", + "merge2": "^1.3.0", + "slash": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glogg": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/glogg/-/glogg-1.0.2.tgz", + "integrity": "sha512-5mwUoSuBk44Y4EshyiqcH95ZntbDdTQqA3QYSrxmzj28Ai0vXBGMH1ApSANH14j2sIRtqCEyg6PfsuP7ElOEDA==", + "dev": true, + "dependencies": { + "sparkles": "^1.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/got": { + "version": "9.6.0", + "resolved": "https://registry.npmjs.org/got/-/got-9.6.0.tgz", + "integrity": "sha512-R7eWptXuGYxwijs0eV+v3o6+XH1IqVK8dJOEecQfTmkncw9AV4dcw/Dhxi8MdlqPthxxpZyizMzyg8RTmEsG+Q==", + "dev": true, + "dependencies": { + "@sindresorhus/is": "^0.14.0", + "@szmarczak/http-timer": "^1.1.2", + "cacheable-request": "^6.0.0", + "decompress-response": "^3.3.0", + "duplexer3": "^0.1.4", + "get-stream": "^4.1.0", + "lowercase-keys": "^1.0.1", + "mimic-response": "^1.0.1", + "p-cancelable": "^1.0.0", + "to-readable-stream": "^1.0.0", + "url-parse-lax": "^3.0.0" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.4", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.4.tgz", + "integrity": "sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw==", + "dev": true + }, + "node_modules/growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true, + "engines": { + "node": ">=4.x" + } + }, + "node_modules/gulp": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/gulp/-/gulp-4.0.2.tgz", + "integrity": "sha512-dvEs27SCZt2ibF29xYgmnwwCYZxdxhQ/+LFWlbAW8y7jt68L/65402Lz3+CKy0Ov4rOs+NERmDq7YlZaDqUIfA==", + "dev": true, + "dependencies": { + "glob-watcher": "^5.0.3", + "gulp-cli": "^2.2.0", + "undertaker": "^1.2.1", + "vinyl-fs": "^3.0.0" + }, + "bin": { + "gulp": "bin/gulp.js" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/gulp-typescript": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/gulp-typescript/-/gulp-typescript-5.0.1.tgz", + "integrity": "sha512-YuMMlylyJtUSHG1/wuSVTrZp60k1dMEFKYOvDf7OvbAJWrDtxxD4oZon4ancdWwzjj30ztiidhe4VXJniF0pIQ==", + "dev": true, + "dependencies": { + "ansi-colors": "^3.0.5", + "plugin-error": "^1.0.1", + "source-map": "^0.7.3", + "through2": "^3.0.0", + "vinyl": "^2.1.0", + "vinyl-fs": "^3.0.3" + }, + "engines": { + "node": ">= 8" + }, + "peerDependencies": { + "typescript": "~2.7.1 || >=2.8.0-dev || >=2.9.0-dev || ~3.0.0 || >=3.0.0-dev || >=3.1.0-dev || >= 3.2.0-dev || >= 3.3.0-dev" + } + }, + "node_modules/gulp-typescript/node_modules/ansi-colors": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-3.2.4.tgz", + "integrity": "sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/gulp-typescript/node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/gulp-typescript/node_modules/source-map": { + "version": "0.7.3", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.3.tgz", + "integrity": "sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/gulp-typescript/node_modules/through2": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/through2/-/through2-3.0.2.tgz", + "integrity": "sha512-enaDQ4MUyP2W6ZyT6EsMzqBPZaM/avg8iuo+l2d3QCs0J+6RaqkHV/2/lOwDTueBHeJ/2LG9lrLW3d5rWPucuQ==", + "dev": true, + "dependencies": { + "inherits": "^2.0.4", + "readable-stream": "2 || 3" + } + }, + "node_modules/gulp/node_modules/ansi-colors": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-1.1.0.tgz", + "integrity": "sha512-SFKX67auSNoVR38N3L+nvsPjOE0bybKTYbkf5tRvushrAPQ9V75huw0ZxBkKVeRU9kqH3d6HA4xTckbwZ4ixmA==", + "dev": true, + "dependencies": { + "ansi-wrap": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/gulp/node_modules/ansi-regex": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-2.1.1.tgz", + "integrity": "sha1-w7M6te42DYbg5ijwRorn7yfWVN8=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/gulp/node_modules/camelcase": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-3.0.0.tgz", + "integrity": "sha1-MvxLn82vhF/N9+c7uXysImHwqwo=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/gulp/node_modules/cliui": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-3.2.0.tgz", + "integrity": "sha1-EgYBU3qRbSmUD5NNo7SNWFo5IT0=", + "dev": true, + "dependencies": { + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1", + "wrap-ansi": "^2.0.0" + } + }, + "node_modules/gulp/node_modules/get-caller-file": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", + "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==", + "dev": true + }, + "node_modules/gulp/node_modules/gulp-cli": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/gulp-cli/-/gulp-cli-2.3.0.tgz", + "integrity": "sha512-zzGBl5fHo0EKSXsHzjspp3y5CONegCm8ErO5Qh0UzFzk2y4tMvzLWhoDokADbarfZRL2pGpRp7yt6gfJX4ph7A==", + "dev": true, + "dependencies": { + "ansi-colors": "^1.0.1", + "archy": "^1.0.0", + "array-sort": "^1.0.0", + "color-support": "^1.1.3", + "concat-stream": "^1.6.0", + "copy-props": "^2.0.1", + "fancy-log": "^1.3.2", + "gulplog": "^1.0.0", + "interpret": "^1.4.0", + "isobject": "^3.0.1", + "liftoff": "^3.1.0", + "matchdep": "^2.0.0", + "mute-stdout": "^1.0.0", + "pretty-hrtime": "^1.0.0", + "replace-homedir": "^1.0.0", + "semver-greatest-satisfied-range": "^1.1.0", + "v8flags": "^3.2.0", + "yargs": "^7.1.0" + }, + "bin": { + "gulp": "bin/gulp.js" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/gulp/node_modules/interpret": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-1.4.0.tgz", + "integrity": "sha512-agE4QfB2Lkp9uICn7BAqoscw4SZP9kTE2hxiFI3jBPmXJfdqiahTbUuKGsMoN2GtqL9AxhYioAcVvgsb1HvRbA==", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/gulp/node_modules/is-fullwidth-code-point": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-1.0.0.tgz", + "integrity": "sha1-754xOG8DGn8NZDr4L95QxFfvAMs=", + "dev": true, + "dependencies": { + "number-is-nan": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/gulp/node_modules/require-main-filename": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-1.0.1.tgz", + "integrity": "sha1-l/cXtp1IeE9fUmpsWqj/3aBVpNE=", + "dev": true + }, + "node_modules/gulp/node_modules/string-width": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-1.0.2.tgz", + "integrity": "sha1-EYvfW4zcUaKn5w0hHgfisLmxB9M=", + "dev": true, + "dependencies": { + "code-point-at": "^1.0.0", + "is-fullwidth-code-point": "^1.0.0", + "strip-ansi": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/gulp/node_modules/strip-ansi": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-3.0.1.tgz", + "integrity": "sha1-ajhfuIU9lS1f8F0Oiq+UJ43GPc8=", + "dev": true, + "dependencies": { + "ansi-regex": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/gulp/node_modules/which-module": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-1.0.0.tgz", + "integrity": "sha1-u6Y8qGGUiZT/MHc2CJ47lgJsKk8=", + "dev": true + }, + "node_modules/gulp/node_modules/wrap-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-2.1.0.tgz", + "integrity": "sha1-2Pw9KE3QV5T+hJc8rs3Rz4JP3YU=", + "dev": true, + "dependencies": { + "string-width": "^1.0.1", + "strip-ansi": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/gulp/node_modules/y18n": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-3.2.1.tgz", + "integrity": "sha1-bRX7qITAhnnA136I53WegR4H+kE=", + "dev": true + }, + "node_modules/gulp/node_modules/yargs": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-7.1.1.tgz", + "integrity": "sha512-huO4Fr1f9PmiJJdll5kwoS2e4GqzGSsMT3PPMpOwoVkOK8ckqAewMTZyA6LXVQWflleb/Z8oPBEvNsMft0XE+g==", + "dev": true, + "dependencies": { + "camelcase": "^3.0.0", + "cliui": "^3.2.0", + "decamelize": "^1.1.1", + "get-caller-file": "^1.0.1", + "os-locale": "^1.4.0", + "read-pkg-up": "^1.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^1.0.1", + "set-blocking": "^2.0.0", + "string-width": "^1.0.2", + "which-module": "^1.0.0", + "y18n": "^3.2.1", + "yargs-parser": "5.0.0-security.0" + } + }, + "node_modules/gulp/node_modules/yargs-parser": { + "version": "5.0.0-security.0", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-5.0.0-security.0.tgz", + "integrity": "sha512-T69y4Ps64LNesYxeYGYPvfoMTt/7y1XtfpIslUeK4um+9Hu7hlGoRtaDLvdXb7+/tfq4opVa2HRY5xGip022rQ==", + "dev": true, + "dependencies": { + "camelcase": "^3.0.0", + "object.assign": "^4.1.0" + } + }, + "node_modules/gulplog": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/gulplog/-/gulplog-1.0.0.tgz", + "integrity": "sha1-4oxNRdBey77YGDY86PnFkmIp/+U=", + "dev": true, + "dependencies": { + "glogg": "^1.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/has-symbols": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", + "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-value": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-value/-/has-value-1.0.0.tgz", + "integrity": "sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc=", + "dev": true, + "dependencies": { + "get-value": "^2.0.6", + "has-values": "^1.0.0", + "isobject": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/has-values": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/has-values/-/has-values-1.0.0.tgz", + "integrity": "sha1-lbC2P+whRmGab+V/51Yo1aOe/k8=", + "dev": true, + "dependencies": { + "is-number": "^3.0.0", + "kind-of": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/has-values/node_modules/kind-of": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-4.0.0.tgz", + "integrity": "sha1-IIE989cSkosgc3hpGkUGb65y3Vc=", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/has-yarn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/has-yarn/-/has-yarn-2.1.0.tgz", + "integrity": "sha512-UqBRqi4ju7T+TqGNdqAO0PaSVGsDGJUBQvk9eUWNGRY1CFGDzYhLWoM7JQEemnlvVcv/YEmc2wNW8BC24EnUsw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/hasha": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz", + "integrity": "sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==", + "dev": true, + "dependencies": { + "is-stream": "^2.0.0", + "type-fest": "^0.8.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "bin": { + "he": "bin/he" + } + }, + "node_modules/homedir-polyfill": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz", + "integrity": "sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==", + "dev": true, + "dependencies": { + "parse-passwd": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/hosted-git-info": { + "version": "2.8.9", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz", + "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==", + "dev": true + }, + "node_modules/hpagent": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/hpagent/-/hpagent-0.1.2.tgz", + "integrity": "sha512-ePqFXHtSQWAFXYmj+JtOTHr84iNrII4/QRlAAPPE+zqnKy4xJo7Ie1Y4kC7AdB+LxLxSTTzBMASsEcy0q8YyvQ==" + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true + }, + "node_modules/http-cache-semantics": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.0.tgz", + "integrity": "sha512-carPklcUh7ROWRK7Cv27RPtdhYhUsela/ue5/jKzjegVvXDqM2ILE9Q2BGn9JZJh1g87cp56su/FgQSzcWS8cQ==", + "dev": true + }, + "node_modules/http-errors": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-1.7.2.tgz", + "integrity": "sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg==", + "dependencies": { + "depd": "~1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.1", + "statuses": ">= 1.5.0 < 2", + "toidentifier": "1.0.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/i": { + "version": "0.3.7", + "resolved": "https://registry.npmjs.org/i/-/i-0.3.7.tgz", + "integrity": "sha512-FYz4wlXgkQwIPqhzC5TdNMLSE5+GS1IIDJZY/1ZiEPCT2S3COUVZeT5OW4BmW4r5LHLQuOosSwsvnroG9GR59Q==", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", + "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ieee754": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", + "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==" + }, + "node_modules/ignore": { + "version": "5.1.8", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.1.8.tgz", + "integrity": "sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/ignore-by-default": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ignore-by-default/-/ignore-by-default-1.0.1.tgz", + "integrity": "sha1-SMptcvbGo68Aqa1K5odr44ieKwk=", + "dev": true + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-lazy": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/import-lazy/-/import-lazy-2.1.0.tgz", + "integrity": "sha1-BWmOPUXIjo1+nZLLBYTnfwlvPkM=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha1-khi5srkoojixPcT7a21XbyMUU+o=", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", + "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/inflection": { + "version": "1.12.0", + "resolved": "https://registry.npmjs.org/inflection/-/inflection-1.12.0.tgz", + "integrity": "sha1-ogCTVlbW9fa8TcdQLhrstwMihBY=", + "engines": [ + "node >= 0.4.0" + ] + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.3.tgz", + "integrity": "sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=" + }, + "node_modules/ini": { + "version": "1.3.8", + "resolved": "https://registry.npmjs.org/ini/-/ini-1.3.8.tgz", + "integrity": "sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==" + }, + "node_modules/interpret": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz", + "integrity": "sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/invert-kv": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/invert-kv/-/invert-kv-1.0.0.tgz", + "integrity": "sha1-EEqOSqym09jNFXqO+L+rLXo//bY=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/is-absolute": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-absolute/-/is-absolute-1.0.0.tgz", + "integrity": "sha512-dOWoqflvcydARa360Gvv18DZ/gRuHKi2NU/wU5X1ZFzdYfH29nkiNZsF3mp4OJ3H4yo9Mx8A/uAGNzpzPN3yBA==", + "dev": true, + "dependencies": { + "is-relative": "^1.0.0", + "is-windows": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-accessor-descriptor": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz", + "integrity": "sha1-qeEss66Nh2cn7u84Q/igiXtcmNY=", + "dev": true, + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-accessor-descriptor/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-arrayish": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.3.2.tgz", + "integrity": "sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ==" + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-buffer": { + "version": "1.1.6", + "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-1.1.6.tgz", + "integrity": "sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==", + "dev": true + }, + "node_modules/is-callable": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.2.tgz", + "integrity": "sha512-dnMqspv5nU3LoewK2N/y7KLtxtakvTuaCsU9FU50/QDmdbHNy/4/JuRtMHqRU22o3q+W89YQndQEeCVwK+3qrA==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-ci": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-ci/-/is-ci-2.0.0.tgz", + "integrity": "sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w==", + "dev": true, + "dependencies": { + "ci-info": "^2.0.0" + }, + "bin": { + "is-ci": "bin.js" + } + }, + "node_modules/is-core-module": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.2.0.tgz", + "integrity": "sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ==", + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-descriptor": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz", + "integrity": "sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y=", + "dev": true, + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-data-descriptor/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-date-object": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", + "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-descriptor": { + "version": "0.1.6", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-0.1.6.tgz", + "integrity": "sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg==", + "dev": true, + "dependencies": { + "is-accessor-descriptor": "^0.1.6", + "is-data-descriptor": "^0.1.4", + "kind-of": "^5.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-descriptor/node_modules/kind-of": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-5.1.0.tgz", + "integrity": "sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-dir": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-dir/-/is-dir-1.0.0.tgz", + "integrity": "sha1-QdN/SV/MrMBaR3jWboMCTCkro/8=" + }, + "node_modules/is-extendable": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-0.1.1.tgz", + "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-installed-globally": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/is-installed-globally/-/is-installed-globally-0.4.0.tgz", + "integrity": "sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==", + "dev": true, + "dependencies": { + "global-dirs": "^3.0.0", + "is-path-inside": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-negated-glob": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-negated-glob/-/is-negated-glob-1.0.0.tgz", + "integrity": "sha1-aRC8pdqMleeEtXUbl2z1oQ/uNtI=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-negative-zero": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz", + "integrity": "sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-npm": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/is-npm/-/is-npm-5.0.0.tgz", + "integrity": "sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-number": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", + "integrity": "sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU=", + "dev": true, + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-obj": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz", + "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-path-cwd": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/is-path-cwd/-/is-path-cwd-2.2.0.tgz", + "integrity": "sha512-w942bTcih8fdJPJmQHFzkS76NEP8Kzzvmw92cXsazb8intwLqPibPPdXf4ANdKV3rYMuuQYGIWtvz9JilB3NFQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.2.tgz", + "integrity": "sha512-/2UGPSgmtqwo1ktx8NDHjuPwZWmHhO+gj0f93EkhLB5RgW9RZevWYYlIkS6zePc6U2WpOdQYIwHe9YC4DWEBVg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dev": true, + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-regex": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.1.tgz", + "integrity": "sha512-1+QkEcxiLlB7VEyFtyBg94e08OAsvq7FUBgApTq/w2ymCLyKJgDPsybBENVtA7XCQEgEXxKPonG+mvYRxh/LIg==", + "dev": true, + "dependencies": { + "has-symbols": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-relative": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-relative/-/is-relative-1.0.0.tgz", + "integrity": "sha512-Kw/ReK0iqwKeu0MITLFuj0jbPAmEiOsIwyIXvvbfa6QfmN9pkD1M+8pdk7Rl/dTKbH34/XBFMbgD4iMJhLQbGA==", + "dev": true, + "dependencies": { + "is-unc-path": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", + "integrity": "sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw==", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-symbol": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", + "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", + "dev": true, + "dependencies": { + "has-symbols": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-typedarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", + "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", + "dev": true + }, + "node_modules/is-unc-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-unc-path/-/is-unc-path-1.0.0.tgz", + "integrity": "sha512-mrGpVd0fs7WWLfVsStvgF6iEJnbjDFZh9/emhRDcGWTduTfNHd9CHeUwH3gYIjdbwo4On6hunkztwOaAw0yllQ==", + "dev": true, + "dependencies": { + "unc-path-regex": "^0.1.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-utf8": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-utf8/-/is-utf8-0.2.1.tgz", + "integrity": "sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=", + "dev": true + }, + "node_modules/is-valid-glob": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-valid-glob/-/is-valid-glob-1.0.0.tgz", + "integrity": "sha1-Kb8+/3Ab4tTTFdusw5vDn+j2Aao=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-windows": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz", + "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-yarn-global": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/is-yarn-global/-/is-yarn-global-0.3.0.tgz", + "integrity": "sha512-VjSeb/lHmkoyd8ryPVIKvOCn4D1koMqY+vqyjjUfc3xyKtP4dYOxM44sZrnqQSzSds3xyOrUTLTC9LVCVgLngw==", + "dev": true + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=" + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "node_modules/isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha1-TkMekrEalzFjaqH5yNHMvP2reN8=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/isstream": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", + "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz", + "integrity": "sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-hook": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz", + "integrity": "sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==", + "dev": true, + "dependencies": { + "append-transform": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz", + "integrity": "sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==", + "dev": true, + "dependencies": { + "@babel/core": "^7.7.5", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.0.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/istanbul-lib-processinfo": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.2.tgz", + "integrity": "sha512-kOwpa7z9hme+IBPZMzQ5vdQj8srYgAtaRqeI48NGmAQ+/5yKiHLV0QbYqQpxsdEF0+w14SoB8YbnHKcXE2KnYw==", + "dev": true, + "dependencies": { + "archy": "^1.0.0", + "cross-spawn": "^7.0.0", + "istanbul-lib-coverage": "^3.0.0-alpha.1", + "make-dir": "^3.0.0", + "p-map": "^3.0.0", + "rimraf": "^3.0.0", + "uuid": "^3.3.3" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-processinfo/node_modules/p-map": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "dev": true, + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-processinfo/node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/istanbul-lib-processinfo/node_modules/uuid": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.4.0.tgz", + "integrity": "sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==", + "deprecated": "Please upgrade to version 7 or higher. Older versions may use Math.random() in certain circumstances, which is known to be problematic. See https://v8.dev/blog/math-random for details.", + "dev": true, + "bin": { + "uuid": "bin/uuid" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz", + "integrity": "sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw==", + "dev": true, + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^3.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-report/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz", + "integrity": "sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg==", + "dev": true, + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-source-maps/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/istanbul-reports": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.0.2.tgz", + "integrity": "sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw==", + "dev": true, + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jmespath": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.15.0.tgz", + "integrity": "sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc=", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/jose": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/jose/-/jose-2.0.5.tgz", + "integrity": "sha512-BAiDNeDKTMgk4tvD0BbxJ8xHEHBZgpeRZ1zGPPsitSyMgjoMWiLGYAE7H7NpP5h0lPppQajQs871E8NHUrzVPA==", + "dependencies": { + "@panva/asn1.js": "^1.0.0" + }, + "engines": { + "node": ">=10.13.0 < 13 || >=13.7.0" + }, + "funding": { + "url": "https://github.com/sponsors/panva" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/json-buffer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", + "integrity": "sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=", + "dev": true + }, + "node_modules/json-parse-better-errors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", + "integrity": "sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz", + "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==" + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", + "dev": true + }, + "node_modules/json5": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.1.3.tgz", + "integrity": "sha512-KXPvOm8K9IJKFM0bmdn8QXh7udDh1g/giieX0NLCaMnb4hEiVFqnop2ImTXCc5e0/oHz3LTqmHGtExn5hfMkOA==", + "dev": true, + "dependencies": { + "minimist": "^1.2.5" + }, + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonpath": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/jsonpath/-/jsonpath-1.1.1.tgz", + "integrity": "sha512-l6Cg7jRpixfbgoWgkrl77dgEj8RPvND0wMH6TwQmi9Qs4TFfS9u5cUFnbeKTwj5ga5Y3BTGGNI28k117LJ009w==", + "dependencies": { + "esprima": "1.2.2", + "static-eval": "2.0.2", + "underscore": "1.12.1" + } + }, + "node_modules/jsonpath-plus": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/jsonpath-plus/-/jsonpath-plus-7.2.0.tgz", + "integrity": "sha512-zBfiUPM5nD0YZSBT/o/fbCUlCcepMIdP0CJZxM1+KgA4f2T206f6VAg9e7mX35+KlMaIc5qXW34f3BnwJ3w+RA==", + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/jsonpath/node_modules/esprima": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.2.2.tgz", + "integrity": "sha1-dqD9Zvz+FU/SkmZ9wmQBl1CxZXs=", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/jsonwebtoken": { + "version": "8.5.1", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz", + "integrity": "sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w==", + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^5.6.0" + }, + "engines": { + "node": ">=4", + "npm": ">=1.4.28" + } + }, + "node_modules/jsonwebtoken/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/just-debounce": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/just-debounce/-/just-debounce-1.0.0.tgz", + "integrity": "sha1-h/zPrv/AtozRnVX2cilD+SnqNeo=", + "dev": true + }, + "node_modules/just-extend": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-4.2.1.tgz", + "integrity": "sha512-g3UB796vUFIY90VIv/WX3L2c8CS2MdWUww3CNrYmqza1Fg0DURc2K/O4YrnklBdQarSJ/y8JnJYDGc+1iumQjg==", + "dev": true + }, + "node_modules/jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jwks-rsa": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/jwks-rsa/-/jwks-rsa-2.0.5.tgz", + "integrity": "sha512-fliHfsiBRzEU0nXzSvwnh0hynzGB0WihF+CinKbSRlaqRxbqqKf2xbBPgwc8mzf18/WgwlG8e5eTpfSTBcU4DQ==", + "dependencies": { + "@types/express-jwt": "0.0.42", + "debug": "^4.3.2", + "jose": "^2.0.5", + "limiter": "^1.1.5", + "lru-memoizer": "^2.1.4" + }, + "engines": { + "node": ">=10 < 13 || >=14" + } + }, + "node_modules/jwks-rsa/node_modules/debug": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/jwks-rsa/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/keyv": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.1.0.tgz", + "integrity": "sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.0" + } + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/knex": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/knex/-/knex-1.0.1.tgz", + "integrity": "sha512-pusgMo74lEbUxmri+YfWV8x/LJacP/2KcemTCKH7WnXFYz5RoMi+8WM4OJ05b0glfF+aWB4nkFsxsXxJ8qioLQ==", + "dependencies": { + "colorette": "2.0.16", + "commander": "^8.3.0", + "debug": "4.3.3", + "escalade": "^3.1.1", + "esm": "^3.2.25", + "getopts": "2.3.0", + "interpret": "^2.2.0", + "lodash": "^4.17.21", + "pg-connection-string": "2.5.0", + "rechoir": "^0.8.0", + "resolve-from": "^5.0.0", + "tarn": "^3.0.2", + "tildify": "2.0.0" + }, + "bin": { + "knex": "bin/cli.js" + }, + "engines": { + "node": ">=12" + }, + "peerDependenciesMeta": { + "@vscode/sqlite3": { + "optional": true + }, + "better-sqlite3": { + "optional": true + }, + "mysql": { + "optional": true + }, + "mysql2": { + "optional": true + }, + "pg": { + "optional": true + }, + "pg-native": { + "optional": true + }, + "sqlite3": { + "optional": true + }, + "tedious": { + "optional": true + } + } + }, + "node_modules/knex/node_modules/debug": { + "version": "4.3.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.3.tgz", + "integrity": "sha512-/zxw5+vh1Tfv+4Qn7a5nsbcJKPaSvCDhojn6FEl9vupwK2VCSDtEiEtqr8DFtzYFOdz63LBkxec7DYuc2jon6Q==", + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/knex/node_modules/is-core-module": { + "version": "2.8.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.1.tgz", + "integrity": "sha512-SdNCUs284hr40hFTFP6l0IfZ/RSrMXF3qgoRHd3/79unUTvrFO/JoXwkGm+5J/Oe3E/b5GsnG330uUNgRpu1PA==", + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/knex/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + }, + "node_modules/knex/node_modules/rechoir": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.8.0.tgz", + "integrity": "sha512-/vxpCXddiX8NGfGO/mTafwjq4aFa/71pvamip0++IQk3zG8cbCj0fifNPrjjF1XMXUne91jL9OoxmdykoEtifQ==", + "dependencies": { + "resolve": "^1.20.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/knex/node_modules/resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dependencies": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/knex/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "engines": { + "node": ">=8" + } + }, + "node_modules/kuler": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/kuler/-/kuler-2.0.0.tgz", + "integrity": "sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A==" + }, + "node_modules/last-run": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/last-run/-/last-run-1.1.1.tgz", + "integrity": "sha1-RblpQsF7HHnHchmCWbqUO+v4yls=", + "dev": true, + "dependencies": { + "default-resolution": "^2.0.0", + "es6-weak-map": "^2.0.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/latest-version": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/latest-version/-/latest-version-5.1.0.tgz", + "integrity": "sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==", + "dev": true, + "dependencies": { + "package-json": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lazystream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lazystream/-/lazystream-1.0.0.tgz", + "integrity": "sha1-9plf4PggOS9hOWvolGJAe7dxaOQ=", + "dev": true, + "dependencies": { + "readable-stream": "^2.0.5" + }, + "engines": { + "node": ">= 0.6.3" + } + }, + "node_modules/lazystream/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/lazystream/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/lazystream/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/lcid": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lcid/-/lcid-1.0.0.tgz", + "integrity": "sha1-MIrMr6C8SDo4Z7S28rlQYlHRuDU=", + "dev": true, + "dependencies": { + "invert-kv": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/lead": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/lead/-/lead-1.0.0.tgz", + "integrity": "sha1-bxT5mje+Op3XhPVJVpDlkDRm7kI=", + "dev": true, + "dependencies": { + "flush-write-stream": "^1.0.2" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/liftoff": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/liftoff/-/liftoff-3.1.0.tgz", + "integrity": "sha512-DlIPlJUkCV0Ips2zf2pJP0unEoT1kwYhiiPUGF3s/jtxTCjziNLoiVVh+jqWOWeFi6mmwQ5fNxvAUyPad4Dfog==", + "dev": true, + "dependencies": { + "extend": "^3.0.0", + "findup-sync": "^3.0.0", + "fined": "^1.0.1", + "flagged-respawn": "^1.0.0", + "is-plain-object": "^2.0.4", + "object.map": "^1.0.0", + "rechoir": "^0.6.2", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/limiter": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/limiter/-/limiter-1.1.5.tgz", + "integrity": "sha512-FWWMIEOxz3GwUI4Ts/IvgVy6LPvoMPgjMdQ185nN6psJyBJ4yOpzqm695/h5umdLJg2vW3GR5iG11MAkR2AzJA==" + }, + "node_modules/load-json-file": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-1.1.0.tgz", + "integrity": "sha1-lWkFcI1YtLq0wiYbBPWfMcmTdMA=", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0", + "strip-bom": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, + "node_modules/lodash.clonedeep": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz", + "integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=" + }, + "node_modules/lodash.defaults": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/lodash.defaults/-/lodash.defaults-4.2.0.tgz", + "integrity": "sha1-0JF4cW/+pN3p5ft7N/bwgCJ0WAw=" + }, + "node_modules/lodash.flattendeep": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz", + "integrity": "sha1-+wMJF/hqMTTlvJvsDWngAT3f7bI=", + "dev": true + }, + "node_modules/lodash.get": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz", + "integrity": "sha1-LRd/ZS+jHpObRDjVNBSZ36OCXpk=", + "dev": true + }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha1-YLuYqHy5I8aMoeUTJUgzFISfVT8=" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha1-bC4XHbKiV82WgC/UOwGyDV9YcPY=" + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha1-YZwK89A/iwTDH1iChAt3sRzWg0M=" + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha1-POdoEMWSjQM1IwGsKHMX8RwLH/w=" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha1-1SfftUVuynzJu5XV2ur4i6VKVFE=" + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==" + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha1-DdOXEhPHxW34gJd9UEyI+0cal6w=" + }, + "node_modules/lodash.truncate": { + "version": "4.4.2", + "resolved": "https://registry.npmjs.org/lodash.truncate/-/lodash.truncate-4.4.2.tgz", + "integrity": "sha1-WjUNoLERO4N+z//VgSy+WNbq4ZM=", + "dev": true + }, + "node_modules/log-symbols": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.0.0.tgz", + "integrity": "sha512-FN8JBzLx6CzeMrB0tg6pqlGU1wCrXW+ZXGH481kfsBqer0hToTIiHdjH4Mq8xJUbvATujKCvaREGWpGUionraA==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/logform": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/logform/-/logform-2.2.0.tgz", + "integrity": "sha512-N0qPlqfypFx7UHNn4B3lzS/b0uLqt2hmuoa+PpuXNYgozdJYAyauF5Ky0BWVjrxDlMWiT3qN4zPq3vVAfZy7Yg==", + "dependencies": { + "colors": "^1.2.1", + "fast-safe-stringify": "^2.0.4", + "fecha": "^4.2.0", + "ms": "^2.1.1", + "triple-beam": "^1.3.0" + } + }, + "node_modules/logform/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/lowercase-keys": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", + "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/lru-cache": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.0.2.tgz", + "integrity": "sha1-HRdnnAac2l0ECZGgnbwsDbN35V4=", + "dependencies": { + "pseudomap": "^1.0.1", + "yallist": "^2.0.0" + } + }, + "node_modules/lru-memoizer": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/lru-memoizer/-/lru-memoizer-2.1.4.tgz", + "integrity": "sha512-IXAq50s4qwrOBrXJklY+KhgZF+5y98PDaNo0gi/v2KQBFLyWr+JyFvijZXkGKjQj/h9c0OwoE+JZbwUXce76hQ==", + "dependencies": { + "lodash.clonedeep": "^4.5.0", + "lru-cache": "~4.0.0" + } + }, + "node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "dev": true, + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, + "node_modules/make-iterator": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/make-iterator/-/make-iterator-1.0.1.tgz", + "integrity": "sha512-pxiuXh0iVEq7VM7KMIhs5gxsfxCux2URptUQaXo4iZZJxBAzTPOLE2BumO5dbfVYq/hBJFBR/a1mFDmOx5AGmw==", + "dev": true, + "dependencies": { + "kind-of": "^6.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/map-cache": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/map-cache/-/map-cache-0.2.2.tgz", + "integrity": "sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/map-visit": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/map-visit/-/map-visit-1.0.0.tgz", + "integrity": "sha1-7Nyo8TFE5mDxtb1B8S80edmN+48=", + "dev": true, + "dependencies": { + "object-visit": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/matchdep": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/matchdep/-/matchdep-2.0.0.tgz", + "integrity": "sha1-xvNINKDY28OzfCfui7yyfHd1WC4=", + "dev": true, + "dependencies": { + "findup-sync": "^2.0.0", + "micromatch": "^3.0.4", + "resolve": "^1.4.0", + "stack-trace": "0.0.10" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/matchdep/node_modules/findup-sync": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/findup-sync/-/findup-sync-2.0.0.tgz", + "integrity": "sha1-kyaxSIwi0aYIhlCoaQGy2akKLLw=", + "dev": true, + "dependencies": { + "detect-file": "^1.0.0", + "is-glob": "^3.1.0", + "micromatch": "^3.0.4", + "resolve-dir": "^1.0.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/matchdep/node_modules/is-glob": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-3.1.0.tgz", + "integrity": "sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo=", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/media-typer": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", + "integrity": "sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/memorystream": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/memorystream/-/memorystream-0.3.1.tgz", + "integrity": "sha1-htcJCzDORV1j+64S3aUaR93K+bI=", + "dev": true, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", + "integrity": "sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E=" + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", + "integrity": "sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/micromatch": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-3.1.10.tgz", + "integrity": "sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg==", + "dev": true, + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "braces": "^2.3.1", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "extglob": "^2.0.4", + "fragment-cache": "^0.2.1", + "kind-of": "^6.0.2", + "nanomatch": "^1.2.9", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/mime": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/mime/-/mime-2.5.2.tgz", + "integrity": "sha512-tqkh47FzKeCPD2PUiPB6pkbMzsCasjxAfC62/Wap5qrUWcb+sFasXUC5I3gYM5iBM8v/Qpn4UK0x+j0iHyFPDg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/mime-db": { + "version": "1.44.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.44.0.tgz", + "integrity": "sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.27", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.27.tgz", + "integrity": "sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w==", + "dependencies": { + "mime-db": "1.44.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-response": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", + "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz", + "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q==" + }, + "node_modules/mixin-deep": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/mixin-deep/-/mixin-deep-1.3.2.tgz", + "integrity": "sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA==", + "dev": true, + "dependencies": { + "for-in": "^1.0.2", + "is-extendable": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/mixin-deep/node_modules/is-extendable": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-extendable/-/is-extendable-1.0.1.tgz", + "integrity": "sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==", + "dev": true, + "dependencies": { + "is-plain-object": "^2.0.4" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/mkdirp": { + "version": "0.5.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz", + "integrity": "sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==", + "dependencies": { + "minimist": "^1.2.5" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/mocha": { + "version": "8.4.0", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-8.4.0.tgz", + "integrity": "sha512-hJaO0mwDXmZS4ghXsvPVriOhsxQ7ofcpQdm8dE+jISUOKopitvnXFQmpRR7jd2K6VBG6E26gU3IAbXXGIbu4sQ==", + "dev": true, + "dependencies": { + "@ungap/promise-all-settled": "1.1.2", + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.5.1", + "debug": "4.3.1", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.1.6", + "growl": "1.10.5", + "he": "1.2.0", + "js-yaml": "4.0.0", + "log-symbols": "4.0.0", + "minimatch": "3.0.4", + "ms": "2.1.3", + "nanoid": "3.1.20", + "serialize-javascript": "5.0.1", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "which": "2.0.2", + "wide-align": "1.1.3", + "workerpool": "6.1.0", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" + }, + "bin": { + "_mocha": "bin/_mocha", + "mocha": "bin/mocha" + }, + "engines": { + "node": ">= 10.12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/mochajs" + } + }, + "node_modules/mocha/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/mocha/node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/mocha/node_modules/chokidar": { + "version": "3.5.1", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.1.tgz", + "integrity": "sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==", + "dev": true, + "dependencies": { + "anymatch": "~3.1.1", + "braces": "~3.0.2", + "glob-parent": "~5.1.0", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.5.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.1" + } + }, + "node_modules/mocha/node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/mocha/node_modules/debug": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", + "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/mocha/node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/mocha/node_modules/diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/mocha/node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mocha/node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/mocha/node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mocha/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/mocha/node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/mocha/node_modules/js-yaml": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.0.0.tgz", + "integrity": "sha512-pqon0s+4ScYUvX30wxQi3PogGFAlUyH0awepWvwkj4jD4v+ova3RiYw8bmA6x2rDrEaj8i/oWKoRxpVNW+Re8Q==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/mocha/node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mocha/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/mocha/node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mocha/node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mocha/node_modules/readdirp": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.5.0.tgz", + "integrity": "sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/mocha/node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/mocha/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/mocha/node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/mocha/node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/mocha/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/mocha/node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/mocha/node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/mocha/node_modules/yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/moment": { + "version": "2.29.2", + "resolved": "https://registry.npmjs.org/moment/-/moment-2.29.2.tgz", + "integrity": "sha512-UgzG4rvxYpN15jgCmVJwac49h9ly9NurikMWGPdVxm8GZD6XjkKPxDTjQQ43gtGgnV3X0cAyWDdP2Wexoquifg==", + "engines": { + "node": "*" + } + }, + "node_modules/mongodb-uri": { + "version": "0.9.7", + "resolved": "https://registry.npmjs.org/mongodb-uri/-/mongodb-uri-0.9.7.tgz", + "integrity": "sha1-D3ca0W9IOuZfQoeWlCjp+8SqYYE=", + "engines": { + "node": ">= 0.6.0" + } + }, + "node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "node_modules/multer": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/multer/-/multer-1.4.3.tgz", + "integrity": "sha512-np0YLKncuZoTzufbkM6wEKp68EhWJXcU6fq6QqrSwkckd2LlMgd1UqhUJLj6NS/5sZ8dE8LYDWslsltJznnXlg==", + "deprecated": "Multer 1.x is affected by CVE-2022-24434. This is fixed in v1.4.4-lts.1 which drops support for versions of Node.js before 6. Please upgrade to at least Node.js 6 and version 1.4.4-lts.1 of Multer. If you need support for older versions of Node.js, we are open to accepting patches that would fix the CVE on the main 1.x release line, whilst maintaining compatibility with Node.js 0.10.", + "dependencies": { + "append-field": "^1.0.0", + "busboy": "^0.2.11", + "concat-stream": "^1.5.2", + "mkdirp": "^0.5.4", + "object-assign": "^4.1.1", + "on-finished": "^2.3.0", + "type-is": "^1.6.4", + "xtend": "^4.0.0" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/mute-stdout": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/mute-stdout/-/mute-stdout-1.0.1.tgz", + "integrity": "sha512-kDcwXR4PS7caBpuRYYBUz9iVixUk3anO3f5OYFiIPwK/20vCzKCHyKoulbiDY1S53zD2bxUpxN/IJ+TnXjfvxg==", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==" + }, + "node_modules/nan": { + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.15.0.tgz", + "integrity": "sha512-8ZtvEnA2c5aYCZYd1cvgdnU6cqwixRoYg70xPLWUws5ORTa/lnw+u4amixRS/Ac5U5mQVgp9pnlSUnbNWFaWZQ==", + "dev": true, + "optional": true + }, + "node_modules/nanoid": { + "version": "3.1.20", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.20.tgz", + "integrity": "sha512-a1cQNyczgKbLX9jwbS/+d7W8fX/RfgYR7lVWwWOGIPNgK2m0MWvrGF6/m4kk6U3QcFMnZf3RIhL0v2Jgh/0Uxw==", + "dev": true, + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/nanomatch": { + "version": "1.2.13", + "resolved": "https://registry.npmjs.org/nanomatch/-/nanomatch-1.2.13.tgz", + "integrity": "sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA==", + "dev": true, + "dependencies": { + "arr-diff": "^4.0.0", + "array-unique": "^0.3.2", + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "fragment-cache": "^0.2.1", + "is-windows": "^1.0.2", + "kind-of": "^6.0.2", + "object.pick": "^1.3.0", + "regex-not": "^1.0.0", + "snapdragon": "^0.8.1", + "to-regex": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=", + "dev": true + }, + "node_modules/ncp": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ncp/-/ncp-1.0.1.tgz", + "integrity": "sha1-0VNn5cuHQyuhF9K/gP30Wuz7QkY=", + "bin": { + "ncp": "bin/ncp" + } + }, + "node_modules/negotiator": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.2.tgz", + "integrity": "sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/next-tick": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/next-tick/-/next-tick-1.0.0.tgz", + "integrity": "sha1-yobR/ogoFpsBICCOPchCS524NCw=", + "dev": true + }, + "node_modules/nice-try": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/nice-try/-/nice-try-1.0.5.tgz", + "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", + "dev": true + }, + "node_modules/nise": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/nise/-/nise-5.1.0.tgz", + "integrity": "sha512-W5WlHu+wvo3PaKLsJJkgPup2LrsXCcm7AWwyNZkUnn5rwPkuPBi3Iwk5SQtN0mv+K65k7nKKjwNQ30wg3wLAQQ==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.7.0", + "@sinonjs/fake-timers": "^7.0.4", + "@sinonjs/text-encoding": "^0.7.1", + "just-extend": "^4.0.2", + "path-to-regexp": "^1.7.0" + } + }, + "node_modules/nise/node_modules/isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha1-ihis/Kmo9Bd+Cav8YDiTmwXR7t8=", + "dev": true + }, + "node_modules/nise/node_modules/path-to-regexp": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-1.8.0.tgz", + "integrity": "sha512-n43JRhlUKUAlibEJhPeir1ncUID16QnEjNpwzNdO3Lm4ywrBpBZ5oLD0I6br9evr1Y9JTqwRtAh7JLoOzAQdVA==", + "dev": true, + "dependencies": { + "isarray": "0.0.1" + } + }, + "node_modules/node-fs": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/node-fs/-/node-fs-0.1.7.tgz", + "integrity": "sha1-MjI8zLRsn78PwRgS1FAhzDHTJbs=", + "os": [ + "linux", + "darwin", + "freebsd", + "win32", + "smartos", + "sunos" + ], + "engines": { + "node": ">=0.1.97" + } + }, + "node_modules/node-preload": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz", + "integrity": "sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==", + "dev": true, + "dependencies": { + "process-on-spawn": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nodemon": { + "version": "2.0.14", + "resolved": "https://registry.npmjs.org/nodemon/-/nodemon-2.0.14.tgz", + "integrity": "sha512-frcpDx+PviKEQRSYzwhckuO2zoHcBYLHI754RE9z5h1RGtrngerc04mLpQQCPWBkH/2ObrX7We9YiwVSYZpFJQ==", + "dev": true, + "hasInstallScript": true, + "dependencies": { + "chokidar": "^3.2.2", + "debug": "^3.2.6", + "ignore-by-default": "^1.0.1", + "minimatch": "^3.0.4", + "pstree.remy": "^1.1.7", + "semver": "^5.7.1", + "supports-color": "^5.5.0", + "touch": "^3.1.0", + "undefsafe": "^2.0.3", + "update-notifier": "^5.1.0" + }, + "bin": { + "nodemon": "bin/nodemon.js" + }, + "engines": { + "node": ">=8.10.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/nodemon" + } + }, + "node_modules/nodemon/node_modules/debug": { + "version": "3.2.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz", + "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.1" + } + }, + "node_modules/nodemon/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/nopt": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-1.0.10.tgz", + "integrity": "sha1-bd0hvSoxQXuScn3Vhfim83YI6+4=", + "dev": true, + "dependencies": { + "abbrev": "1" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": "*" + } + }, + "node_modules/normalize-package-data": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", + "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==", + "dev": true, + "dependencies": { + "hosted-git-info": "^2.1.4", + "resolve": "^1.10.0", + "semver": "2 || 3 || 4 || 5", + "validate-npm-package-license": "^3.0.1" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/normalize-url": { + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-4.5.1.tgz", + "integrity": "sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/now-and-later": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/now-and-later/-/now-and-later-2.0.1.tgz", + "integrity": "sha512-KGvQ0cB70AQfg107Xvs/Fbu+dGmZoTRJp2TaPwcwQm3/7PteUyN2BCgk8KBMPGBUXZdVwyWS8fDCGFygBm19UQ==", + "dev": true, + "dependencies": { + "once": "^1.3.2" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/npm-run-all": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/npm-run-all/-/npm-run-all-4.1.5.tgz", + "integrity": "sha512-Oo82gJDAVcaMdi3nuoKFavkIHBRVqQ1qvMb+9LHk/cF4P6B2m8aP04hGf7oL6wZ9BuGwX1onlLhpuoofSyoQDQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "chalk": "^2.4.1", + "cross-spawn": "^6.0.5", + "memorystream": "^0.3.1", + "minimatch": "^3.0.4", + "pidtree": "^0.3.0", + "read-pkg": "^3.0.0", + "shell-quote": "^1.6.1", + "string.prototype.padend": "^3.0.0" + }, + "bin": { + "npm-run-all": "bin/npm-run-all/index.js", + "run-p": "bin/run-p/index.js", + "run-s": "bin/run-s/index.js" + }, + "engines": { + "node": ">= 4" + } + }, + "node_modules/npm-run-all/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm-run-all/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm-run-all/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/npm-run-all/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU=", + "dev": true + }, + "node_modules/npm-run-all/node_modules/cross-spawn": { + "version": "6.0.5", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz", + "integrity": "sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==", + "dev": true, + "dependencies": { + "nice-try": "^1.0.4", + "path-key": "^2.0.1", + "semver": "^5.5.0", + "shebang-command": "^1.2.0", + "which": "^1.2.9" + }, + "engines": { + "node": ">=4.8" + } + }, + "node_modules/npm-run-all/node_modules/load-json-file": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-4.0.0.tgz", + "integrity": "sha1-L19Fq5HjMhYjT9U62rZo607AmTs=", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "parse-json": "^4.0.0", + "pify": "^3.0.0", + "strip-bom": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm-run-all/node_modules/parse-json": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", + "integrity": "sha1-vjX1Qlvh9/bHRxhPmKeIy5lHfuA=", + "dev": true, + "dependencies": { + "error-ex": "^1.3.1", + "json-parse-better-errors": "^1.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm-run-all/node_modules/path-key": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", + "integrity": "sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm-run-all/node_modules/path-type": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-3.0.0.tgz", + "integrity": "sha512-T2ZUsdZFHgA3u4e5PfPbjd7HDDpxPnQb5jN0SrDsjNSuVXHJqtwTnWqG0B1jZrgmJ/7lj1EmVIByWt1gxGkWvg==", + "dev": true, + "dependencies": { + "pify": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm-run-all/node_modules/pify": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", + "integrity": "sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm-run-all/node_modules/read-pkg": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-3.0.0.tgz", + "integrity": "sha1-nLxoaXj+5l0WwA4rGcI3/Pbjg4k=", + "dev": true, + "dependencies": { + "load-json-file": "^4.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/npm-run-all/node_modules/shebang-command": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", + "integrity": "sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=", + "dev": true, + "dependencies": { + "shebang-regex": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-all/node_modules/shebang-regex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", + "integrity": "sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-run-all/node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/number-is-nan": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/number-is-nan/-/number-is-nan-1.0.1.tgz", + "integrity": "sha1-CXtgK1NCKlIsGvuHkDGDNpQaAR0=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/nyc": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz", + "integrity": "sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==", + "dev": true, + "dependencies": { + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "caching-transform": "^4.0.0", + "convert-source-map": "^1.7.0", + "decamelize": "^1.2.0", + "find-cache-dir": "^3.2.0", + "find-up": "^4.1.0", + "foreground-child": "^2.0.0", + "get-package-type": "^0.1.0", + "glob": "^7.1.6", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-hook": "^3.0.0", + "istanbul-lib-instrument": "^4.0.0", + "istanbul-lib-processinfo": "^2.0.2", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.0.2", + "make-dir": "^3.0.0", + "node-preload": "^0.2.1", + "p-map": "^3.0.0", + "process-on-spawn": "^1.0.0", + "resolve-from": "^5.0.0", + "rimraf": "^3.0.0", + "signal-exit": "^3.0.2", + "spawn-wrap": "^2.0.0", + "test-exclude": "^6.0.0", + "yargs": "^15.0.2" + }, + "bin": { + "nyc": "bin/nyc.js" + }, + "engines": { + "node": ">=8.9" + } + }, + "node_modules/nyc/node_modules/p-map": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz", + "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==", + "dev": true, + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/nyc/node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-copy": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/object-copy/-/object-copy-0.1.0.tgz", + "integrity": "sha1-fn2Fi3gb18mRpBupde04EnVOmYw=", + "dev": true, + "dependencies": { + "copy-descriptor": "^0.1.0", + "define-property": "^0.2.5", + "kind-of": "^3.0.3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-copy/node_modules/define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "dev": true, + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-copy/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-inspect": { + "version": "1.9.0", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.9.0.tgz", + "integrity": "sha512-i3Bp9iTqwhaLZBxGkRfo5ZbE07BQRT7MGu8+nNgwW9ItGp1TzCTw2DLEoWwjClxBjOFI/hWljTAmYGCEwmtnOw==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object-keys": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", + "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/object-visit": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/object-visit/-/object-visit-1.0.1.tgz", + "integrity": "sha1-95xEk68MU3e1n+OdOV5BBC3QRbs=", + "dev": true, + "dependencies": { + "isobject": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object.assign": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.2.tgz", + "integrity": "sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "has-symbols": "^1.0.1", + "object-keys": "^1.1.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/object.defaults": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/object.defaults/-/object.defaults-1.1.0.tgz", + "integrity": "sha1-On+GgzS0B96gbaFtiNXNKeQ1/s8=", + "dev": true, + "dependencies": { + "array-each": "^1.0.1", + "array-slice": "^1.0.0", + "for-own": "^1.0.0", + "isobject": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object.map": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/object.map/-/object.map-1.0.1.tgz", + "integrity": "sha1-z4Plncj8wK1fQlDh94s7gb2AHTc=", + "dev": true, + "dependencies": { + "for-own": "^1.0.0", + "make-iterator": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object.pick": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/object.pick/-/object.pick-1.3.0.tgz", + "integrity": "sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c=", + "dev": true, + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object.reduce": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/object.reduce/-/object.reduce-1.0.1.tgz", + "integrity": "sha1-b+NI8qx/oPlcpiEiZZkJaCW7A60=", + "dev": true, + "dependencies": { + "for-own": "^1.0.0", + "make-iterator": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/on-finished": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", + "integrity": "sha1-IPEzZIGwg811M3mSoWlxqi2QaUc=", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/one-time": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/one-time/-/one-time-1.0.0.tgz", + "integrity": "sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g==", + "dependencies": { + "fn.name": "1.x.x" + } + }, + "node_modules/openapi-default-setter": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/openapi-default-setter/-/openapi-default-setter-9.3.0.tgz", + "integrity": "sha512-Y4PtlmeStp43dyy4x+ekibGrT/LYIz6Y9gnSJ0arELX/xc5uyTC7C2qJgeXf4RJcHW+yB9Q9QvyLUNDSa+8oFg==", + "dependencies": { + "openapi-types": "^9.3.0" + } + }, + "node_modules/openapi-framework": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/openapi-framework/-/openapi-framework-9.3.0.tgz", + "integrity": "sha512-mgeEqJcf18Fnd0MQ1I2T1fLljAtu6HkU0MknPM/IoVOXRDscKgQjzLIR/FyVfNcg358MXXsgUtVgDsbVQujyYA==", + "dependencies": { + "difunc": "0.0.4", + "fs-routes": "^9.0.3", + "glob": "*", + "is-dir": "^1.0.0", + "js-yaml": "^3.10.0", + "openapi-default-setter": "^9.3.0", + "openapi-request-coercer": "^9.3.0", + "openapi-request-validator": "^9.3.0", + "openapi-response-validator": "^9.3.0", + "openapi-schema-validator": "^9.3.0", + "openapi-security-handler": "^9.3.0", + "openapi-types": "^9.3.0", + "ts-log": "^2.1.4" + } + }, + "node_modules/openapi-jsonschema-parameters": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/openapi-jsonschema-parameters/-/openapi-jsonschema-parameters-9.3.0.tgz", + "integrity": "sha512-tUNAtzlJm5YaoqQMKvonRZN0BWRVRd34ulmGgzMLL+Ga23VnSy3FyFFI46LDUeIbh9wS2NGjkuO4akE01u7Rmw==", + "dependencies": { + "openapi-types": "^9.3.0" + } + }, + "node_modules/openapi-request-coercer": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/openapi-request-coercer/-/openapi-request-coercer-9.3.0.tgz", + "integrity": "sha512-5EvH0KeRZ3ygDljPTWFEXKvW9ga4h6HGiZN29H7F4g/OQBdKyFMCRpyUQZeVauJbuk6K5mvL6TdsmqdqI3D2Bg==", + "dependencies": { + "openapi-types": "^9.3.0", + "ts-log": "^2.1.4" + } + }, + "node_modules/openapi-request-validator": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/openapi-request-validator/-/openapi-request-validator-9.3.0.tgz", + "integrity": "sha512-SmpYM8HbCn6A22CS6ysvXItwWEpp/dJLqepCfh5F16S7Isy/7txbxGimM1xyhNZh+silXH8wjsac5jfbSniXgw==", + "dependencies": { + "ajv": "^8.3.0", + "ajv-formats": "^2.1.0", + "content-type": "^1.0.4", + "openapi-jsonschema-parameters": "^9.3.0", + "openapi-types": "^9.3.0", + "ts-log": "^2.1.4" + } + }, + "node_modules/openapi-response-validator": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/openapi-response-validator/-/openapi-response-validator-9.3.0.tgz", + "integrity": "sha512-pklr94TIvl/ObZ0Gs04ihYWSi6w4k7jAerw1rSBHklb/ZbFTS5iP1t753PdSW9/7QJdXzZP/9uMADkhyURNjwA==", + "dependencies": { + "ajv": "^8.4.0", + "openapi-types": "^9.3.0" + } + }, + "node_modules/openapi-schema-validator": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/openapi-schema-validator/-/openapi-schema-validator-9.3.0.tgz", + "integrity": "sha512-KlvgZMWTu+H1FHFSZNAGj369uXl3BD1nXSIq+sXlG6P+OrsAHd3YORx0ZEZ3WGdu2LQrPGmtowGQavYXL+PLwg==", + "dependencies": { + "ajv": "^8.1.0", + "ajv-formats": "^2.0.2", + "lodash.merge": "^4.6.1", + "openapi-types": "^9.3.0" + } + }, + "node_modules/openapi-security-handler": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/openapi-security-handler/-/openapi-security-handler-9.3.0.tgz", + "integrity": "sha512-loy+sdPxjb0OuzIj0cp45kowoLEQ8z6FF0QJBFxtfDttuDssTtQ3Vw5C2kAZ/6Qu6X1y6HT4DAYdDY3iJ3iMNw==", + "dependencies": { + "openapi-types": "^9.3.0" + } + }, + "node_modules/openapi-types": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/openapi-types/-/openapi-types-9.3.0.tgz", + "integrity": "sha512-sR23YjmuwDSMsQVZDHbV9mPgi0RyniQlqR0AQxTC2/F3cpSjRFMH3CFPjoWvNqhC4OxPkDYNb2l8Mc1Me6D/KQ==" + }, + "node_modules/optionator": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.1.tgz", + "integrity": "sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==", + "dev": true, + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/ordered-read-streams": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/ordered-read-streams/-/ordered-read-streams-1.0.1.tgz", + "integrity": "sha1-d8DLN8QVJdZBZtmQ/61+xqDhNj4=", + "dev": true, + "dependencies": { + "readable-stream": "^2.0.1" + } + }, + "node_modules/ordered-read-streams/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/ordered-read-streams/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/ordered-read-streams/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/os-locale": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/os-locale/-/os-locale-1.4.0.tgz", + "integrity": "sha1-IPnxeuKe00XoveWDsT0gCYA8FNk=", + "dev": true, + "dependencies": { + "lcid": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/p-cancelable": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-1.1.0.tgz", + "integrity": "sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-map": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", + "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", + "dev": true, + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/package-hash": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz", + "integrity": "sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.15", + "hasha": "^5.0.0", + "lodash.flattendeep": "^4.4.0", + "release-zalgo": "^1.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/package-json": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/package-json/-/package-json-6.5.0.tgz", + "integrity": "sha512-k3bdm2n25tkyxcjSKzB5x8kfVxlMdgsbPr0GkZcwHsLpba6cBjqCt1KlcChKEvxHIcTB1FVMuwoijZ26xex5MQ==", + "dev": true, + "dependencies": { + "got": "^9.6.0", + "registry-auth-token": "^4.0.0", + "registry-url": "^5.0.0", + "semver": "^6.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/package-json/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/packet-reader": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-1.0.0.tgz", + "integrity": "sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==" + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-database-url": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/parse-database-url/-/parse-database-url-0.3.0.tgz", + "integrity": "sha1-NpZmMh6SfJreY838Gqr2+zdFPQ0=", + "dependencies": { + "mongodb-uri": ">= 0.9.7" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/parse-filepath": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/parse-filepath/-/parse-filepath-1.0.2.tgz", + "integrity": "sha1-pjISf1Oq89FYdvWHLz/6x2PWyJE=", + "dev": true, + "dependencies": { + "is-absolute": "^1.0.0", + "map-cache": "^0.2.0", + "path-root": "^0.1.1" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, + "dependencies": { + "error-ex": "^1.2.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/parse-node-version": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parse-node-version/-/parse-node-version-1.0.1.tgz", + "integrity": "sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA==", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/parse-passwd": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/parse-passwd/-/parse-passwd-1.0.0.tgz", + "integrity": "sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", + "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/pascalcase": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/pascalcase/-/pascalcase-0.1.1.tgz", + "integrity": "sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-dirname": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/path-dirname/-/path-dirname-1.0.2.tgz", + "integrity": "sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA=", + "dev": true + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + }, + "node_modules/path-root": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/path-root/-/path-root-0.1.1.tgz", + "integrity": "sha1-mkpoFMrBwM1zNgqV8yCDyOpHRbc=", + "dev": true, + "dependencies": { + "path-root-regex": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-root-regex": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/path-root-regex/-/path-root-regex-0.1.2.tgz", + "integrity": "sha1-v8zcjfWxLcUsi0PsONGNcsBLqW0=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-to-regexp": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", + "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" + }, + "node_modules/path-type": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", + "integrity": "sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/pathval": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.1.tgz", + "integrity": "sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==", + "dev": true, + "engines": { + "node": "*" + } + }, + "node_modules/pg": { + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.7.1.tgz", + "integrity": "sha512-7bdYcv7V6U3KAtWjpQJJBww0UEsWuh4yQ/EjNf2HeO/NnvKjpvhEIe/A/TleP6wtmSKnUnghs5A9jUoK6iDdkA==", + "dependencies": { + "buffer-writer": "2.0.0", + "packet-reader": "1.0.0", + "pg-connection-string": "^2.5.0", + "pg-pool": "^3.4.1", + "pg-protocol": "^1.5.0", + "pg-types": "^2.1.0", + "pgpass": "1.x" + }, + "engines": { + "node": ">= 8.0.0" + }, + "peerDependencies": { + "pg-native": ">=2.0.0" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-connection-string": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", + "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.4.1", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.4.1.tgz", + "integrity": "sha512-TVHxR/gf3MeJRvchgNHxsYsTCHQ+4wm3VIHSS19z8NC0+gioEhq1okDY1sm/TYbfoP6JLFx01s0ShvZ3puP/iQ==", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.5.0.tgz", + "integrity": "sha512-muRttij7H8TqRNu/DxrAJQITO4Ac7RmX3Klyr/9mJEOBeIpgnF8f9jAfRz5d3XwQZl5qBjF9gLsUtMPJE0vezQ==" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pg/node_modules/pg-connection-string": { + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.5.0.tgz", + "integrity": "sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ==" + }, + "node_modules/pgpass": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.4.tgz", + "integrity": "sha512-YmuA56alyBq7M59vxVBfPJrGSozru8QAdoNlWuW3cz8l+UX3cWge0vTvjKhsSHSJpo3Bom8/Mm6hf0TR5GY0+w==", + "dependencies": { + "split2": "^3.1.1" + } + }, + "node_modules/picomatch": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.2.2.tgz", + "integrity": "sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pidtree": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/pidtree/-/pidtree-0.3.1.tgz", + "integrity": "sha512-qQbW94hLHEqCg7nhby4yRC7G2+jYHY4Rguc2bjw7Uug4GIJuu1tvf2uHaZv5Q8zdt+WKJ6qK1FOI6amaWUo5FA==", + "dev": true, + "bin": { + "pidtree": "bin/pidtree.js" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pinkie": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", + "integrity": "sha1-clVrgM+g1IqXToDnckjoDtT3+HA=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pinkie-promise": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", + "integrity": "sha1-ITXW36ejWMBprJsXh3YogihFD/o=", + "dev": true, + "dependencies": { + "pinkie": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pkginfo": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.4.1.tgz", + "integrity": "sha1-tUGO8EOd5UJfxJlQQtztFPsqhP8=", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/plugin-error": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/plugin-error/-/plugin-error-1.0.1.tgz", + "integrity": "sha512-L1zP0dk7vGweZME2i+EeakvUNqSrdiI3F91TwEoYiGrAfUXmVv6fJIq4g82PAXxNsWOp0J7ZqQy/3Szz0ajTxA==", + "dev": true, + "dependencies": { + "ansi-colors": "^1.0.1", + "arr-diff": "^4.0.0", + "arr-union": "^3.1.0", + "extend-shallow": "^3.0.2" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/plugin-error/node_modules/ansi-colors": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-1.1.0.tgz", + "integrity": "sha512-SFKX67auSNoVR38N3L+nvsPjOE0bybKTYbkf5tRvushrAPQ9V75huw0ZxBkKVeRU9kqH3d6HA4xTckbwZ4ixmA==", + "dev": true, + "dependencies": { + "ansi-wrap": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/posix-character-classes": { + "version": "0.1.1", + "resolved": "https://registry.npmjs.org/posix-character-classes/-/posix-character-classes-0.1.1.tgz", + "integrity": "sha1-AerA/jta9xoqbAL+q7jB/vfgDqs=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz", + "integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prepend-http": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", + "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/prettier": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.2.1.tgz", + "integrity": "sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==", + "dev": true, + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/prettier-linter-helpers": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", + "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", + "dev": true, + "dependencies": { + "fast-diff": "^1.1.2" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/prettier-plugin-organize-imports": { + "version": "2.3.4", + "resolved": "https://registry.npmjs.org/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-2.3.4.tgz", + "integrity": "sha512-R8o23sf5iVL/U71h9SFUdhdOEPsi3nm42FD/oDYIZ2PQa4TNWWuWecxln6jlIQzpZTDMUeO1NicJP6lLn2TtRw==", + "dev": true, + "peerDependencies": { + "prettier": ">=2.0", + "typescript": ">=2.9" + } + }, + "node_modules/pretty-hrtime": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz", + "integrity": "sha1-t+PqQkNaTJsnWdmeDyAesZWALuE=", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", + "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" + }, + "node_modules/process-on-spawn": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.0.0.tgz", + "integrity": "sha512-1WsPDsUSMmZH5LeMLegqkPDrsGgsWwk1Exipy2hvB0o/F0ASzbpIctSCcZIK1ykJvtTJULEH+20WOFjMvGnCTg==", + "dev": true, + "dependencies": { + "fromentries": "^1.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": "sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "dev": true, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/prompt": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prompt/-/prompt-1.0.0.tgz", + "integrity": "sha1-jlcSPDlquYiJf7Mn/Trtw+c15P4=", + "dependencies": { + "colors": "^1.1.2", + "pkginfo": "0.x.x", + "read": "1.0.x", + "revalidator": "0.1.x", + "utile": "0.3.x", + "winston": "2.1.x" + }, + "engines": { + "node": ">= 0.6.6" + } + }, + "node_modules/prompt/node_modules/async": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/async/-/async-1.0.0.tgz", + "integrity": "sha1-+PwEyjoTeErenhZBr5hXjPvWR6k=" + }, + "node_modules/prompt/node_modules/winston": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/winston/-/winston-2.1.1.tgz", + "integrity": "sha1-PJNJ0ZYgf9G9/51LxD73JRDjoS4=", + "dependencies": { + "async": "~1.0.0", + "colors": "1.0.x", + "cycle": "1.0.x", + "eyes": "0.1.x", + "isstream": "0.1.x", + "pkginfo": "0.3.x", + "stack-trace": "0.0.x" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prompt/node_modules/winston/node_modules/colors": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/colors/-/colors-1.0.3.tgz", + "integrity": "sha1-BDP0TYCWgP3rYO0mDxsMJi6CpAs=", + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/prompt/node_modules/winston/node_modules/pkginfo": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/pkginfo/-/pkginfo-0.3.1.tgz", + "integrity": "sha1-Wyn2qB9wcXFC4J52W76rl7T4HiE=", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz", + "integrity": "sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw==", + "dependencies": { + "forwarded": "~0.1.2", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/pseudomap": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", + "integrity": "sha1-8FKijacOYYkX7wqKw0wa5aaChrM=" + }, + "node_modules/pstree.remy": { + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/pstree.remy/-/pstree.remy-1.1.8.tgz", + "integrity": "sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==", + "dev": true + }, + "node_modules/pump": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/pump/-/pump-2.0.1.tgz", + "integrity": "sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==", + "dev": true, + "dependencies": { + "end-of-stream": "^1.1.0", + "once": "^1.3.1" + } + }, + "node_modules/pumpify": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/pumpify/-/pumpify-1.5.1.tgz", + "integrity": "sha512-oClZI37HvuUJJxSKKrC17bZ9Cu0ZYhEAGPsPUy9KlMUmv9dKX2o77RUmq7f3XjIxbwyGwYzbzQ1L2Ks8sIradQ==", + "dev": true, + "dependencies": { + "duplexify": "^3.6.0", + "inherits": "^2.0.3", + "pump": "^2.0.0" + } + }, + "node_modules/punycode": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=" + }, + "node_modules/pupa": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/pupa/-/pupa-2.1.1.tgz", + "integrity": "sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A==", + "dev": true, + "dependencies": { + "escape-goat": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/qs": { + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.10.1.tgz", + "integrity": "sha512-M528Hph6wsSVOBiYUnGf+K/7w0hNshs/duGsNXPUCLH5XAqjEtiPGwNONLV0tBH8NoGb0mvD5JubnUTrujKDTg==", + "dependencies": { + "side-channel": "^1.0.4" + }, + "engines": { + "node": ">=0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/querystring": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=", + "deprecated": "The querystring API is considered Legacy. new code should use the URLSearchParams API instead.", + "engines": { + "node": ">=0.4.x" + } + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", + "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.4.0.tgz", + "integrity": "sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q==", + "dependencies": { + "bytes": "3.1.0", + "http-errors": "1.7.2", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/rc": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/rc/-/rc-1.2.8.tgz", + "integrity": "sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==", + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~1.3.0", + "minimist": "^1.2.0", + "strip-json-comments": "~2.0.1" + }, + "bin": { + "rc": "cli.js" + } + }, + "node_modules/read": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/read/-/read-1.0.7.tgz", + "integrity": "sha1-s9oZvQUkMal2cdRKQmNK33ELQMQ=", + "dependencies": { + "mute-stream": "~0.0.4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/read-pkg": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-1.1.0.tgz", + "integrity": "sha1-9f+qXs0pyzHAR0vKfXVra7KePyg=", + "dev": true, + "dependencies": { + "load-json-file": "^1.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/read-pkg-up": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-1.0.1.tgz", + "integrity": "sha1-nWPBMnbAZZGNV/ACpX9AobZD+wI=", + "dev": true, + "dependencies": { + "find-up": "^1.0.0", + "read-pkg": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/read-pkg-up/node_modules/find-up": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-1.1.2.tgz", + "integrity": "sha1-ay6YIrGizgpgq2TWEOzK1TyyTQ8=", + "dev": true, + "dependencies": { + "path-exists": "^2.0.0", + "pinkie-promise": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/read-pkg-up/node_modules/path-exists": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-2.1.0.tgz", + "integrity": "sha1-D+tsZPD8UY2adU3V77YscCJ2H0s=", + "dev": true, + "dependencies": { + "pinkie-promise": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/read-pkg/node_modules/path-type": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz", + "integrity": "sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE=", + "dev": true, + "dependencies": { + "graceful-fs": "^4.1.2", + "pify": "^2.0.0", + "pinkie-promise": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/readable-stream": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", + "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/rechoir": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz", + "integrity": "sha1-hSBLVNuoLVdC4oyWdW70OvUOM4Q=", + "dev": true, + "dependencies": { + "resolve": "^1.1.6" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/regex-not": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/regex-not/-/regex-not-1.0.2.tgz", + "integrity": "sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A==", + "dev": true, + "dependencies": { + "extend-shallow": "^3.0.2", + "safe-regex": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/regexpp": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz", + "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/mysticatea" + } + }, + "node_modules/registry-auth-token": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/registry-auth-token/-/registry-auth-token-4.2.1.tgz", + "integrity": "sha512-6gkSb4U6aWJB4SF2ZvLb76yCBjcvufXBqvvEx1HbmKPkutswjW1xNVRY0+daljIYRbogN7O0etYSlbiaEQyMyw==", + "dev": true, + "dependencies": { + "rc": "^1.2.8" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/registry-url": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/registry-url/-/registry-url-5.1.0.tgz", + "integrity": "sha512-8acYXXTI0AkQv6RAOjE3vOaIXZkT9wo4LOFbBKYQEEnnMNBpKqdUrI6S4NT0KPIo/WVvJ5tE/X5LF/TQUf0ekw==", + "dev": true, + "dependencies": { + "rc": "^1.2.8" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/release-zalgo": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz", + "integrity": "sha1-CXALflB0Mpc5Mw5TXFqQ+2eFFzA=", + "dev": true, + "dependencies": { + "es6-error": "^4.0.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/remove-bom-buffer": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/remove-bom-buffer/-/remove-bom-buffer-3.0.0.tgz", + "integrity": "sha512-8v2rWhaakv18qcvNeli2mZ/TMTL2nEyAKRvzo1WtnZBl15SHyEhrCu2/xKlJyUFKHiHgfXIyuY6g2dObJJycXQ==", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5", + "is-utf8": "^0.2.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/remove-bom-stream": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/remove-bom-stream/-/remove-bom-stream-1.2.0.tgz", + "integrity": "sha1-BfGlk/FuQuH7kOv1nejlaVJflSM=", + "dev": true, + "dependencies": { + "remove-bom-buffer": "^3.0.0", + "safe-buffer": "^5.1.0", + "through2": "^2.0.3" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/remove-trailing-separator": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", + "integrity": "sha1-wkvOKig62tW8P1jg1IJJuSN52O8=", + "dev": true + }, + "node_modules/repeat-element": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/repeat-element/-/repeat-element-1.1.3.tgz", + "integrity": "sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/repeat-string": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/repeat-string/-/repeat-string-1.6.1.tgz", + "integrity": "sha1-jcrkcOHIirwtYA//Sndihtp15jc=", + "dev": true, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/replace-ext": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/replace-ext/-/replace-ext-1.0.1.tgz", + "integrity": "sha512-yD5BHCe7quCgBph4rMQ+0KkIRKwWCrHDOX1p1Gp6HwjPM5kVoCdKGNhN7ydqqsX6lJEnQDKZ/tFMiEdQ1dvPEw==", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/replace-homedir": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/replace-homedir/-/replace-homedir-1.0.0.tgz", + "integrity": "sha1-6H9tUTuSjd6AgmDBK+f+xv9ueYw=", + "dev": true, + "dependencies": { + "homedir-polyfill": "^1.0.1", + "is-absolute": "^1.0.0", + "remove-trailing-separator": "^1.1.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz", + "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==" + }, + "node_modules/resolve": { + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.19.0.tgz", + "integrity": "sha512-rArEXAgsBG4UgRGcynxWIWKFvh/XZCcS8UJdHhwy91zwAvCZIbcs+vAbflgBnNjYMs/i/i+/Ux6IZhML1yPvxg==", + "dependencies": { + "is-core-module": "^2.1.0", + "path-parse": "^1.0.6" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-dir": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/resolve-dir/-/resolve-dir-1.0.1.tgz", + "integrity": "sha1-eaQGRMNivoLybv/nOcm7U4IEb0M=", + "dev": true, + "dependencies": { + "expand-tilde": "^2.0.0", + "global-modules": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/resolve-options": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/resolve-options/-/resolve-options-1.1.0.tgz", + "integrity": "sha1-MrueOcBtZzONyTeMDW1gdFZq0TE=", + "dev": true, + "dependencies": { + "value-or-function": "^3.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/resolve-url": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/resolve-url/-/resolve-url-0.2.1.tgz", + "integrity": "sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=", + "deprecated": "https://github.com/lydell/resolve-url#deprecated", + "dev": true + }, + "node_modules/responselike": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", + "integrity": "sha1-kYcg7ztjHFZCvgaPFa3lpG9Loec=", + "dev": true, + "dependencies": { + "lowercase-keys": "^1.0.0" + } + }, + "node_modules/ret": { + "version": "0.1.15", + "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", + "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==", + "dev": true, + "engines": { + "node": ">=0.12" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/revalidator": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/revalidator/-/revalidator-0.1.8.tgz", + "integrity": "sha1-/s5hv6DBtSoga9axgZgYS91SOjs=", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/rimraf": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.7.1.tgz", + "integrity": "sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + } + }, + "node_modules/run-parallel": { + "version": "1.1.10", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.1.10.tgz", + "integrity": "sha512-zb/1OuZ6flOlH6tQyMPUrE3x3Ulxjlo9WIVXR4yVYi4H9UXQaeIsPbLn2R3O3vQCnDKkAl2qHiuocKKX4Tz/Sw==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/safe-regex": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/safe-regex/-/safe-regex-1.1.0.tgz", + "integrity": "sha1-QKNmnzsHfR6UPURinhV91IAjvy4=", + "dev": true, + "dependencies": { + "ret": "~0.1.10" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + }, + "node_modules/sax": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "integrity": "sha1-e45lYZCyKOgaZq6nSEgNgozS03o=" + }, + "node_modules/secure-json-parse": { + "version": "2.4.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.4.0.tgz", + "integrity": "sha512-Q5Z/97nbON5t/L/sH6mY2EacfjVGwrCcSi5D3btRO2GZ8pf1K1UN7Z9H5J57hjVU2Qzxr1xO+FmBhOvEkzCMmg==" + }, + "node_modules/semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/semver-diff": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/semver-diff/-/semver-diff-3.1.1.tgz", + "integrity": "sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==", + "dev": true, + "dependencies": { + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/semver-diff/node_modules/semver": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.0.tgz", + "integrity": "sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/semver-greatest-satisfied-range": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/semver-greatest-satisfied-range/-/semver-greatest-satisfied-range-1.1.0.tgz", + "integrity": "sha1-E+jCZYq5aRywzXEJMkAoDTb3els=", + "dev": true, + "dependencies": { + "sver-compat": "^1.5.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/send": { + "version": "0.17.1", + "resolved": "https://registry.npmjs.org/send/-/send-0.17.1.tgz", + "integrity": "sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg==", + "dependencies": { + "debug": "2.6.9", + "depd": "~1.1.2", + "destroy": "~1.0.4", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "~1.7.2", + "mime": "1.6.0", + "ms": "2.1.1", + "on-finished": "~2.3.0", + "range-parser": "~1.2.1", + "statuses": "~1.5.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=" + }, + "node_modules/send/node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/send/node_modules/ms": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.1.tgz", + "integrity": "sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg==" + }, + "node_modules/serialize-javascript": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-5.0.1.tgz", + "integrity": "sha512-SaaNal9imEO737H2c05Og0/8LUXG7EnsZyMa8MzkmuHoELfT6txuj0cMqRj6zfPKnmQ1yasR4PCJc8x+M4JSPA==", + "dev": true, + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/serve-static": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.14.1.tgz", + "integrity": "sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg==", + "dependencies": { + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.17.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=" + }, + "node_modules/set-value": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/set-value/-/set-value-2.0.1.tgz", + "integrity": "sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw==", + "dev": true, + "dependencies": { + "extend-shallow": "^2.0.1", + "is-extendable": "^0.1.1", + "is-plain-object": "^2.0.3", + "split-string": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/set-value/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/setprototypeof": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", + "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/shell-quote": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/shell-quote/-/shell-quote-1.7.2.tgz", + "integrity": "sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg==", + "dev": true + }, + "node_modules/side-channel": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", + "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "dependencies": { + "call-bind": "^1.0.0", + "get-intrinsic": "^1.0.2", + "object-inspect": "^1.9.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel/node_modules/get-intrinsic": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.1.1.tgz", + "integrity": "sha512-kWZrnVM42QCiEA2Ig1bG8zjoIMOgxWwYCEeNdwY6Tv/cOSeGpcoX4pXHfKUxNKVoArnrEr2e9srnAxxGIraS9Q==", + "dependencies": { + "function-bind": "^1.1.1", + "has": "^1.0.3", + "has-symbols": "^1.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", + "integrity": "sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA==", + "dev": true + }, + "node_modules/simple-swizzle": { + "version": "0.2.2", + "resolved": "https://registry.npmjs.org/simple-swizzle/-/simple-swizzle-0.2.2.tgz", + "integrity": "sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo=", + "dependencies": { + "is-arrayish": "^0.3.1" + } + }, + "node_modules/sinon": { + "version": "11.1.2", + "resolved": "https://registry.npmjs.org/sinon/-/sinon-11.1.2.tgz", + "integrity": "sha512-59237HChms4kg7/sXhiRcUzdSkKuydDeTiamT/jesUVHshBgL8XAmhgFo0GfK6RruMDM/iRSij1EybmMog9cJw==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^1.8.3", + "@sinonjs/fake-timers": "^7.1.2", + "@sinonjs/samsam": "^6.0.2", + "diff": "^5.0.0", + "nise": "^5.1.0", + "supports-color": "^7.2.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/sinon" + } + }, + "node_modules/sinon-chai": { + "version": "3.7.0", + "resolved": "https://registry.npmjs.org/sinon-chai/-/sinon-chai-3.7.0.tgz", + "integrity": "sha512-mf5NURdUaSdnatJx3uhoBOrY9dtL19fiOtAdT1Azxg3+lNJFiuN0uzaU3xX1LeAfL17kHQhTAJgpsfhbMJMY2g==", + "dev": true, + "peerDependencies": { + "chai": "^4.0.0", + "sinon": ">=4.0.0" + } + }, + "node_modules/sinon/node_modules/diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/sinon/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/sinon/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/slice-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-4.0.0.tgz", + "integrity": "sha512-qMCMfhY040cVHT43K9BFygqYbUPFZKHOg7K73mtTWJRb8pyP3fzf4Ixd5SzdEJQ6MRUg/WBnOLxghZtKKurENQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "astral-regex": "^2.0.0", + "is-fullwidth-code-point": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/slice-ansi?sponsor=1" + } + }, + "node_modules/snapdragon": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", + "integrity": "sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg==", + "dev": true, + "dependencies": { + "base": "^0.11.1", + "debug": "^2.2.0", + "define-property": "^0.2.5", + "extend-shallow": "^2.0.1", + "map-cache": "^0.2.2", + "source-map": "^0.5.6", + "source-map-resolve": "^0.5.0", + "use": "^3.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon-node": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz", + "integrity": "sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw==", + "dev": true, + "dependencies": { + "define-property": "^1.0.0", + "isobject": "^3.0.0", + "snapdragon-util": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon-node/node_modules/define-property": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-1.0.0.tgz", + "integrity": "sha1-dp66rz9KY6rTr56NMEybvnm/sOY=", + "dev": true, + "dependencies": { + "is-descriptor": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon-node/node_modules/is-accessor-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz", + "integrity": "sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ==", + "dev": true, + "dependencies": { + "kind-of": "^6.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon-node/node_modules/is-data-descriptor": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz", + "integrity": "sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ==", + "dev": true, + "dependencies": { + "kind-of": "^6.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon-node/node_modules/is-descriptor": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/is-descriptor/-/is-descriptor-1.0.2.tgz", + "integrity": "sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg==", + "dev": true, + "dependencies": { + "is-accessor-descriptor": "^1.0.0", + "is-data-descriptor": "^1.0.0", + "kind-of": "^6.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon-util": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/snapdragon-util/-/snapdragon-util-3.0.1.tgz", + "integrity": "sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ==", + "dev": true, + "dependencies": { + "kind-of": "^3.2.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon-util/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/snapdragon/node_modules/define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "dev": true, + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/snapdragon/node_modules/extend-shallow": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/extend-shallow/-/extend-shallow-2.0.1.tgz", + "integrity": "sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8=", + "dev": true, + "dependencies": { + "is-extendable": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.5.7.tgz", + "integrity": "sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-resolve": { + "version": "0.5.3", + "resolved": "https://registry.npmjs.org/source-map-resolve/-/source-map-resolve-0.5.3.tgz", + "integrity": "sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw==", + "deprecated": "See https://github.com/lydell/source-map-resolve#deprecated", + "dev": true, + "dependencies": { + "atob": "^2.1.2", + "decode-uri-component": "^0.2.0", + "resolve-url": "^0.2.1", + "source-map-url": "^0.4.0", + "urix": "^0.1.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.19", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.19.tgz", + "integrity": "sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==", + "dev": true, + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/source-map-support/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-url": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/source-map-url/-/source-map-url-0.4.0.tgz", + "integrity": "sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM=", + "deprecated": "See https://github.com/lydell/source-map-url#deprecated", + "dev": true + }, + "node_modules/sparkles": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/sparkles/-/sparkles-1.0.1.tgz", + "integrity": "sha512-dSO0DDYUahUt/0/pD/Is3VIm5TGJjludZ0HVymmhYF6eNA53PVLhnUk0znSYbH8IYBuJdCE+1luR22jNLMaQdw==", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/spawn-wrap": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz", + "integrity": "sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==", + "dev": true, + "dependencies": { + "foreground-child": "^2.0.0", + "is-windows": "^1.0.2", + "make-dir": "^3.0.0", + "rimraf": "^3.0.0", + "signal-exit": "^3.0.2", + "which": "^2.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/spawn-wrap/node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "dev": true, + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/spawn-wrap/node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/spdx-correct": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.1.1.tgz", + "integrity": "sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w==", + "dev": true, + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz", + "integrity": "sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==", + "dev": true + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz", + "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==", + "dev": true, + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.7.tgz", + "integrity": "sha512-U+MTEOO0AiDzxwFvoa4JVnMV6mZlJKk2sBLt90s7G0Gd0Mlknc7kxEn3nuDPNZRta7O2uy8oLcZLVT+4sqNZHQ==", + "dev": true + }, + "node_modules/split-string": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/split-string/-/split-string-3.1.0.tgz", + "integrity": "sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw==", + "dev": true, + "dependencies": { + "extend-shallow": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/split2": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/split2/-/split2-3.2.2.tgz", + "integrity": "sha512-9NThjpgZnifTkJpzTZ7Eue85S49QwpNhZTq6GRJwObb6jnLFNGB7Qm73V5HewTROPyxD0C29xqmaI68bQtV+hg==", + "dependencies": { + "readable-stream": "^3.0.0" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=" + }, + "node_modules/sql-template-strings": { + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/sql-template-strings/-/sql-template-strings-2.2.2.tgz", + "integrity": "sha1-PxFQiiWt384hejBCqdMAwxk7lv8=", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/ssf": { + "version": "0.11.2", + "resolved": "https://registry.npmjs.org/ssf/-/ssf-0.11.2.tgz", + "integrity": "sha512-+idbmIXoYET47hH+d7dfm2epdOMUDjqcB4648sTZ+t2JwoyBFL/insLfB/racrDmsKB3diwsDA696pZMieAC5g==", + "dependencies": { + "frac": "~1.1.2" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/ssh2": { + "version": "0.5.4", + "resolved": "https://registry.npmjs.org/ssh2/-/ssh2-0.5.4.tgz", + "integrity": "sha1-G/a2soyW6u8mf01sRqWiUXpZnic=", + "dependencies": { + "ssh2-streams": "~0.1.15" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ssh2-streams": { + "version": "0.1.20", + "resolved": "https://registry.npmjs.org/ssh2-streams/-/ssh2-streams-0.1.20.tgz", + "integrity": "sha1-URGNFUVV31Rp7h9n4M8efoosDjo=", + "dependencies": { + "asn1": "~0.2.0", + "semver": "^5.1.0", + "streamsearch": "~0.1.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/stack-trace": { + "version": "0.0.10", + "resolved": "https://registry.npmjs.org/stack-trace/-/stack-trace-0.0.10.tgz", + "integrity": "sha1-VHxws0fo0ytOEI6hoqFZ5f3eGcA=", + "engines": { + "node": "*" + } + }, + "node_modules/static-eval": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/static-eval/-/static-eval-2.0.2.tgz", + "integrity": "sha512-N/D219Hcr2bPjLxPiV+TQE++Tsmrady7TqAJugLy7Xk1EumfDWS/f5dtBbkRCGE7wKKXuYockQoj8Rm2/pVKyg==", + "dependencies": { + "escodegen": "^1.8.1" + } + }, + "node_modules/static-extend": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/static-extend/-/static-extend-0.1.2.tgz", + "integrity": "sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY=", + "dev": true, + "dependencies": { + "define-property": "^0.2.5", + "object-copy": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/static-extend/node_modules/define-property": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz", + "integrity": "sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY=", + "dev": true, + "dependencies": { + "is-descriptor": "^0.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/statuses": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-1.5.0.tgz", + "integrity": "sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/stream-exhaust": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/stream-exhaust/-/stream-exhaust-1.0.2.tgz", + "integrity": "sha512-b/qaq/GlBK5xaq1yrK9/zFcyRSTNxmcZwFLGSTG0mXgZl/4Z6GgiyYOXOvY7N3eEvFRAG1bkDRz5EPGSvPYQlw==", + "dev": true + }, + "node_modules/stream-shift": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz", + "integrity": "sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==", + "dev": true + }, + "node_modules/streamsearch": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz", + "integrity": "sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string.prototype.padend": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/string.prototype.padend/-/string.prototype.padend-3.1.1.tgz", + "integrity": "sha512-eCzTASPnoCr5Ht+Vn1YXgm8SB015hHKgEIMu9Nr9bQmLhRBxKRfmzSj/IQsxDFc8JInJDDFA0qXwK+xxI7wDkg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3", + "es-abstract": "^1.18.0-next.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimend": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.3.tgz", + "integrity": "sha512-ayH0pB+uf0U28CtjlLvL7NaohvR1amUvVZk+y3DYb0Ey2PUV5zPkkKy9+U1ndVEIXO8hNg18eIv9Jntbii+dKw==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/string.prototype.trimstart": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.3.tgz", + "integrity": "sha512-oBIBUy5lea5tt0ovtOFiEQaBkoBBkyJhZXzJYrSmDo5IUUqbOPvVezuRs/agBIdZ2p2Eo1FD6bD9USyBLfl3xg==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.0", + "define-properties": "^1.1.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dependencies": { + "ansi-regex": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-2.0.0.tgz", + "integrity": "sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=", + "dev": true, + "dependencies": { + "is-utf8": "^0.2.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/strip-json-comments": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-2.0.1.tgz", + "integrity": "sha1-PFMZQukIwml8DsNEhYwobHygpgo=", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/sver-compat": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/sver-compat/-/sver-compat-1.5.0.tgz", + "integrity": "sha1-PPh9/rTQe0o/FIJ7wYaz/QxkXNg=", + "dev": true, + "dependencies": { + "es6-iterator": "^2.0.1", + "es6-symbol": "^3.1.1" + } + }, + "node_modules/swagger-ui-dist": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-4.3.0.tgz", + "integrity": "sha512-RY1c3y6uuHBTu4nZPXcvrv9cnKj6MbaNMZK1NDyGHrUbQOO5WmkuMo6wi93WFzSURJk0SboD1X9nM5CtQAu2Og==" + }, + "node_modules/swagger-ui-express": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/swagger-ui-express/-/swagger-ui-express-4.3.0.tgz", + "integrity": "sha512-jN46SEEe9EoXa3ZgZoKgnSF6z0w3tnM1yqhO4Y+Q4iZVc8JOQB960EZpIAz6rNROrDApVDwcMHR0mhlnc/5Omw==", + "dependencies": { + "swagger-ui-dist": ">=4.1.3" + }, + "engines": { + "node": ">= v0.10.32" + }, + "peerDependencies": { + "express": ">=4.0.0" + } + }, + "node_modules/table": { + "version": "6.8.0", + "resolved": "https://registry.npmjs.org/table/-/table-6.8.0.tgz", + "integrity": "sha512-s/fitrbVeEyHKFa7mFdkuQMWlH1Wgw/yEXMt5xACT4ZpzWFluehAxRtUUQKPuWhaLAWhFcVx6w3oC8VKaUfPGA==", + "dev": true, + "dependencies": { + "ajv": "^8.0.1", + "lodash.truncate": "^4.4.2", + "slice-ansi": "^4.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/table/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/table/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/table/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tarn": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/tarn/-/tarn-3.0.2.tgz", + "integrity": "sha512-51LAVKUSZSVfI05vjPESNc5vwqqZpbXCsU+/+wxlOrUjk2SnFTt97v9ZgQrD4YmxYW1Px6w2KjaDitCfkvgxMQ==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/text-hex": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/text-hex/-/text-hex-1.0.0.tgz", + "integrity": "sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg==" + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", + "dev": true + }, + "node_modules/through2": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/through2/-/through2-2.0.5.tgz", + "integrity": "sha512-/mrRod8xqpA+IHSLyGCQ2s8SPHiCDEeQJSep1jqLYeEUClOFG2Qsh+4FU6G9VeqpZnGW/Su8LQGc4YKni5rYSQ==", + "dev": true, + "dependencies": { + "readable-stream": "~2.3.6", + "xtend": "~4.0.1" + } + }, + "node_modules/through2-filter": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/through2-filter/-/through2-filter-3.0.0.tgz", + "integrity": "sha512-jaRjI2WxN3W1V8/FMZ9HKIBXixtiqs3SQSX4/YGIiP3gL6djW48VoZq9tDqeCWs3MT8YY5wb/zli8VW8snY1CA==", + "dev": true, + "dependencies": { + "through2": "~2.0.0", + "xtend": "~4.0.0" + } + }, + "node_modules/through2/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/through2/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/through2/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/tildify": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/tildify/-/tildify-2.0.0.tgz", + "integrity": "sha512-Cc+OraorugtXNfs50hU9KS369rFXCfgGLpfCfvlc+Ud5u6VWmUQsOAa9HbTvheQdYnrdJqqv1e5oIqXppMYnSw==", + "engines": { + "node": ">=8" + } + }, + "node_modules/time-stamp": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/time-stamp/-/time-stamp-1.1.0.tgz", + "integrity": "sha1-dkpaEa9QVhkhsTPztE5hhofg9cM=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/to-absolute-glob": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/to-absolute-glob/-/to-absolute-glob-2.0.2.tgz", + "integrity": "sha1-GGX0PZ50sIItufFFt4z/fQ98hJs=", + "dev": true, + "dependencies": { + "is-absolute": "^1.0.0", + "is-negated-glob": "^1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/to-object-path": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/to-object-path/-/to-object-path-0.3.0.tgz", + "integrity": "sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68=", + "dev": true, + "dependencies": { + "kind-of": "^3.0.2" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/to-object-path/node_modules/kind-of": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-3.2.2.tgz", + "integrity": "sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ=", + "dev": true, + "dependencies": { + "is-buffer": "^1.1.5" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/to-readable-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/to-readable-stream/-/to-readable-stream-1.0.0.tgz", + "integrity": "sha512-Iq25XBt6zD5npPhlLVXGFN3/gyR2/qODcKNNyTMd4vbm39HUaOiAM4PMq0eMVC/Tkxz+Zjdsc55g9yyz+Yq00Q==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/to-regex": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/to-regex/-/to-regex-3.0.2.tgz", + "integrity": "sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw==", + "dev": true, + "dependencies": { + "define-property": "^2.0.2", + "extend-shallow": "^3.0.2", + "regex-not": "^1.0.2", + "safe-regex": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/to-regex-range": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-2.1.1.tgz", + "integrity": "sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg=", + "dev": true, + "dependencies": { + "is-number": "^3.0.0", + "repeat-string": "^1.6.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/to-through": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-through/-/to-through-2.0.0.tgz", + "integrity": "sha1-/JKtq6ByZHvAtn1rA2ZKoZUJOvY=", + "dev": true, + "dependencies": { + "through2": "^2.0.3" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/toidentifier": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.0.tgz", + "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/touch": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/touch/-/touch-3.1.0.tgz", + "integrity": "sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA==", + "dev": true, + "dependencies": { + "nopt": "~1.0.10" + }, + "bin": { + "nodetouch": "bin/nodetouch.js" + } + }, + "node_modules/triple-beam": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/triple-beam/-/triple-beam-1.3.0.tgz", + "integrity": "sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==" + }, + "node_modules/ts-log": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/ts-log/-/ts-log-2.2.3.tgz", + "integrity": "sha512-XvB+OdKSJ708Dmf9ore4Uf/q62AYDTzFcAdxc8KNML1mmAWywRFVt/dn1KYJH8Agt5UJNujfM3znU5PxgAzA2w==" + }, + "node_modules/ts-mocha": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/ts-mocha/-/ts-mocha-8.0.0.tgz", + "integrity": "sha512-Kou1yxTlubLnD5C3unlCVO7nh0HERTezjoVhVw/M5S1SqoUec0WgllQvPk3vzPMc6by8m6xD1uR1yRf8lnVUbA==", + "dev": true, + "dependencies": { + "ts-node": "7.0.1" + }, + "bin": { + "ts-mocha": "bin/ts-mocha" + }, + "engines": { + "node": ">= 6.X.X" + }, + "optionalDependencies": { + "tsconfig-paths": "^3.5.0" + }, + "peerDependencies": { + "mocha": "^3.X.X || ^4.X.X || ^5.X.X || ^6.X.X || ^7.X.X || ^8.X.X" + } + }, + "node_modules/ts-mocha/node_modules/diff": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-3.5.0.tgz", + "integrity": "sha512-A46qtFgd+g7pDZinpnwiRJtxbC1hpgf0uzP3iG89scHk0AUC7A1TGxf5OiiOUv/JMZR8GOt8hL900hV0bOy5xA==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/ts-mocha/node_modules/ts-node": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-7.0.1.tgz", + "integrity": "sha512-BVwVbPJRspzNh2yfslyT1PSbl5uIk03EZlb493RKHN4qej/D06n1cEhjlOJG69oFsE7OT8XjpTUcYf6pKTLMhw==", + "dev": true, + "dependencies": { + "arrify": "^1.0.0", + "buffer-from": "^1.1.0", + "diff": "^3.1.0", + "make-error": "^1.1.1", + "minimist": "^1.2.0", + "mkdirp": "^0.5.1", + "source-map-support": "^0.5.6", + "yn": "^2.0.0" + }, + "bin": { + "ts-node": "dist/bin.js" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/ts-node": { + "version": "10.4.0", + "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.4.0.tgz", + "integrity": "sha512-g0FlPvvCXSIO1JDF6S232P5jPYqBkRL9qly81ZgAOSU7rwI0stphCgd2kLiCrU9DjQCrJMWEqcNSjQL02s6d8A==", + "dev": true, + "dependencies": { + "@cspotcode/source-map-support": "0.7.0", + "@tsconfig/node10": "^1.0.7", + "@tsconfig/node12": "^1.0.7", + "@tsconfig/node14": "^1.0.0", + "@tsconfig/node16": "^1.0.2", + "acorn": "^8.4.1", + "acorn-walk": "^8.1.1", + "arg": "^4.1.0", + "create-require": "^1.1.0", + "diff": "^4.0.1", + "make-error": "^1.1.1", + "yn": "3.1.1" + }, + "bin": { + "ts-node": "dist/bin.js", + "ts-node-cwd": "dist/bin-cwd.js", + "ts-node-script": "dist/bin-script.js", + "ts-node-transpile-only": "dist/bin-transpile.js", + "ts-script": "dist/bin-script-deprecated.js" + }, + "peerDependencies": { + "@swc/core": ">=1.2.50", + "@swc/wasm": ">=1.2.50", + "@types/node": "*", + "typescript": ">=2.7" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "@swc/wasm": { + "optional": true + } + } + }, + "node_modules/ts-node/node_modules/acorn": { + "version": "8.6.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.6.0.tgz", + "integrity": "sha512-U1riIR+lBSNi3IbxtaHOIKdH8sLFv3NYfNv8sg7ZsNhcfl4HF2++BfqqrNAxoCLQW1iiylOj76ecnaUxz+z9yw==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/ts-node/node_modules/yn": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz", + "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/tsconfig-paths": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz", + "integrity": "sha512-dRcuzokWhajtZWkQsDVKbWyY+jgcLC5sqJhg2PSgf4ZkH2aHPvaOY8YWGhmjb68b5qqTfasSsDO9k7RUiEmZAw==", + "dev": true, + "optional": true, + "dependencies": { + "@types/json5": "^0.0.29", + "json5": "^1.0.1", + "minimist": "^1.2.0", + "strip-bom": "^3.0.0" + } + }, + "node_modules/tsconfig-paths/node_modules/json5": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.1.tgz", + "integrity": "sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==", + "dev": true, + "optional": true, + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "json5": "lib/cli.js" + } + }, + "node_modules/tsconfig-paths/node_modules/strip-bom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz", + "integrity": "sha1-IzTBjpx1n3vdVv3vfprj1YjmjtM=", + "dev": true, + "optional": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/tslib": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", + "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==", + "dev": true + }, + "node_modules/tsutils": { + "version": "3.21.0", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.21.0.tgz", + "integrity": "sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==", + "dev": true, + "dependencies": { + "tslib": "^1.8.1" + }, + "engines": { + "node": ">= 6" + }, + "peerDependencies": { + "typescript": ">=2.8.0 || >= 3.2.0-dev || >= 3.3.0-dev || >= 3.4.0-dev || >= 3.5.0-dev || >= 3.6.0-dev || >= 3.6.0-beta || >= 3.7.0-dev || >= 3.7.0-beta" + } + }, + "node_modules/tunnel-ssh": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/tunnel-ssh/-/tunnel-ssh-4.1.4.tgz", + "integrity": "sha512-CjBqboGvAbM7iXSX2F95kzoI+c2J81YkrHbyyo4SWNKCzU6w5LfEvXBCHu6PPriYaNvfhMKzD8bFf5Vl14YTtg==", + "dependencies": { + "debug": "2.6.9", + "lodash.defaults": "^4.1.0", + "ssh2": "0.5.4" + } + }, + "node_modules/tunnel-ssh/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/type": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/type/-/type-1.2.0.tgz", + "integrity": "sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg==", + "dev": true + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz", + "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/type-is": { + "version": "1.6.18", + "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", + "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typedarray": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", + "integrity": "sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=" + }, + "node_modules/typedarray-to-buffer": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz", + "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==", + "dev": true, + "dependencies": { + "is-typedarray": "^1.0.0" + } + }, + "node_modules/typescript": { + "version": "4.1.6", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.1.6.tgz", + "integrity": "sha512-pxnwLxeb/Z5SP80JDRzVjh58KsM6jZHRAOtTpS7sXLS4ogXNKC9ANxHHZqLLeVHZN35jCtI4JdmLLbLiC1kBow==", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/unc-path-regex": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/unc-path-regex/-/unc-path-regex-0.1.2.tgz", + "integrity": "sha1-5z3T17DXxe2G+6xrCufYxqadUPo=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/undefsafe": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/undefsafe/-/undefsafe-2.0.5.tgz", + "integrity": "sha512-WxONCrssBM8TSPRqN5EmsjVrsv4A8X12J4ArBiiayv3DyyG3ZlIg6yysuuSYdZsVz3TKcTg2fd//Ujd4CHV1iA==", + "dev": true + }, + "node_modules/underscore": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.12.1.tgz", + "integrity": "sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw==" + }, + "node_modules/undertaker": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/undertaker/-/undertaker-1.3.0.tgz", + "integrity": "sha512-/RXwi5m/Mu3H6IHQGww3GNt1PNXlbeCuclF2QYR14L/2CHPz3DFZkvB5hZ0N/QUkiXWCACML2jXViIQEQc2MLg==", + "dev": true, + "dependencies": { + "arr-flatten": "^1.0.1", + "arr-map": "^2.0.0", + "bach": "^1.0.0", + "collection-map": "^1.0.0", + "es6-weak-map": "^2.0.1", + "fast-levenshtein": "^1.0.0", + "last-run": "^1.1.0", + "object.defaults": "^1.0.0", + "object.reduce": "^1.0.0", + "undertaker-registry": "^1.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/undertaker-registry": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/undertaker-registry/-/undertaker-registry-1.0.1.tgz", + "integrity": "sha1-XkvaMI5KiirlhPm5pDWaSZglzFA=", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/undertaker/node_modules/fast-levenshtein": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-1.1.4.tgz", + "integrity": "sha1-5qdUzI8V5YmHqpy9J69m/W9OWvk=", + "dev": true + }, + "node_modules/undici": { + "version": "4.16.0", + "resolved": "https://registry.npmjs.org/undici/-/undici-4.16.0.tgz", + "integrity": "sha512-tkZSECUYi+/T1i4u+4+lwZmQgLXd4BLGlrc7KZPcLIW7Jpq99+Xpc30ONv7nS6F5UNOxp/HBZSSL9MafUrvJbw==", + "engines": { + "node": ">=12.18" + } + }, + "node_modules/union-value": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/union-value/-/union-value-1.0.1.tgz", + "integrity": "sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg==", + "dev": true, + "dependencies": { + "arr-union": "^3.1.0", + "get-value": "^2.0.6", + "is-extendable": "^0.1.1", + "set-value": "^2.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/unique-stream": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/unique-stream/-/unique-stream-2.3.1.tgz", + "integrity": "sha512-2nY4TnBE70yoxHkDli7DMazpWiP7xMdCYqU2nBRO0UB+ZpEkGsSija7MvmvnZFUeC+mrgiUfcHSr3LmRFIg4+A==", + "dev": true, + "dependencies": { + "json-stable-stringify-without-jsonify": "^1.0.1", + "through2-filter": "^3.0.0" + } + }, + "node_modules/unique-string": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/unique-string/-/unique-string-2.0.0.tgz", + "integrity": "sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==", + "dev": true, + "dependencies": { + "crypto-random-string": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", + "integrity": "sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/unset-value": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/unset-value/-/unset-value-1.0.0.tgz", + "integrity": "sha1-g3aHP30jNRef+x5vw6jtDfyKtVk=", + "dev": true, + "dependencies": { + "has-value": "^0.3.1", + "isobject": "^3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/unset-value/node_modules/has-value": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/has-value/-/has-value-0.3.1.tgz", + "integrity": "sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8=", + "dev": true, + "dependencies": { + "get-value": "^2.0.3", + "has-values": "^0.1.4", + "isobject": "^2.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/unset-value/node_modules/has-value/node_modules/isobject": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-2.1.0.tgz", + "integrity": "sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk=", + "dev": true, + "dependencies": { + "isarray": "1.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/unset-value/node_modules/has-values": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/has-values/-/has-values-0.1.4.tgz", + "integrity": "sha1-bWHeldkd/Km5oCCJrThL/49it3E=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/upath": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/upath/-/upath-1.2.0.tgz", + "integrity": "sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg==", + "dev": true, + "engines": { + "node": ">=4", + "yarn": "*" + } + }, + "node_modules/update-notifier": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/update-notifier/-/update-notifier-5.1.0.tgz", + "integrity": "sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw==", + "dev": true, + "dependencies": { + "boxen": "^5.0.0", + "chalk": "^4.1.0", + "configstore": "^5.0.1", + "has-yarn": "^2.1.0", + "import-lazy": "^2.1.0", + "is-ci": "^2.0.0", + "is-installed-globally": "^0.4.0", + "is-npm": "^5.0.0", + "is-yarn-global": "^0.3.0", + "latest-version": "^5.1.0", + "pupa": "^2.1.1", + "semver": "^7.3.4", + "semver-diff": "^3.1.1", + "xdg-basedir": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/yeoman/update-notifier?sponsor=1" + } + }, + "node_modules/update-notifier/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/update-notifier/node_modules/semver": { + "version": "7.3.5", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz", + "integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/update-notifier/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/uri-js/node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "engines": { + "node": ">=6" + } + }, + "node_modules/urix": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/urix/-/urix-0.1.0.tgz", + "integrity": "sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI=", + "deprecated": "Please see https://github.com/lydell/urix#deprecated", + "dev": true + }, + "node_modules/url": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "integrity": "sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ=", + "dependencies": { + "punycode": "1.3.2", + "querystring": "0.2.0" + } + }, + "node_modules/url-parse-lax": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", + "integrity": "sha1-FrXK/Afb42dsGxmZF3gj1lA6yww=", + "dev": true, + "dependencies": { + "prepend-http": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/use": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/use/-/use-3.1.1.tgz", + "integrity": "sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + }, + "node_modules/utile": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/utile/-/utile-0.3.0.tgz", + "integrity": "sha1-E1LDQOuCDk2N26A5pPv6oy7U7zo=", + "dependencies": { + "async": "~0.9.0", + "deep-equal": "~0.2.1", + "i": "0.3.x", + "mkdirp": "0.x.x", + "ncp": "1.0.x", + "rimraf": "2.x.x" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", + "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/utm": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/utm/-/utm-1.1.1.tgz", + "integrity": "sha512-rJIv6i1u86OVy91Burh/6oRFfE8zOCd6Unc8fvA0N7fjQT43ogqhVeUp5NByaZPe278QyiA5wMMEaAU9Jx6QRA==" + }, + "node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/v8-compile-cache": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz", + "integrity": "sha512-l8lCEmLcLYZh4nbunNZvQCJc5pv7+RCwa8q/LdUx8u7lsWvPDKmpodJAJNwkAhJC//dFY48KuIEmjtd4RViDrA==", + "dev": true + }, + "node_modules/v8flags": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/v8flags/-/v8flags-3.2.0.tgz", + "integrity": "sha512-mH8etigqMfiGWdeXpaaqGfs6BndypxusHHcv2qSHyZkGEznCd/qAXCWWRzeowtL54147cktFOC4P5y+kl8d8Jg==", + "dev": true, + "dependencies": { + "homedir-polyfill": "^1.0.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", + "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==", + "dev": true, + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/value-or-function": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/value-or-function/-/value-or-function-3.0.0.tgz", + "integrity": "sha1-HCQ6ULWVwb5Up1S/7OhWO5/42BM=", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", + "integrity": "sha1-IpnwLG3tMNSllhsLn3RSShj2NPw=", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/vinyl": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/vinyl/-/vinyl-2.2.1.tgz", + "integrity": "sha512-LII3bXRFBZLlezoG5FfZVcXflZgWP/4dCwKtxd5ky9+LOtM4CS3bIRQsmR1KMnMW07jpE8fqR2lcxPZ+8sJIcw==", + "dev": true, + "dependencies": { + "clone": "^2.1.1", + "clone-buffer": "^1.0.0", + "clone-stats": "^1.0.0", + "cloneable-readable": "^1.0.0", + "remove-trailing-separator": "^1.0.1", + "replace-ext": "^1.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/vinyl-fs": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/vinyl-fs/-/vinyl-fs-3.0.3.tgz", + "integrity": "sha512-vIu34EkyNyJxmP0jscNzWBSygh7VWhqun6RmqVfXePrOwi9lhvRs//dOaGOTRUQr4tx7/zd26Tk5WeSVZitgng==", + "dev": true, + "dependencies": { + "fs-mkdirp-stream": "^1.0.0", + "glob-stream": "^6.1.0", + "graceful-fs": "^4.0.0", + "is-valid-glob": "^1.0.0", + "lazystream": "^1.0.0", + "lead": "^1.0.0", + "object.assign": "^4.0.4", + "pumpify": "^1.3.5", + "readable-stream": "^2.3.3", + "remove-bom-buffer": "^3.0.0", + "remove-bom-stream": "^1.2.0", + "resolve-options": "^1.1.0", + "through2": "^2.0.0", + "to-through": "^2.0.0", + "value-or-function": "^3.0.0", + "vinyl": "^2.0.0", + "vinyl-sourcemap": "^1.1.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/vinyl-fs/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dev": true, + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/vinyl-fs/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==", + "dev": true + }, + "node_modules/vinyl-fs/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/vinyl-sourcemap": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/vinyl-sourcemap/-/vinyl-sourcemap-1.1.0.tgz", + "integrity": "sha1-kqgAWTo4cDqM2xHYswCtS+Y7PhY=", + "dev": true, + "dependencies": { + "append-buffer": "^1.0.2", + "convert-source-map": "^1.5.0", + "graceful-fs": "^4.1.6", + "normalize-path": "^2.1.1", + "now-and-later": "^2.0.0", + "remove-bom-buffer": "^3.0.0", + "vinyl": "^2.0.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/vinyl-sourcemap/node_modules/normalize-path": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-2.1.1.tgz", + "integrity": "sha1-GrKLVW4Zg2Oowab35vogE3/mrtk=", + "dev": true, + "dependencies": { + "remove-trailing-separator": "^1.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/when": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/when/-/when-2.0.1.tgz", + "integrity": "sha1-jYcv4V5oQkyRtLck6EjggH2rZkI=" + }, + "node_modules/which": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", + "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=" + }, + "node_modules/wide-align": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.3.tgz", + "integrity": "sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==", + "dev": true, + "dependencies": { + "string-width": "^1.0.2 || 2" + } + }, + "node_modules/wide-align/node_modules/ansi-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz", + "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/wide-align/node_modules/is-fullwidth-code-point": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", + "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/wide-align/node_modules/string-width": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz", + "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==", + "dev": true, + "dependencies": { + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^4.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/wide-align/node_modules/strip-ansi": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz", + "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=", + "dev": true, + "dependencies": { + "ansi-regex": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/widest-line": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/widest-line/-/widest-line-3.1.0.tgz", + "integrity": "sha512-NsmoXalsWVDMGupxZ5R08ka9flZjjiLvHVAWYOKtiKM8ujtZWr9cRffak+uSE48+Ob8ObalXpwyeUiyDD6QFgg==", + "dev": true, + "dependencies": { + "string-width": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/winston": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/winston/-/winston-3.3.3.tgz", + "integrity": "sha512-oEXTISQnC8VlSAKf1KYSSd7J6IWuRPQqDdo8eoRNaYKLvwSb5+79Z3Yi1lrl6KDpU6/VWaxpakDAtb1oQ4n9aw==", + "dependencies": { + "@dabh/diagnostics": "^2.0.2", + "async": "^3.1.0", + "is-stream": "^2.0.0", + "logform": "^2.2.0", + "one-time": "^1.0.0", + "readable-stream": "^3.4.0", + "stack-trace": "0.0.x", + "triple-beam": "^1.3.0", + "winston-transport": "^4.4.0" + }, + "engines": { + "node": ">= 6.4.0" + } + }, + "node_modules/winston-transport": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/winston-transport/-/winston-transport-4.4.0.tgz", + "integrity": "sha512-Lc7/p3GtqtqPBYYtS6KCN3c77/2QCev51DvcJKbkFPQNoj1sinkGwLGFDxkXY9J6p9+EPnYs+D90uwbnaiURTw==", + "dependencies": { + "readable-stream": "^2.3.7", + "triple-beam": "^1.2.0" + }, + "engines": { + "node": ">= 6.4.0" + } + }, + "node_modules/winston-transport/node_modules/readable-stream": { + "version": "2.3.7", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.7.tgz", + "integrity": "sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/winston-transport/node_modules/safe-buffer": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", + "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" + }, + "node_modules/winston-transport/node_modules/string_decoder": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", + "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/winston/node_modules/async": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.0.tgz", + "integrity": "sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw==" + }, + "node_modules/wmf": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wmf/-/wmf-1.0.2.tgz", + "integrity": "sha512-/p9K7bEh0Dj6WbXg4JG0xvLQmIadrner1bi45VMJTfnbVHsc7yIajZyoSoK60/dtVBs12Fm6WkUI5/3WAVsNMw==", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/word": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/word/-/word-0.3.0.tgz", + "integrity": "sha512-OELeY0Q61OXpdUfTp+oweA/vtLVg5VDOXh+3he3PNzLGG/y0oylSOC1xRVj0+l4vQ3tj/bB1HVHv1ocXkQceFA==", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", + "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/workerpool": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.1.0.tgz", + "integrity": "sha512-toV7q9rWNYha963Pl/qyeZ6wG+3nnsyvolaNUS8+R5Wtw6qJPTxIlOP1ZSvcGhEJw+l3HMMmtiNo9Gl61G4GVg==", + "dev": true + }, + "node_modules/wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=" + }, + "node_modules/write-file-atomic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz", + "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4", + "is-typedarray": "^1.0.0", + "signal-exit": "^3.0.2", + "typedarray-to-buffer": "^3.1.5" + } + }, + "node_modules/xdg-basedir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/xdg-basedir/-/xdg-basedir-4.0.0.tgz", + "integrity": "sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/xlsx": { + "version": "0.18.5", + "resolved": "https://registry.npmjs.org/xlsx/-/xlsx-0.18.5.tgz", + "integrity": "sha512-dmg3LCjBPHZnQp5/F/+nnTa+miPJxUXB6vtk42YjBBKayDNagxGEeIdWApkYPOf3Z3pm3k62Knjzp7lMeTEtFQ==", + "dependencies": { + "adler-32": "~1.3.0", + "cfb": "~1.2.1", + "codepage": "~1.15.0", + "crc-32": "~1.2.1", + "ssf": "~0.11.2", + "wmf": "~1.0.1", + "word": "~0.3.0" + }, + "bin": { + "xlsx": "bin/xlsx.njs" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/xml2js": { + "version": "0.4.23", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.4.23.tgz", + "integrity": "sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/xtend": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/y18n": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.1.tgz", + "integrity": "sha512-wNcy4NvjMYL8gogWWYAO7ZFWFfHcbdbE57tZO8e4cbpj8tfUcwrwqSl3ad8HxpYWCdXcJUCeKKZS62Av1affwQ==" + }, + "node_modules/yallist": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", + "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" + }, + "node_modules/yargs": { + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", + "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", + "dependencies": { + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^4.2.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^18.1.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "dependencies": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "dependencies": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser/node_modules/camelcase": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.2.0.tgz", + "integrity": "sha512-c7wVvbw3f37nuobQNtgsgG9POC9qMbNuMQmTCqZv23b6MIz0fcYpBiOlv9gEN/hdLdnZTDQhg6e9Dq5M1vKvfg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yargs-unparser/node_modules/decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/yn": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yn/-/yn-2.0.0.tgz", + "integrity": "sha1-5a2ryKz0CPY4X8dklWhMiOavaJo=", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + }, "dependencies": { "@babel/code-frame": { "version": "7.10.4", @@ -874,6 +13366,12 @@ "integrity": "sha512-Z4TYuEKn9+RbNVk1Ll2SS4x1JeLHecolIbM/a8gveaHsW0Hr+RQMraZACwTO2VD7JvepgA6UO1A1VrbktQrIbQ==", "dev": true }, + "@types/utm": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/utm/-/utm-1.1.1.tgz", + "integrity": "sha512-iIqxs3T7X2drAkeTn0DE3Xpcx5z0MHXJFnjvvWCTzZDF0CnM5D2CGn4eEFZi+uHNnCXx4j/3vA8MO6+xXpQYqA==", + "dev": true + }, "@types/uuid": { "version": "8.3.1", "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-8.3.1.tgz", @@ -1220,7 +13718,8 @@ "version": "5.3.2", "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", - "dev": true + "dev": true, + "requires": {} }, "acorn-walk": { "version": "8.2.0", @@ -1229,13 +13728,9 @@ "dev": true }, "adler-32": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/adler-32/-/adler-32-1.2.0.tgz", - "integrity": "sha1-aj5r8KY5ALoVZSgIyxXGgT0aXyU=", - "requires": { - "exit-on-epipe": "~1.0.1", - "printj": "~1.1.0" - } + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/adler-32/-/adler-32-1.3.1.tgz", + "integrity": "sha512-ynZ4w/nUUv5rrsR8UUGoe1VC9hZj6V5hU9Qw1HlMDJGEJw5S7TfTErWTjMys6M7vr0YWcPqs3qAr4ss0nDfP+A==" }, "adm-zip": { "version": "0.5.5", @@ -1253,9 +13748,9 @@ } }, "ajv": { - "version": "8.6.3", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.6.3.tgz", - "integrity": "sha512-SMJOdDP6LqTkD0Uq8qLi+gMwSt0imXLSV080qFVwJCpH9U6Mb+SUGHAXM0KNbcBPguytWyvFxcHgMLe2D2XSpw==", + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", "requires": { "fast-deep-equal": "^3.1.1", "json-schema-traverse": "^1.0.0", @@ -1553,7 +14048,7 @@ "asynckit": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" }, "atob": { "version": "2.1.2", @@ -2021,21 +14516,6 @@ "printj": "~1.3.0" }, "dependencies": { - "adler-32": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/adler-32/-/adler-32-1.3.0.tgz", - "integrity": "sha512-f5nltvjl+PRUh6YNfUstRaXwJxtfnKEWhAWWlmKvh+Y3J2+98a0KKVYDEhz6NdKGqswLhjNGznxfSsZGOvOd9g==", - "requires": { - "printj": "~1.2.2" - }, - "dependencies": { - "printj": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/printj/-/printj-1.2.3.tgz", - "integrity": "sha512-sanczS6xOJOg7IKDvi4sGOUOe7c1tsEzjwlLFH/zgwx/uyImVM9/rgBkc8AfiQa/Vg54nRd8mkm9yI7WV/O+WA==" - } - } - }, "printj": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/printj/-/printj-1.3.0.tgz", @@ -2540,13 +15020,9 @@ "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" }, "crc-32": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.0.tgz", - "integrity": "sha512-1uBwHxF+Y/4yF5G48fwnKq6QsIXheor3ZLPT80yGBV1oEUwpPojlEhQbWKVw1VwcTQyMGHK1/XMmTjmlsmTTGA==", - "requires": { - "exit-on-epipe": "~1.0.1", - "printj": "~1.1.0" - } + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/crc-32/-/crc-32-1.2.2.tgz", + "integrity": "sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==" }, "create-require": { "version": "1.1.1", @@ -3480,11 +15956,6 @@ "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", "integrity": "sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ=" }, - "exit-on-epipe": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/exit-on-epipe/-/exit-on-epipe-1.0.1.tgz", - "integrity": "sha512-h2z5mrROTxce56S+pnvAV890uu7ls7f1kEvVGJbw1OlFH3/mlJ5bkXu0KRyW94v37zzHPiUd55iLn3DA7TjWpw==" - }, "expand-brackets": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/expand-brackets/-/expand-brackets-2.1.4.tgz", @@ -3841,11 +16312,6 @@ "resolved": "https://registry.npmjs.org/fecha/-/fecha-4.2.0.tgz", "integrity": "sha512-aN3pcx/DSmtyoovUudctc8+6Hl4T+hI9GBBHLjA76jdZl7+b1sgh5g4k+u/GL3dTy1/pnYzKp69FpJ0OicE3Wg==" }, - "fflate": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/fflate/-/fflate-0.7.1.tgz", - "integrity": "sha512-VYM2Xy1gSA5MerKzCnmmuV2XljkpKwgJBKezW+495TTnTCh1x5HcYa1aH8wRU/MfTGhW4ziXqgwprgQUVl3Ohw==" - }, "file-entry-cache": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", @@ -4132,7 +16598,8 @@ "fs-routes": { "version": "9.0.3", "resolved": "https://registry.npmjs.org/fs-routes/-/fs-routes-9.0.3.tgz", - "integrity": "sha512-Y5tkylY9fQ1jm11FdJoptzqIG3OyzqrOF16W5odNlIdqFqb2355IbNB3jQkE+C268mSShLmIur8ynYCgL/Yg/g==" + "integrity": "sha512-Y5tkylY9fQ1jm11FdJoptzqIG3OyzqrOF16W5odNlIdqFqb2355IbNB3jQkE+C268mSShLmIur8ynYCgL/Yg/g==", + "requires": {} }, "fs.realpath": { "version": "1.0.0", @@ -5556,6 +18023,11 @@ } } }, + "jsonpath-plus": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/jsonpath-plus/-/jsonpath-plus-7.2.0.tgz", + "integrity": "sha512-zBfiUPM5nD0YZSBT/o/fbCUlCcepMIdP0CJZxM1+KgA4f2T206f6VAg9e7mX35+KlMaIc5qXW34f3BnwJ3w+RA==" + }, "jsonwebtoken": { "version": "8.5.1", "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz", @@ -7379,7 +19851,8 @@ "pg-pool": { "version": "3.4.1", "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.4.1.tgz", - "integrity": "sha512-TVHxR/gf3MeJRvchgNHxsYsTCHQ+4wm3VIHSS19z8NC0+gioEhq1okDY1sm/TYbfoP6JLFx01s0ShvZ3puP/iQ==" + "integrity": "sha512-TVHxR/gf3MeJRvchgNHxsYsTCHQ+4wm3VIHSS19z8NC0+gioEhq1okDY1sm/TYbfoP6JLFx01s0ShvZ3puP/iQ==", + "requires": {} }, "pg-protocol": { "version": "1.5.0", @@ -7536,7 +20009,8 @@ "version": "2.3.4", "resolved": "https://registry.npmjs.org/prettier-plugin-organize-imports/-/prettier-plugin-organize-imports-2.3.4.tgz", "integrity": "sha512-R8o23sf5iVL/U71h9SFUdhdOEPsi3nm42FD/oDYIZ2PQa4TNWWuWecxln6jlIQzpZTDMUeO1NicJP6lLn2TtRw==", - "dev": true + "dev": true, + "requires": {} }, "pretty-hrtime": { "version": "1.0.3", @@ -7544,11 +20018,6 @@ "integrity": "sha1-t+PqQkNaTJsnWdmeDyAesZWALuE=", "dev": true }, - "printj": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/printj/-/printj-1.1.2.tgz", - "integrity": "sha512-zA2SmoLaxZyArQTOPj5LXecR+RagfPSU5Kw1qP+jkWeNlrq+eJZyY2oS68SU1Z/7/myXM4lo9716laOFAVStCQ==" - }, "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", @@ -8262,7 +20731,8 @@ "version": "3.7.0", "resolved": "https://registry.npmjs.org/sinon-chai/-/sinon-chai-3.7.0.tgz", "integrity": "sha512-mf5NURdUaSdnatJx3uhoBOrY9dtL19fiOtAdT1Azxg3+lNJFiuN0uzaU3xX1LeAfL17kHQhTAJgpsfhbMJMY2g==", - "dev": true + "dev": true, + "requires": {} }, "slash": { "version": "3.0.0", @@ -8621,6 +21091,14 @@ "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz", "integrity": "sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=" }, + "string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "requires": { + "safe-buffer": "~5.2.0" + } + }, "string-width": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", @@ -8662,14 +21140,6 @@ "define-properties": "^1.1.3" } }, - "string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "requires": { - "safe-buffer": "~5.2.0" - } - }, "strip-ansi": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", @@ -9399,6 +21869,11 @@ "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" }, + "utm": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/utm/-/utm-1.1.1.tgz", + "integrity": "sha512-rJIv6i1u86OVy91Burh/6oRFfE8zOCd6Unc8fvA0N7fjQT43ogqhVeUp5NByaZPe278QyiA5wMMEaAU9Jx6QRA==" + }, "uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", @@ -9723,27 +22198,17 @@ "dev": true }, "xlsx": { - "version": "0.17.3", - "resolved": "https://registry.npmjs.org/xlsx/-/xlsx-0.17.3.tgz", - "integrity": "sha512-dGZKfyPSXfnoITruwisuDVZkvnxhjgqzWJXBJm2Khmh01wcw8//baRUvhroVRhW2SLbnlpGcCZZbeZO1qJgMIw==", + "version": "0.18.5", + "resolved": "https://registry.npmjs.org/xlsx/-/xlsx-0.18.5.tgz", + "integrity": "sha512-dmg3LCjBPHZnQp5/F/+nnTa+miPJxUXB6vtk42YjBBKayDNagxGEeIdWApkYPOf3Z3pm3k62Knjzp7lMeTEtFQ==", "requires": { - "adler-32": "~1.2.0", - "cfb": "^1.1.4", + "adler-32": "~1.3.0", + "cfb": "~1.2.1", "codepage": "~1.15.0", - "commander": "~2.17.1", - "crc-32": "~1.2.0", - "exit-on-epipe": "~1.0.1", - "fflate": "^0.7.1", + "crc-32": "~1.2.1", "ssf": "~0.11.2", "wmf": "~1.0.1", "word": "~0.3.0" - }, - "dependencies": { - "commander": { - "version": "2.17.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.17.1.tgz", - "integrity": "sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg==" - } } }, "xml2js": { diff --git a/api/package.json b/api/package.json index 51db4a569b..07684925c7 100644 --- a/api/package.json +++ b/api/package.json @@ -34,7 +34,7 @@ "@turf/helpers": "~6.5.0", "@turf/meta": "~6.5.0", "adm-zip": "~0.5.5", - "ajv": "~8.6.3", + "ajv": "~8.12.0", "aws-sdk": "~2.742.0", "axios": "~0.21.4", "clamdjs": "~1.0.2", @@ -46,6 +46,7 @@ "fast-json-patch": "~3.1.1", "form-data": "~4.0.0", "jsonpath": "~1.1.1", + "jsonpath-plus": "^7.2.0", "jsonwebtoken": "~8.5.1", "jwks-rsa": "~2.0.5", "knex": "~1.0.1", @@ -58,9 +59,10 @@ "sql-template-strings": "~2.2.2", "swagger-ui-express": "~4.3.0", "typescript": "~4.1.6", + "utm": "^1.1.1", "uuid": "~8.3.2", "winston": "~3.3.3", - "xlsx": "~0.17.0", + "xlsx": "~0.18.5", "xml2js": "~0.4.23" }, "devDependencies": { @@ -81,6 +83,7 @@ "@types/sinon": "~10.0.4", "@types/sinon-chai": "~3.2.5", "@types/swagger-ui-express": "~4.1.3", + "@types/utm": "^1.1.1", "@types/uuid": "~8.3.1", "@types/xml2js": "~0.4.9", "@types/yamljs": "~0.2.31", diff --git a/api/src/app.ts b/api/src/app.ts index 36088a5f35..8874216c72 100644 --- a/api/src/app.ts +++ b/api/src/app.ts @@ -4,7 +4,7 @@ import multer from 'multer'; import { OpenAPIV3 } from 'openapi-types'; import swaggerUIExperss from 'swagger-ui-express'; import { defaultPoolConfig, initDBPool } from './database/db'; -import { ensureHTTPError, HTTPErrorType } from './errors/custom-error'; +import { ensureHTTPError, HTTPErrorType } from './errors/http-error'; import { rootAPIDoc } from './openapi/root-api-doc'; import { authenticateRequest } from './request-handlers/security/authentication'; import { getLogger } from './utils/logger'; @@ -88,6 +88,11 @@ const openAPIFramework = initialize({ // If `next` is not included express will silently skip calling the `errorMiddleware` entirely. // eslint-disable-next-line @typescript-eslint/no-unused-vars errorMiddleware: function (error, req, res, next) { + if (res.headersSent) { + // response has already been sent + return; + } + // Ensure all errors (intentionally thrown or not) are in the same format as specified by the schema const httpError = ensureHTTPError(error); @@ -156,6 +161,13 @@ function validateAllResponses(req: Request, res: Response, next: NextFunction) { if (!isStrictValidation || !validationResult?.errors) { return json.apply(res, args); } else { + defaultLog.debug({ + label: 'validateAllResponses', + message: validationMessage, + responseBody: body, + errors: errorList + }); + return res.status(500).json({ name: HTTPErrorType.INTERNAL_SERVER_ERROR, status: 500, diff --git a/api/src/constants/codes.ts b/api/src/constants/codes.ts index b48e19332d..c0d5b5ede9 100644 --- a/api/src/constants/codes.ts +++ b/api/src/constants/codes.ts @@ -1,3 +1,5 @@ +// Note, more recent additions to this list may not be in order based on their `id`. If adding a new item to this list, +// double check what the highest id is (don't assume its based on the item at the bottom of this list). export const coordinator_agency = [ { id: 1, name: 'A Rocha Canada' }, { id: 2, name: 'Aarde Environmental Ltd.' }, @@ -153,7 +155,9 @@ export const coordinator_agency = [ { id: 152, name: 'Michigan State University' }, { id: 153, name: 'Mid Vancouver Island Habitat Enhancement Society' }, { id: 154, name: 'Ministry of Environment & Climate Change Strategy' }, + { id: 267, name: 'Ministry of Forests' }, { id: 155, name: 'Ministry of Forests, Lands, Natural Resource Operations & Rural Development' }, + { id: 266, name: 'Ministry of Lands, Water, and Resource Stewardship' }, { id: 156, name: 'Ministry of Transportation & Infrastructure' }, { id: 157, name: 'Minnow Environmental Inc.' }, { id: 158, name: 'Montana Fish, Wildlife & Parks' }, diff --git a/api/src/constants/database.ts b/api/src/constants/database.ts index e07bdd2a8a..4712039799 100644 --- a/api/src/constants/database.ts +++ b/api/src/constants/database.ts @@ -7,5 +7,12 @@ export enum SYSTEM_IDENTITY_SOURCE { DATABASE = 'DATABASE', IDIR = 'IDIR', - BCEID = 'BCEID' + BCEID_BASIC = 'BCEIDBASIC', + BCEID_BUSINESS = 'BCEIDBUSINESS', + SYSTEM = 'SYSTEM' +} + +export enum SCHEMAS { + API = 'BIOHUB_DAPI_V1', + DATA = 'BIOHUB' } diff --git a/api/src/constants/keycloak.ts b/api/src/constants/keycloak.ts deleted file mode 100644 index 27490050f6..0000000000 --- a/api/src/constants/keycloak.ts +++ /dev/null @@ -1,4 +0,0 @@ -// Possible identity sources for BCEID users -export const EXTERNAL_BCEID_IDENTITY_SOURCES = ['BCEID-BASIC-AND-BUSINESS', 'BCEID']; -// Possible identity sources for IDIR users -export const EXTERNAL_IDIR_IDENTITY_SOURCES = ['IDIR']; diff --git a/api/src/constants/notifications.ts b/api/src/constants/notifications.ts index 0db0d8711d..f236b566f1 100644 --- a/api/src/constants/notifications.ts +++ b/api/src/constants/notifications.ts @@ -1,4 +1,4 @@ -import { IgcNotifyGenericMessage } from '../models/gcnotify'; +import { IgcNotifyGenericMessage } from '../services/gcnotify-service'; //admin email template for new access requests export const ACCESS_REQUEST_ADMIN_EMAIL: IgcNotifyGenericMessage = { diff --git a/api/src/constants/status.ts b/api/src/constants/status.ts index 49cfc4d3a8..53874880fa 100644 --- a/api/src/constants/status.ts +++ b/api/src/constants/status.ts @@ -25,8 +25,79 @@ export enum SUBMISSION_STATUS_TYPE { 'SUBMISSION_DATA_INGESTED' = 'Submission Data Ingested', 'SECURED' = 'Secured', 'AWAITING CURRATION' = 'Awaiting Curration', - 'PUBLISHED' = 'Published', 'REJECTED' = 'Rejected', 'ON HOLD' = 'On Hold', + 'SYSTEM_ERROR' = 'System Error', + + //Failure + 'FAILED_OCCURRENCE_PREPARATION' = 'Failed to prepare submission', + 'INVALID_MEDIA' = 'Media is not valid', + 'FAILED_VALIDATION' = 'Failed to validate', + 'FAILED_TRANSFORMED' = 'Failed to transform', + 'FAILED_PROCESSING_OCCURRENCE_DATA' = 'Failed to process occurrence data', + 'FAILED_SUMMARY_PREPARATION' = 'Failed to prepare summary submission' +} + +export enum SUMMARY_SUBMISSION_MESSAGE_TYPE { + 'DUPLICATE_HEADER' = 'Duplicate Header', + 'UNKNOWN_HEADER' = 'Unknown Header', + 'MISSING_REQUIRED_HEADER' = 'Missing Required Header', + 'MISSING_RECOMMENDED_HEADER' = 'Missing Recommended Header', + 'DANGLING_PARENT_CHILD_KEY' = 'Missing Child Key from Parent', + 'MISCELLANEOUS' = 'Miscellaneous', + 'MISSING_REQUIRED_FIELD' = 'Missing Required Field', + 'UNEXPECTED_FORMAT' = 'Unexpected Format', + 'OUT_OF_RANGE' = 'Out of Range', + 'INVALID_VALUE' = 'Invalid Value', + 'MISSING_VALIDATION_SCHEMA' = 'Missing Validation Schema', + 'INVALID_MEDIA' = 'Media is Invalid', + 'INVALID_XLSX_CSV' = 'XLSX CSV is Invalid', + 'FAILED_TO_GET_TEMPLATE_NAME_VERSION' = 'Missing Name or Version Number', + 'FAILED_GET_VALIDATION_RULES' = 'Failed to Get Validation Rules', + 'FAILED_PARSE_VALIDATION_SCHEMA' = 'Failed to Parse Validation Schema', + 'UNSUPPORTED_FILE_TYPE' = 'Unsupported File Type', + 'FOUND_VALIDATION' = 'Found Validation', 'SYSTEM_ERROR' = 'System Error' } + +// Message types that match the submission_message_type table +export enum SUBMISSION_MESSAGE_TYPE { + 'DUPLICATE_HEADER' = 'Duplicate Header', + 'UNKNOWN_HEADER' = 'Unknown Header', + 'MISSING_REQUIRED_HEADER' = 'Missing Required Header', + 'MISSING_RECOMMENDED_HEADER' = 'Missing Recommended Header', + 'DANGLING_PARENT_CHILD_KEY' = 'Missing Child Key from Parent', + 'MISCELLANEOUS' = 'Miscellaneous', + 'MISSING_REQUIRED_FIELD' = 'Missing Required Field', + 'UNEXPECTED_FORMAT' = 'Unexpected Format', + 'OUT_OF_RANGE' = 'Out of Range', + 'INVALID_VALUE' = 'Invalid Value', + 'MISSING_VALIDATION_SCHEMA' = 'Missing Validation Schema', + 'FAILED_GET_OCCURRENCE' = 'Failed to Get Occurrence Submission', + 'FAILED_GET_FILE_FROM_S3' = 'Failed to get file from S3', + 'FAILED_UPLOAD_FILE_TO_S3' = 'Failed to upload file to S3', + 'FAILED_PARSE_SUBMISSION' = 'Failed to parse submission', + 'FAILED_PREP_DWC_ARCHIVE' = 'Failed to prep DarwinCore Archive', + 'FAILED_PREP_XLSX' = 'Failed to prep XLSX', + 'FAILED_PERSIST_PARSE_ERRORS' = 'Failed to persist parse errors', + 'FAILED_GET_VALIDATION_RULES' = 'Failed to get validation rules', + 'FAILED_GET_TRANSFORMATION_RULES' = 'Failed to get transformation rules', + 'FAILED_PERSIST_TRANSFORMATION_RESULTS' = 'Failed to persist transformation results', + 'FAILED_TRANSFORM_XLSX' = 'Failed to transform XLSX', + 'FAILED_VALIDATE_DWC_ARCHIVE' = 'Failed to validate DarwinCore Archive', + 'FAILED_PERSIST_VALIDATION_RESULTS' = 'Failed to persist validation results', + 'FAILED_UPDATE_OCCURRENCE_SUBMISSION' = 'Failed to update occurrence submission', + 'FAILED_TO_GET_TRANSFORM_SCHEMA' = 'Unable to get transform schema for submission', + 'FAILED_TO_GET_TEMPLATE_NAME_VERSION' = 'Missing name or version number.', + 'INVALID_MEDIA' = 'Media is invalid', + 'INVALID_XLSX_CSV' = 'Media is not a valid XLSX CSV file.', + 'UNSUPPORTED_FILE_TYPE' = 'File submitted is not a supported type', + 'NON_UNIQUE_KEY' = 'Duplicate Key(s) found in file.', + 'MISMATCHED_TEMPLATE_SURVEY_SPECIES' = 'Mismatched template with survey focal species' +} + +export enum MESSAGE_CLASS_NAME { + NOTICE = 'Notice', + ERROR = 'Error', + WARNING = 'Warning' +} diff --git a/api/src/database/db.test.ts b/api/src/database/db.test.ts index 5af73ff347..e55e62f82f 100644 --- a/api/src/database/db.test.ts +++ b/api/src/database/db.test.ts @@ -4,8 +4,8 @@ import * as pg from 'pg'; import Sinon from 'sinon'; import SQL from 'sql-template-strings'; import { SYSTEM_IDENTITY_SOURCE } from '../constants/database'; -import { HTTPError } from '../errors/custom-error'; -import { setSystemUserContextSQL } from '../queries/database/user-context-queries'; +import { ApiExecuteSQLError } from '../errors/api-error'; +import { HTTPError } from '../errors/http-error'; import * as db from './db'; import { getAPIUserDBConnection, getDBConnection, getDBPool, getKnex, IDBConnection, initDBPool } from './db'; @@ -51,7 +51,11 @@ describe('db', () => { describe('DBConnection', () => { const sinonSandbox = Sinon.createSandbox(); - const mockKeycloakToken = { preferred_username: 'test@idir' }; + const mockKeycloakToken = { + preferred_username: 'testguid@idir', + idir_username: 'testuser', + identity_provider: SYSTEM_IDENTITY_SOURCE.IDIR + }; const queryStub = sinonSandbox.stub().resolves(); const releaseStub = sinonSandbox.stub().resolves(); @@ -79,12 +83,6 @@ describe('db', () => { expect(getDBPoolStub).to.have.been.calledOnce; expect(connectStub).to.have.been.calledOnce; - const expectedSystemUserContextSQL = setSystemUserContextSQL('test', SYSTEM_IDENTITY_SOURCE.IDIR); - expect(queryStub).to.have.been.calledWith( - expectedSystemUserContextSQL?.text, - expectedSystemUserContextSQL?.values - ); - expect(queryStub).to.have.been.calledWith('BEGIN'); }); }); @@ -114,17 +112,24 @@ describe('db', () => { it('throws an error', async () => { const getDBPoolStub = sinonSandbox.stub(db, 'getDBPool').returns(undefined); - let expectedError: Error; + let expectedError: ApiExecuteSQLError; try { await connection.open(); expect.fail('Expected an error to be thrown'); } catch (error) { - expectedError = error as Error; + expectedError = error as ApiExecuteSQLError; } - expect(expectedError.message).to.equal('DBPool is not initialized'); + expect(expectedError.message).to.equal('Failed to execute SQL'); + expect(expectedError.errors?.length).to.be.greaterThan(0); + expectedError.errors?.forEach((item) => { + expect(item).to.be.instanceOf(Error); + if (item instanceof Error) { + expect(item.message).to.be.eql('DBPool is not initialized'); + } + }); expect(getDBPoolStub).to.have.been.calledOnce; expect(connectStub).not.to.have.been.called; @@ -193,16 +198,24 @@ describe('db', () => { it('throws an error', async () => { sinonSandbox.stub(db, 'getDBPool').returns((mockPool as unknown) as pg.Pool); - let expectedError: Error; + let expectedError: ApiExecuteSQLError; try { await connection.commit(); expect.fail('Expected an error to be thrown'); } catch (error) { - expectedError = error as Error; + expectedError = error as ApiExecuteSQLError; } - expect(expectedError.message).to.equal('DBConnection is not open'); + expect(expectedError.message).to.equal('Failed to execute SQL'); + + expect(expectedError.errors?.length).to.be.greaterThan(0); + expectedError.errors?.forEach((item) => { + expect(item).to.be.instanceOf(Error); + if (item instanceof Error) { + expect(item.message).to.be.eql('DBConnection is not open'); + } + }); }); }); }); @@ -224,16 +237,24 @@ describe('db', () => { it('throws an error', async () => { sinonSandbox.stub(db, 'getDBPool').returns((mockPool as unknown) as pg.Pool); - let expectedError: Error; + let expectedError: ApiExecuteSQLError; try { await connection.rollback(); expect.fail('Expected an error to be thrown'); } catch (error) { - expectedError = error as Error; + expectedError = error as ApiExecuteSQLError; } - expect(expectedError.message).to.equal('DBConnection is not open'); + expect(expectedError.message).to.equal('Failed to execute SQL'); + + expect(expectedError.errors?.length).to.be.greaterThan(0); + expectedError.errors?.forEach((item) => { + expect(item).to.be.instanceOf(Error); + if (item instanceof Error) { + expect(item.message).to.be.eql('DBConnection is not open'); + } + }); }); }); }); @@ -265,16 +286,24 @@ describe('db', () => { it('throws an error', async () => { sinonSandbox.stub(db, 'getDBPool').returns((mockPool as unknown) as pg.Pool); - let expectedError: Error; + let expectedError: ApiExecuteSQLError; try { await connection.query('sql query'); expect.fail('Expected an error to be thrown'); } catch (error) { - expectedError = error as Error; + expectedError = error as ApiExecuteSQLError; } - expect(expectedError.message).to.equal('DBConnection is not open'); + expect(expectedError.message).to.equal('Failed to execute SQL'); + + expect(expectedError.errors?.length).to.be.greaterThan(0); + expectedError.errors?.forEach((item) => { + expect(item).to.be.instanceOf(Error); + if (item instanceof Error) { + expect(item.message).to.be.eql('DBConnection is not open'); + } + }); }); }); }); @@ -298,7 +327,7 @@ describe('db', () => { it('throws an error', async () => { sinonSandbox.stub(db, 'getDBPool').returns((mockPool as unknown) as pg.Pool); - let expectedError: Error; + let expectedError: ApiExecuteSQLError; try { const sqlStatement = SQL`sql query ${123}`; @@ -306,10 +335,17 @@ describe('db', () => { expect.fail('Expected an error to be thrown'); } catch (error) { - expectedError = error as Error; + expectedError = error as ApiExecuteSQLError; } - - expect(expectedError.message).to.equal('DBConnection is not open'); + expect(expectedError.message).to.equal('Failed to execute SQL'); + + expect(expectedError.errors?.length).to.be.greaterThan(0); + expectedError.errors?.forEach((item) => { + expect(item).to.be.instanceOf(Error); + if (item instanceof Error) { + expect(item.message).to.be.eql('DBConnection is not open'); + } + }); }); }); }); @@ -317,6 +353,10 @@ describe('db', () => { }); describe('getAPIUserDBConnection', () => { + beforeEach(() => { + process.env.DB_USER_API = 'example_db_username'; + }); + afterEach(() => { Sinon.restore(); }); @@ -328,8 +368,12 @@ describe('db', () => { getAPIUserDBConnection(); + const DB_USERNAME = process.env.DB_USER_API; + expect(getDBConnectionStub).to.have.been.calledWith({ - preferred_username: 'biohub_api@database' + preferred_username: `${DB_USERNAME}@database`, + sims_system_username: DB_USERNAME, + identity_provider: 'database' }); }); }); diff --git a/api/src/database/db.ts b/api/src/database/db.ts index 2cd86c2f0f..a40b7a0541 100644 --- a/api/src/database/db.ts +++ b/api/src/database/db.ts @@ -1,18 +1,17 @@ import knex, { Knex } from 'knex'; import * as pg from 'pg'; -import { SQLStatement } from 'sql-template-strings'; -import { ApiExecuteSQLError, ApiGeneralError } from '../errors/custom-error'; -import { queries } from '../queries/queries'; -import { getUserIdentifier, getUserIdentitySource } from '../utils/keycloak-utils'; +import SQL, { SQLStatement } from 'sql-template-strings'; +import { ApiExecuteSQLError, ApiGeneralError } from '../errors/api-error'; +import { getUserGuid, getUserIdentifier, getUserIdentitySource } from '../utils/keycloak-utils'; import { getLogger } from '../utils/logger'; const defaultLog = getLogger('database/db'); -const DB_HOST = process.env.DB_HOST; -const DB_PORT = Number(process.env.DB_PORT); -const DB_USERNAME = process.env.DB_USER_API; -const DB_PASSWORD = process.env.DB_USER_API_PASS; -const DB_DATABASE = process.env.DB_DATABASE; +const getDbHost = () => process.env.DB_HOST; +const getDbPort = () => Number(process.env.DB_PORT); +const getDbUsername = () => process.env.DB_USER_API; +const getDbPassword = () => process.env.DB_USER_API_PASS; +const getDbDatabase = () => process.env.DB_DATABASE; const DB_POOL_SIZE: number = Number(process.env.DB_POOL_SIZE) || 20; const DB_CONNECTION_TIMEOUT: number = Number(process.env.DB_CONNECTION_TIMEOUT) || 0; @@ -21,11 +20,11 @@ const DB_IDLE_TIMEOUT: number = Number(process.env.DB_IDLE_TIMEOUT) || 10000; const DB_CLIENT = 'pg'; export const defaultPoolConfig: pg.PoolConfig = { - user: DB_USERNAME, - password: DB_PASSWORD, - database: DB_DATABASE, - port: DB_PORT, - host: DB_HOST, + user: getDbUsername(), + password: getDbPassword(), + database: getDbDatabase(), + port: getDbPort(), + host: getDbHost(), max: DB_POOL_SIZE, connectionTimeoutMillis: DB_CONNECTION_TIMEOUT, idleTimeoutMillis: DB_IDLE_TIMEOUT @@ -33,7 +32,7 @@ export const defaultPoolConfig: pg.PoolConfig = { // Custom type handler for psq `DATE` type to prevent local time/zone information from being added. // Why? By default, node-postgres assumes local time/zone for any psql `DATE` or `TIME` types that don't have timezone information. -// This Can lead to unexpected behaviour when the original psql `DATE` value was intentionally omitting time/zone information. +// This Can lead to unexpected behavior when the original psql `DATE` value was intentionally omitting time/zone information. // PSQL date types: https://www.postgresql.org/docs/12/datatype-datetime.html // node-postgres type handling (see bottom of page): https://node-postgres.com/features/types pg.types.setTypeParser(pg.types.builtins.DATE, (stringValue: string) => { @@ -313,24 +312,49 @@ export const getDBConnection = function (keycloakToken: object): IDBConnection { * Sets the _systemUserId if successful. */ const _setUserContext = async () => { + const userGuid = getUserGuid(_token); const userIdentifier = getUserIdentifier(_token); const userIdentitySource = getUserIdentitySource(_token); + defaultLog.debug({ label: '_setUserContext', userGuid, userIdentifier, userIdentitySource }); - if (!userIdentifier || !userIdentitySource) { + if (!userGuid || !userIdentifier || !userIdentitySource) { throw new ApiGeneralError('Failed to identify authenticated user'); } - // Set the user context for all queries made using this connection - const setSystemUserContextSQLStatement = queries.database.setSystemUserContextSQL( - userIdentifier, - userIdentitySource - ); + // Patch user GUID + const patchUserGuidSqlStatement = SQL` + UPDATE + system_user + SET + user_guid = ${userGuid.toLowerCase()} + WHERE + system_user_id + IN ( + SELECT + su.system_user_id + FROM + system_user su + LEFT JOIN + user_identity_source uis + ON + uis.user_identity_source_id = su.user_identity_source_id + WHERE + su.user_identifier ILIKE ${userIdentifier} + AND + uis.name ILIKE ${userIdentitySource} + AND + user_guid IS NULL + ); + `; - if (!setSystemUserContextSQLStatement) { - throw new ApiExecuteSQLError('Failed to build SQL user context statement'); - } + // Set the user context for all queries made using this connection + const setSystemUserContextSQLStatement = SQL` + SELECT api_set_context(${userGuid}, ${userIdentitySource}); + `; try { + await _client.query(patchUserGuidSqlStatement.text, patchUserGuidSqlStatement.values); + const response = await _client.query( setSystemUserContextSQLStatement.text, setSystemUserContextSQLStatement.values @@ -343,14 +367,14 @@ export const getDBConnection = function (keycloakToken: object): IDBConnection { }; return { - open: _open, - query: _query, - sql: _sql, - knex: _knex, - release: _release, - commit: _commit, - rollback: _rollback, - systemUserId: _getSystemUserID + open: asyncErrorWrapper(_open), + query: asyncErrorWrapper(_query), + sql: asyncErrorWrapper(_sql), + knex: asyncErrorWrapper(_knex), + release: syncErrorWrapper(_release), + commit: asyncErrorWrapper(_commit), + rollback: asyncErrorWrapper(_rollback), + systemUserId: syncErrorWrapper(_getSystemUserID) }; }; @@ -363,7 +387,11 @@ export const getDBConnection = function (keycloakToken: object): IDBConnection { * @return {*} {IDBConnection} */ export const getAPIUserDBConnection = (): IDBConnection => { - return getDBConnection({ preferred_username: 'biohub_api@database' }); + return getDBConnection({ + preferred_username: `${getDbUsername()}@database`, + sims_system_username: getDbUsername(), + identity_provider: 'database' + }); }; /** @@ -379,3 +407,53 @@ export const getKnex = = any, TResult = Rec > => { return knex({ client: DB_CLIENT }); }; + +/** + * An asynchronous wrapper function that will catch any exceptions thrown by the wrapped function + * + * @param fn the function to be wrapped + * @returns Promise A Promise with the wrapped functions return value + */ +const asyncErrorWrapper = ( + fn: (...args: WrapperArgs) => Promise +) => async (...args: WrapperArgs): Promise => { + try { + return await fn(...args); + } catch (err) { + throw parseError(err); + } +}; + +/** + * A synchronous wrapper function that will catch any exceptions thrown by the wrapped function + * + * @param fn the function to be wrapped + * @returns WrapperReturn The wrapped functions return value + */ +const syncErrorWrapper = (fn: (...args: WrapperArgs) => WrapperReturn) => ( + ...args: WrapperArgs +): WrapperReturn => { + try { + return fn(...args); + } catch (err) { + throw parseError(err); + } +}; + +/** + * This function parses the passed in error and translates them into a human readable error + * + * @param error error to be parsed + * @returns an error to throw + */ +const parseError = (error: any) => { + switch (error.message) { + // error thrown by DB trigger based on revision_count + // will be thrown if two updates to the same record are made concurrently + case 'CONCURRENCY_EXCEPTION': + throw new ApiExecuteSQLError('Failed to update stale data', [error]); + default: + // Generic error thrown if not captured above + throw new ApiExecuteSQLError('Failed to execute SQL', [error]); + } +}; diff --git a/api/src/errors/api-error.test.ts b/api/src/errors/api-error.test.ts new file mode 100644 index 0000000000..d8a6da0028 --- /dev/null +++ b/api/src/errors/api-error.test.ts @@ -0,0 +1,25 @@ +import { expect } from 'chai'; +import { describe } from 'mocha'; +import { ApiErrorType, ApiExecuteSQLError, ApiGeneralError, ApiUnknownError } from './api-error'; + +describe('ApiError', () => { + describe('No error value provided', () => { + let message: string; + + before(() => { + message = 'response message'; + }); + + it('Creates Api General error', function () { + expect(new ApiGeneralError(message).name).to.equal(ApiErrorType.GENERAL); + }); + + it('Creates Api Unknown error', function () { + expect(new ApiUnknownError(message).name).to.equal(ApiErrorType.UNKNOWN); + }); + + it('Creates Api execute SQL error', function () { + expect(new ApiExecuteSQLError(message).name).to.equal(ApiErrorType.EXECUTE_SQL); + }); + }); +}); diff --git a/api/src/errors/api-error.ts b/api/src/errors/api-error.ts new file mode 100644 index 0000000000..c88a77e524 --- /dev/null +++ b/api/src/errors/api-error.ts @@ -0,0 +1,82 @@ +export enum ApiErrorType { + BUILD_SQL = 'Error constructing SQL query', + EXECUTE_SQL = 'Error executing SQL query', + GENERAL = 'Error', + UNKNOWN = 'Unknown Error' +} + +export class ApiError extends Error { + errors?: (string | object)[]; + + constructor(name: ApiErrorType, message: string, errors?: (string | object)[], stack?: string) { + super(message); + + this.name = name; + this.errors = errors || []; + this.stack = stack; + + if (stack) { + this.stack = stack; + } + + if (!this.stack) { + Error.captureStackTrace(this); + } + } +} + +/** + * Api encountered an error. + * + * @export + * @class ApiGeneralError + * @extends {ApiError} + */ +export class ApiGeneralError extends ApiError { + constructor(message: string, errors?: (string | object)[]) { + super(ApiErrorType.GENERAL, message, errors); + } +} + +/** + * API encountered an unknown/unexpected error. + * + * @export + * @class ApiUnknownError + * @extends {ApiError} + */ +export class ApiUnknownError extends ApiError { + constructor(message: string, errors?: (string | object)[]) { + super(ApiErrorType.UNKNOWN, message, errors); + } +} + +/** + * API executed a query against the database, but the response was missing data, or indicated the query failed. + * + * Examples: + * - A query to select rows that are expected to exist returns with `rows=[]`. + * - A query to insert a new record returns with `rowCount=0` indicating no new row was added. + * + * @export + * @class ApiExecuteSQLError + * @extends {ApiError} + */ +export class ApiExecuteSQLError extends ApiError { + constructor(message: string, errors?: (string | object)[]) { + super(ApiErrorType.EXECUTE_SQL, message, errors); + } +} + +/** + * API failed to build SQL a query. + * + * @export + * @class ApiBuildSQLError + * @extends {ApiError} + */ +export class ApiBuildSQLError extends ApiError { + constructor(message: string, errors?: (string | object)[]) { + super(ApiErrorType.BUILD_SQL, message, errors); + } +} diff --git a/api/src/errors/custom-error.test.ts b/api/src/errors/http-error.test.ts similarity index 74% rename from api/src/errors/custom-error.test.ts rename to api/src/errors/http-error.test.ts index 94d88508f9..3a79ce25dd 100644 --- a/api/src/errors/custom-error.test.ts +++ b/api/src/errors/http-error.test.ts @@ -1,47 +1,8 @@ import { expect } from 'chai'; import { describe } from 'mocha'; import { DatabaseError } from 'pg'; -import { - ApiBuildSQLError, - ApiError, - ApiErrorType, - ApiExecuteSQLError, - ApiGeneralError, - ApiUnknownError, - ensureHTTPError, - HTTP400, - HTTP401, - HTTP403, - HTTP409, - HTTP500, - HTTPError -} from './custom-error'; - -describe('ApiError', () => { - describe('No error value provided', () => { - let message: string; - - before(() => { - message = 'response message'; - }); - - it('Creates Api General error', function () { - expect(new ApiGeneralError(message).name).to.equal(ApiErrorType.GENERAL); - }); - - it('Creates Api Unknown error', function () { - expect(new ApiUnknownError(message).name).to.equal(ApiErrorType.UNKNOWN); - }); - - it('Creates Api build SQL error', function () { - expect(new ApiBuildSQLError(message).name).to.equal(ApiErrorType.BUILD_SQL); - }); - - it('Creates Api execute SQL error', function () { - expect(new ApiExecuteSQLError(message).name).to.equal(ApiErrorType.EXECUTE_SQL); - }); - }); -}); +import { ApiError, ApiErrorType } from './api-error'; +import { ensureHTTPError, HTTP400, HTTP401, HTTP403, HTTP409, HTTP500, HTTPError } from './http-error'; describe('HTTPError', () => { describe('No error value provided', () => { diff --git a/api/src/errors/custom-error.ts b/api/src/errors/http-error.ts similarity index 62% rename from api/src/errors/custom-error.ts rename to api/src/errors/http-error.ts index 4cb82552a5..125f425578 100644 --- a/api/src/errors/custom-error.ts +++ b/api/src/errors/http-error.ts @@ -1,87 +1,5 @@ import { DatabaseError } from 'pg'; - -export enum ApiErrorType { - BUILD_SQL = 'Error constructing SQL query', - EXECUTE_SQL = 'Error executing SQL query', - GENERAL = 'Error', - UNKNOWN = 'Unknown Error' -} - -export class ApiError extends Error { - errors?: (string | object)[]; - - constructor(name: ApiErrorType, message: string, errors?: (string | object)[], stack?: string) { - super(message); - - this.name = name; - this.errors = errors || []; - this.stack = stack; - - if (stack) { - this.stack = stack; - } - - if (!this.stack) { - Error.captureStackTrace(this); - } - } -} - -/** - * Api encountered an error. - * - * @export - * @class ApiGeneralError - * @extends {ApiError} - */ -export class ApiGeneralError extends ApiError { - constructor(message: string, errors?: (string | object)[]) { - super(ApiErrorType.GENERAL, message, errors); - } -} - -/** - * API encountered an unknown/unexpected error. - * - * @export - * @class ApiUnknownError - * @extends {ApiError} - */ -export class ApiUnknownError extends ApiError { - constructor(message: string, errors?: (string | object)[]) { - super(ApiErrorType.UNKNOWN, message, errors); - } -} - -/** - * API failed to build SQL a query. - * - * @export - * @class ApiBuildSQLError - * @extends {ApiError} - */ -export class ApiBuildSQLError extends ApiError { - constructor(message: string, errors?: (string | object)[]) { - super(ApiErrorType.BUILD_SQL, message, errors); - } -} - -/** - * API executed a query against the database, but the response was missing data, or indicated the query failed. - * - * Examples: - * - A query to select rows that are expected to exist returns with `rows=[]`. - * - A query to insert a new record returns with `rowCount=0` indicating no new row was added. - * - * @export - * @class ApiExecuteSQLError - * @extends {ApiError} - */ -export class ApiExecuteSQLError extends ApiError { - constructor(message: string, errors?: (string | object)[]) { - super(ApiErrorType.EXECUTE_SQL, message, errors); - } -} +import { ApiError } from './api-error'; export enum HTTPErrorType { BAD_REQUEST = 'Bad Request', diff --git a/api/src/json-schema/validation-schema.ts b/api/src/json-schema/validation-schema.ts index 8956214670..708bdcdcdb 100644 --- a/api/src/json-schema/validation-schema.ts +++ b/api/src/json-schema/validation-schema.ts @@ -29,6 +29,14 @@ export const submissionValidationSchema = { items: { $ref: '#/$defs/submission_validation' } + }, + workbookValidations: { + description: + 'An array of validations to apply across multiple worksheets within the given workbook submission file', + type: 'array', + items: { + $ref: '#/$defs/workbook_validation' + } } }, $defs: { @@ -63,7 +71,7 @@ export const submissionValidationSchema = { additionalProperties: false }, column: { - description: 'An single column within a file/sheet', + description: 'A single column within a file/sheet', type: 'object', required: ['name'], properties: { @@ -97,6 +105,15 @@ export const submissionValidationSchema = { } ] }, + workbook_validation: { + title: 'Workbook Validation', + description: 'The validators that can be applied against a workbook submission file.', + anyOf: [ + { + $ref: '#/$defs/workbook_parent_child_key_match_validator' + } + ] + }, file_validation: { title: 'File/Sheet Validation', description: 'The validators that can be applied against a file/sheet within a submission file.', @@ -112,6 +129,9 @@ export const submissionValidationSchema = { }, { $ref: '#/$defs/file_valid_columns_validator' + }, + { + $ref: '#/$defs/file_column_unique_validator' } ] }, @@ -119,6 +139,9 @@ export const submissionValidationSchema = { title: 'Column Validation', description: 'The validators that can be applied against a column within a file/sheet.', anyOf: [ + { + $ref: '#/$defs/column_required_validator' + }, { $ref: '#/$defs/column_format_validator' }, @@ -128,12 +151,6 @@ export const submissionValidationSchema = { { $ref: '#/$defs/column_range_validator' }, - { - $ref: '#/$defs/column_unique_validator' - }, - { - $ref: '#/$defs/column_key_validator' - }, { $ref: '#/$defs/column_numeric_validator' } @@ -165,6 +182,36 @@ export const submissionValidationSchema = { }, additionalProperties: false }, + workbook_parent_child_key_match_validator: { + description: + 'Validates that this workbook submission file does not contain keys belonging to a child sheet that are missing in its parent sheet', + type: 'object', + properties: { + workbook_parent_child_key_match_validator: { + type: 'object', + required: ['child_worksheet_name', 'parent_worksheet_name', 'column_names'], + properties: { + description: { + type: 'string' + }, + child_worksheet_name: { + type: 'string' + }, + parent_worksheet_name: { + type: 'string' + }, + column_names: { + type: 'array', + items: { + type: 'string' + } + } + }, + additionalProperties: false + } + }, + additionalProperties: false + }, mimetype_validator: { description: 'Validates that the mimetype of this submission/file is in an allowed set of values', type: 'object', @@ -287,6 +334,25 @@ export const submissionValidationSchema = { }, additionalProperties: false }, + column_required_validator: { + description: 'Validates that this column value is not empty', + type: 'object', + properties: { + column_required_validator: { + type: 'object', + properties: { + name: { + type: 'string' + }, + description: { + type: 'string' + } + }, + additionalProperties: false + } + }, + additionalProperties: false + }, column_format_validator: { description: 'Validates that this column value matches a regex', type: 'object', @@ -316,7 +382,6 @@ export const submissionValidationSchema = { }, additionalProperties: false }, - column_numeric_validator: { description: 'Validates that this column is a number', type: 'object', @@ -386,54 +451,21 @@ export const submissionValidationSchema = { }, additionalProperties: false }, - column_unique_validator: { - description: 'Validates that this column value is unique within this column', + file_column_unique_validator: { + description: 'Validates that the column(s) are unique', type: 'object', properties: { - column_unique_validator: { + file_column_unique_validator: { type: 'object', properties: { - name: { - type: 'string' - }, - description: { - type: 'string' - }, - is_unique: { - type: 'boolean' - } - }, - additionalProperties: false - } - }, - additionalProperties: false - }, - column_key_validator: { - description: 'Validates that this column value has a matching counterpart in the target `file` and `column`', - type: 'object', - properties: { - column_key_validator: { - type: 'object', - properties: { - name: { - type: 'string' - }, - description: { - type: 'string' - }, - parent_key: { - type: 'object', - properties: { - file: { - type: 'string' - }, - column: { - type: 'string' - } + column_names: { + type: 'array', + items: { + type: 'string' } - } - }, - additionalProperties: false + }, + additionalProperties: false + } } }, additionalProperties: false diff --git a/api/src/models/gcnotify.ts b/api/src/models/gcnotify.ts deleted file mode 100644 index 61e45de0e0..0000000000 --- a/api/src/models/gcnotify.ts +++ /dev/null @@ -1,16 +0,0 @@ -export interface IgcNotifyPostReturn { - content: object; - id: string; - reference: string; - scheduled_for: string; - template: object; - uri: string; -} - -export interface IgcNotifyGenericMessage { - subject: string; - header: string; - body1: string; - body2: string; - footer: string; -} diff --git a/api/src/models/occurrence-create.ts b/api/src/models/occurrence-create.ts index f37cd76c5c..b0b72e86ad 100644 --- a/api/src/models/occurrence-create.ts +++ b/api/src/models/occurrence-create.ts @@ -1,7 +1,3 @@ -import { getLogger } from '../utils/logger'; - -const defaultLog = getLogger('models/occurrence-create'); - /** * Pre-processes POST occurrences data * @@ -21,8 +17,6 @@ export class PostOccurrence { eventDate: string; constructor(obj?: any) { - defaultLog.debug({ label: 'PostOccurrence', message: 'params', obj }); - this.associatedTaxa = obj?.associatedTaxa || null; this.lifeStage = obj?.lifeStage || null; this.sex = obj?.sex || null; diff --git a/api/src/models/occurrence-view.ts b/api/src/models/occurrence-view.ts deleted file mode 100644 index f19c486070..0000000000 --- a/api/src/models/occurrence-view.ts +++ /dev/null @@ -1,44 +0,0 @@ -import { getLogger } from '../utils/logger'; - -const defaultLog = getLogger('models/occurrence-view'); - -/** - * Pre-processes GET occurrences data for view-only purposes - * - * @export - * @class GetOccurrencesViewData - */ -export class GetOccurrencesViewData { - occurrences: any[]; - - constructor(occurrencesData?: any) { - defaultLog.debug({ - label: 'GetOccurrencesViewData', - message: 'params', - occurrencesData: { - ...occurrencesData, - geometry: occurrencesData?.geometry?.map((item: any) => { - return { ...item, geometry: 'Too big to print' }; - }) - } - }); - - this.occurrences = occurrencesData?.map((occurrence: any) => { - const feature = - (occurrence.geometry && { type: 'Feature', geometry: JSON.parse(occurrence.geometry), properties: {} }) || null; - - return { - geometry: feature, - taxonId: occurrence.taxonid, - occurrenceId: occurrence.occurrence_id, - individualCount: Number(occurrence.individualcount), - lifeStage: occurrence.lifestage, - sex: occurrence.sex, - organismQuantity: Number(occurrence.organismquantity), - organismQuantityType: occurrence.organismquantitytype, - vernacularName: occurrence.vernacularname, - eventDate: occurrence.eventdate - }; - }); - } -} diff --git a/api/src/models/permit-no-sampling.test.ts b/api/src/models/permit-no-sampling.test.ts deleted file mode 100644 index 5c2f0ecfca..0000000000 --- a/api/src/models/permit-no-sampling.test.ts +++ /dev/null @@ -1,147 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { PostPermitNoSamplingObject } from './permit-no-sampling'; -import { PostPermitData } from './project-create'; - -describe('postPermitNoSamplingObject', () => { - describe('No values provided', () => { - let postPermitNoSamplingObject: PostPermitNoSamplingObject; - - before(() => { - postPermitNoSamplingObject = new PostPermitNoSamplingObject(null); - }); - - it('sets coordinator to default values', function () { - expect(postPermitNoSamplingObject.coordinator).to.equal(null); - }); - - it('sets permit to default values', function () { - expect(postPermitNoSamplingObject.permit).to.equal(null); - }); - }); - - describe('All values provided', () => { - let postPermitNoSamplingObject: PostPermitNoSamplingObject; - - const obj = { - coordinator: { - first_name: 'first_name', - last_name: 'last_name', - email_address: 'email_address', - coordinator_agency: 'coordinator_agency', - share_contact_details: 'true' - }, - permit: { - permits: [ - { - permit_number: '123', - permit_type: 'type 1' - }, - { - permit_number: '456', - permit_type: 'type 2' - } - ], - existing_permits: [1, 2] - } - }; - - before(() => { - postPermitNoSamplingObject = new PostPermitNoSamplingObject(obj); - }); - - it('sets coordinator', function () { - expect(postPermitNoSamplingObject.coordinator).to.deep.equal({ - first_name: 'first_name', - last_name: 'last_name', - email_address: 'email_address', - coordinator_agency: 'coordinator_agency', - share_contact_details: true - }); - }); - - it('sets permit', function () { - expect(postPermitNoSamplingObject.permit).to.deep.equal({ - permits: [ - { - permit_number: '123', - permit_type: 'type 1' - }, - { - permit_number: '456', - permit_type: 'type 2' - } - ], - existing_permits: [ - { - permit_id: 1 - }, - { - permit_id: 2 - } - ] - }); - }); - }); -}); - -describe('PostPermitNoSamplingData', () => { - describe('No values provided', () => { - let postPermitNoSamplingData: PostPermitData; - - before(() => { - postPermitNoSamplingData = new PostPermitData(null); - }); - - it('sets permit to default values', function () { - expect(postPermitNoSamplingData.permits).to.eql([]); - }); - }); - - describe('All values provided where permits has no length', () => { - let postPermitNoSamplingData: PostPermitData; - - const obj = { permits: [] }; - - before(() => { - postPermitNoSamplingData = new PostPermitData(obj); - }); - - it('sets permits', function () { - expect(postPermitNoSamplingData.permits).to.eql([]); - }); - }); - - describe('All values provided where permits is null', () => { - let postPermitNoSamplingData: PostPermitData; - - const obj = { permits: null }; - - before(() => { - postPermitNoSamplingData = new PostPermitData(obj); - }); - - it('sets permits', function () { - expect(postPermitNoSamplingData.permits).to.eql([]); - }); - }); - - describe('All values provided where permits is a valid array', () => { - let postPermitNoSamplingData: PostPermitData; - - const obj = { permits: [{ permit_number: 1, permit_type: 'type' }] }; - - before(() => { - postPermitNoSamplingData = new PostPermitData(obj); - }); - - it('sets permits', function () { - expect(postPermitNoSamplingData.permits).to.eql([ - { - permit_number: 1, - permit_type: 'type' - } - ]); - }); - }); -}); diff --git a/api/src/models/permit-no-sampling.ts b/api/src/models/permit-no-sampling.ts deleted file mode 100644 index 18da6fc329..0000000000 --- a/api/src/models/permit-no-sampling.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { getLogger } from '../utils/logger'; -import { PostCoordinatorData, PostPermitData } from './project-create'; - -const defaultLog = getLogger('models/permit-no-sampling'); - -/** - * Processes POST /permit-no-sampling request data when no sampling is conducted. - * - * @export - * @class PostPermitNoSamplingObject - */ -export class PostPermitNoSamplingObject { - coordinator: PostCoordinatorData; - permit: PostPermitData; - - constructor(obj?: any) { - defaultLog.debug({ label: 'PostPermitNoSamplingObject', message: 'params', obj }); - - this.coordinator = (obj?.coordinator && new PostCoordinatorData(obj.coordinator)) || null; - this.permit = (obj?.permit && new PostPermitData(obj.permit)) || null; - } -} - -export interface IPostPermitNoSampling { - permit_number: string; - permit_type: string; -} diff --git a/api/src/models/project-create.test.ts b/api/src/models/project-create.test.ts index 1925ad1ece..3ad169c4a0 100644 --- a/api/src/models/project-create.test.ts +++ b/api/src/models/project-create.test.ts @@ -8,7 +8,6 @@ import { PostLocationData, PostObjectivesData, PostPartnershipsData, - PostPermitData, PostProjectData, PostProjectObject } from './project-create'; @@ -25,10 +24,6 @@ describe('PostProjectObject', () => { expect(projectPostObject.coordinator).to.equal(null); }); - it('sets permit', function () { - expect(projectPostObject.permit).to.equal(null); - }); - it('sets project', function () { expect(projectPostObject.project).to.equal(null); }); @@ -65,13 +60,6 @@ describe('PostProjectObject', () => { coordinator_agency: 'agency', share_contact_details: 'true' }, - permit: { - permits: [ - { - permit_number: 1 - } - ] - }, project: { project_name: 'name_test_data', project_type: 'test_type', @@ -105,7 +93,7 @@ describe('PostProjectObject', () => { ] }, funding: { - funding_sources: [ + fundingSources: [ { agency_id: 1, investment_action_category: 1, @@ -255,100 +243,6 @@ describe('PostObjectivesData', () => { }); }); -describe('PostPermitData', () => { - describe('No values provided', () => { - let projectPermitData: PostPermitData; - - before(() => { - projectPermitData = new PostPermitData(null); - }); - - it('sets permits', function () { - expect(projectPermitData.permits).to.eql([]); - }); - }); - - describe('All values provided are null', () => { - let projectPermitData: PostPermitData; - - before(() => { - projectPermitData = new PostPermitData({ - permits: null - }); - }); - - it('sets permits', function () { - expect(projectPermitData.permits).to.eql([]); - }); - }); - - describe('All values provided are empty arrays', () => { - let projectPermitData: PostPermitData; - - before(() => { - projectPermitData = new PostPermitData({ - permits: [] - }); - }); - - it('sets permits', function () { - expect(projectPermitData.permits).to.eql([]); - }); - }); - - describe('All values provided with sampling conducted as true', () => { - let projectPermitData: PostPermitData; - - const obj = { - permits: [ - { - permit_number: '1', - permit_type: 'permit type' - } - ] - }; - - before(() => { - projectPermitData = new PostPermitData(obj); - }); - - it('sets permits', function () { - expect(projectPermitData.permits).to.eql([ - { - permit_number: '1', - permit_type: 'permit type' - } - ]); - }); - }); - - describe('All values provided with sampling conducted as false', () => { - let projectPermitData: PostPermitData; - - const obj = { - permits: [ - { - permit_number: '1', - permit_type: 'permit type' - } - ] - }; - - before(() => { - projectPermitData = new PostPermitData(obj); - }); - - it('sets permits', function () { - expect(projectPermitData.permits).to.eql([ - { - permit_number: '1', - permit_type: 'permit type' - } - ]); - }); - }); -}); - describe('PostCoordinatorData', () => { describe('No values provided', () => { let projectCoordinatorData: PostCoordinatorData; @@ -628,8 +522,8 @@ describe('PostFundingData', () => { data = new PostFundingData(null); }); - it('sets funding_sources', () => { - expect(data.funding_sources).to.eql([]); + it('sets fundingSources', () => { + expect(data.fundingSources).to.eql([]); }); }); @@ -637,15 +531,15 @@ describe('PostFundingData', () => { let data: PostFundingData; const obj = { - funding_sources: null + fundingSources: null }; before(() => { data = new PostFundingData(obj); }); - it('sets funding_sources', () => { - expect(data.funding_sources).to.eql([]); + it('sets fundingSources', () => { + expect(data.fundingSources).to.eql([]); }); }); @@ -653,15 +547,15 @@ describe('PostFundingData', () => { let data: PostFundingData; const obj = { - funding_sources: [] + fundingSources: [] }; before(() => { data = new PostFundingData(obj); }); - it('sets funding_sources', () => { - expect(data.funding_sources).to.eql([]); + it('sets fundingSources', () => { + expect(data.fundingSources).to.eql([]); }); }); @@ -669,7 +563,7 @@ describe('PostFundingData', () => { let data: PostFundingData; const obj = { - funding_sources: [ + fundingSources: [ { agency_id: 1, investment_action_category: 1, @@ -685,8 +579,8 @@ describe('PostFundingData', () => { data = new PostFundingData(obj); }); - it('sets funding_sources', () => { - expect(data.funding_sources).to.eql(obj.funding_sources); + it('sets fundingSources', () => { + expect(data.fundingSources).to.eql(obj.fundingSources); }); }); }); diff --git a/api/src/models/project-create.ts b/api/src/models/project-create.ts index 2bf8bdc510..8a2c47ec8d 100644 --- a/api/src/models/project-create.ts +++ b/api/src/models/project-create.ts @@ -11,7 +11,6 @@ const defaultLog = getLogger('models/project-create'); */ export class PostProjectObject { coordinator: PostCoordinatorData; - permit: PostPermitData; project: PostProjectData; objectives: PostObjectivesData; location: PostLocationData; @@ -23,7 +22,6 @@ export class PostProjectObject { defaultLog.debug({ label: 'PostProjectObject', message: 'params', obj }); this.coordinator = (obj?.coordinator && new PostCoordinatorData(obj.coordinator)) || null; - this.permit = (obj?.permit && new PostPermitData(obj.permit)) || null; this.project = (obj?.project && new PostProjectData(obj.project)) || null; this.objectives = (obj?.project && new PostObjectivesData(obj.objectives)) || null; this.location = (obj?.location && new PostLocationData(obj.location)) || null; @@ -57,49 +55,6 @@ export class PostCoordinatorData { } } -export interface IPostPermit { - permit_number: string; - permit_type: string; -} - -export interface IPostExistingPermit { - permit_id: number; -} - -/** - * Processes POST /project permit data - * - * @export - * @class PostPermitData - */ -export class PostPermitData { - permits: IPostPermit[]; - existing_permits: IPostExistingPermit[]; - - constructor(obj?: any) { - defaultLog.debug({ label: 'PostPermitData', message: 'params', obj }); - - this.permits = - (obj?.permits?.length && - obj.permits.map((item: any) => { - return { - permit_number: item.permit_number, - permit_type: item.permit_type - }; - })) || - []; - - this.existing_permits = - (obj?.existing_permits?.length && - obj.existing_permits.map((item: any) => { - return { - permit_id: item - }; - })) || - []; - } -} - /** * Processes POST /project project data. * @@ -237,13 +192,13 @@ export class PostFundingSource { * @class PostFundingData */ export class PostFundingData { - funding_sources: PostFundingSource[]; + fundingSources: PostFundingSource[]; constructor(obj?: any) { defaultLog.debug({ label: 'PostFundingData', message: 'params', obj }); - this.funding_sources = - (obj?.funding_sources?.length && obj.funding_sources.map((item: any) => new PostFundingSource(item))) || []; + this.fundingSources = + (obj?.fundingSources?.length && obj.fundingSources.map((item: any) => new PostFundingSource(item))) || []; } } diff --git a/api/src/models/project-survey-attachments.test.ts b/api/src/models/project-survey-attachments.test.ts index 926b08d400..d9cda9149e 100644 --- a/api/src/models/project-survey-attachments.test.ts +++ b/api/src/models/project-survey-attachments.test.ts @@ -30,8 +30,7 @@ describe('GetAttachmentsData', () => { file_name: 'filename', create_date: '2020/04/04', file_size: 24, - file_type: 'Video', - security_token: 'token123' + file_type: 'Video' } ]; @@ -51,7 +50,6 @@ describe('GetAttachmentsData', () => { expect(getAttachmentsData.attachmentsList[0].id).to.equal(1); expect(getAttachmentsData.attachmentsList[0].lastModified).to.match(new RegExp('2020-04-04T.*')); expect(getAttachmentsData.attachmentsList[0].size).to.equal(24); - expect(getAttachmentsData.attachmentsList[0].securityToken).to.equal('token123'); }); }); }); @@ -103,7 +101,7 @@ describe('PutReportAttachmentMetaData', () => { expect(putReportAttachmentData.year_published).to.equal(0); expect(putReportAttachmentData.authors).to.eql([]); expect(putReportAttachmentData.description).to.equal(null); - expect(putReportAttachmentData.revision_count).to.equal(null); + expect(putReportAttachmentData.revision_count).to.equal(0); }); }); diff --git a/api/src/models/project-survey-attachments.ts b/api/src/models/project-survey-attachments.ts index b9a8df31f3..eb63acee09 100644 --- a/api/src/models/project-survey-attachments.ts +++ b/api/src/models/project-survey-attachments.ts @@ -11,23 +11,24 @@ const defaultLog = getLogger('models/project-survey-attachments'); */ export class GetAttachmentsData { attachmentsList: any[]; + reportAttachmentsList: any[]; - constructor(attachmentsData?: any) { + constructor(attachmentsData?: any, reportAttachmentsData?: any) { defaultLog.debug({ label: 'GetAttachmentsData', message: 'params', attachmentsData }); - this.attachmentsList = - (attachmentsData?.length && - attachmentsData.map((item: any) => { - return { - id: item.id, - fileName: item.file_name, - fileType: item.file_type || 'Report', - lastModified: moment(item.update_date || item.create_date).toISOString(), - size: item.file_size, - securityToken: item.security_token - }; - })) || - []; + const mapAttachment = (item: any) => { + return { + id: item.id, + fileName: item.file_name, + fileType: item.file_type || 'Report', + lastModified: moment(item.update_date || item.create_date).toISOString(), + size: item.file_size, + status: item.status + }; + }; + + this.attachmentsList = (attachmentsData?.length && attachmentsData.map(mapAttachment)) || []; + this.reportAttachmentsList = (reportAttachmentsData?.length && reportAttachmentsData.map(mapAttachment)) || []; } } @@ -56,7 +57,7 @@ export class PutReportAttachmentMetadata extends PostReportAttachmentMetadata { constructor(obj?: any) { super(obj); - this.revision_count = (obj && obj?.revision_count) || null; + this.revision_count = (obj && obj?.revision_count) || 0; } } diff --git a/api/src/models/project-update.test.ts b/api/src/models/project-update.test.ts index 85293395a0..0a30417da7 100644 --- a/api/src/models/project-update.test.ts +++ b/api/src/models/project-update.test.ts @@ -350,17 +350,13 @@ describe('PutFundingSource', () => { before(() => { data = new PutFundingSource({ - fundingSources: [ - { - id: 1, - investment_action_category: 1, - agency_project_id: 'agency project id', - funding_amount: 20, - start_date: '2020/04/04', - end_date: '2020/05/05', - revision_count: 1 - } - ] + id: 1, + investment_action_category: 1, + agency_project_id: 'agency project id', + funding_amount: 20, + start_date: '2020/04/04', + end_date: '2020/05/05', + revision_count: 1 }); }); diff --git a/api/src/models/project-update.ts b/api/src/models/project-update.ts index 0130eddd31..34441593af 100644 --- a/api/src/models/project-update.ts +++ b/api/src/models/project-update.ts @@ -117,15 +117,30 @@ export class PutFundingSource { constructor(obj?: any) { defaultLog.debug({ label: 'PutFundingSource', message: 'params', obj }); - const fundingSource = obj?.fundingSources?.length && obj.fundingSources[0]; - - this.id = fundingSource?.id || null; - this.investment_action_category = fundingSource?.investment_action_category || null; - this.agency_project_id = fundingSource?.agency_project_id || null; - this.funding_amount = fundingSource?.funding_amount || null; - this.start_date = fundingSource?.start_date || null; - this.end_date = fundingSource?.end_date || null; - this.revision_count = fundingSource?.revision_count ?? null; + this.id = obj?.id || null; + this.investment_action_category = obj?.investment_action_category || null; + this.agency_project_id = obj?.agency_project_id || null; + this.funding_amount = obj?.funding_amount || null; + this.start_date = obj?.start_date || null; + this.end_date = obj?.end_date || null; + this.revision_count = obj?.revision_count ?? null; + } +} + +/** + * Processes PUT /project funding data + * + * @export + * @class PostFundingData + */ +export class PutFundingData { + fundingSources: PutFundingSource[]; + + constructor(obj?: any) { + defaultLog.debug({ label: 'PostFundingData', message: 'params', obj }); + + this.fundingSources = + (obj?.fundingSources?.length && obj.fundingSources.map((item: any) => new PutFundingSource(item))) || []; } } diff --git a/api/src/models/project-view.test.ts b/api/src/models/project-view.test.ts index bf68d2862d..f7347cfb33 100644 --- a/api/src/models/project-view.test.ts +++ b/api/src/models/project-view.test.ts @@ -2,14 +2,15 @@ import { expect } from 'chai'; import { describe } from 'mocha'; import { COMPLETION_STATUS } from '../constants/status'; import { + GetAttachmentsData, GetCoordinatorData, GetFundingData, GetIUCNClassificationData, GetLocationData, GetObjectivesData, GetPartnershipsData, - GetPermitData, - GetProjectData + GetProjectData, + GetReportAttachmentsData } from './project-view'; describe('GetProjectData', () => { @@ -110,7 +111,8 @@ describe('GetObjectivesData', () => { const obj = { objectives: 'these are the project objectives', - caveats: 'these are some interesting caveats' + caveats: 'these are some interesting caveats', + revision_count: 'revision' }; before(() => { @@ -124,6 +126,10 @@ describe('GetObjectivesData', () => { it('sets caveats', function () { expect(projectObjectivesData.caveats).to.equal(obj.caveats); }); + + it('sets revision_count', function () { + expect(projectObjectivesData.revision_count).to.equal(obj.revision_count); + }); }); }); @@ -164,7 +170,8 @@ describe('GetCoordinatorData', () => { coordinator_last_name: 'last', coordinator_email_address: 'email@example.com', coordinator_agency_name: 'agency', - coordinator_public: true + coordinator_public: true, + revision_count: 'count' }; before(() => { @@ -190,43 +197,9 @@ describe('GetCoordinatorData', () => { it('sets share_contact_details', function () { expect(projectCoordinatorData.share_contact_details).to.equal('true'); }); - }); -}); - -describe('GetPermitData', () => { - describe('No values provided', () => { - let projectPermitData: GetPermitData; - - before(() => { - projectPermitData = new GetPermitData((null as unknown) as any[]); - }); - it('sets permits', function () { - expect(projectPermitData.permits).to.eql([]); - }); - }); - - describe('All values provided', () => { - let projectPermitData: GetPermitData; - - const permits = [ - { - number: '1', - type: 'permit type' - } - ]; - - before(() => { - projectPermitData = new GetPermitData(permits); - }); - - it('sets permits', function () { - expect(projectPermitData.permits).to.eql([ - { - permit_number: '1', - permit_type: 'permit type' - } - ]); + it('sets revision_count', function () { + expect(projectCoordinatorData.revision_count).to.equal('count'); }); }); }); @@ -284,11 +257,13 @@ describe('GetLocationData', () => { const locationDataObj = [ { location_description, - geometry + geometry, + revision_count: 'count' }, { location_description, - geometry + geometry, + revision_count: 'count' } ]; @@ -303,6 +278,10 @@ describe('GetLocationData', () => { it('sets the geometry', function () { expect(locationData.geometry).to.eql(geometry); }); + + it('sets revision_count', function () { + expect(locationData.revision_count).to.equal('count'); + }); }); }); @@ -366,7 +345,7 @@ describe('GetFundingData', () => { projectFundingData = new GetFundingData((null as unknown) as any[]); }); - it('sets permits', function () { + it('sets funding sources', function () { expect(projectFundingData.fundingSources).to.eql([]); }); }); @@ -378,7 +357,7 @@ describe('GetFundingData', () => { projectFundingData = new GetFundingData([]); }); - it('sets classification details', function () { + it('sets funding sources', function () { expect(projectFundingData.fundingSources).to.eql([]); }); }); @@ -405,7 +384,7 @@ describe('GetFundingData', () => { projectFundingData = new GetFundingData(fundings); }); - it('sets permits', function () { + it('sets funding sources', function () { expect(projectFundingData.fundingSources).to.eql(fundings); }); }); @@ -501,3 +480,200 @@ describe('GetPartnershipsData', () => { }); }); }); + +describe('GetAttachmentsData', () => { + describe('No values provided', () => { + let data: GetAttachmentsData; + + before(() => { + data = new GetAttachmentsData((null as unknown) as any[]); + }); + + it('sets attachmentDetails', function () { + expect(data.attachmentDetails).to.eql([]); + }); + }); + + describe('Empty arrays as values provided', () => { + let data: GetAttachmentsData; + + before(() => { + data = new GetAttachmentsData([]); + }); + + it('sets attachmentDetails', function () { + expect(data.attachmentDetails).to.eql([]); + }); + }); + + describe('some attachmentDetails values provided', () => { + let data: GetAttachmentsData; + + const attachmentDetails = [{ file_name: 1 }, { file_name: 2 }]; + + before(() => { + data = new GetAttachmentsData(attachmentDetails); + }); + + it('sets file_name', function () { + expect(data.attachmentDetails).to.eql([ + { + file_name: 1, + file_type: undefined, + title: undefined, + description: undefined, + key: undefined, + file_size: undefined + }, + { + file_name: 2, + file_type: undefined, + title: undefined, + description: undefined, + key: undefined, + file_size: undefined + } + ]); + }); + }); + + describe('all attachmentDetails values provided', () => { + let data: GetAttachmentsData; + + const attachmentDetails = [ + { + file_name: 1, + file_type: 'type', + title: 'title', + description: 'descript', + file_size: 'file_size', + key: 'key' + }, + { + file_name: 2, + file_type: 'type', + title: 'title', + description: 'descript', + file_size: 'file_size', + key: 'key' + } + ]; + + before(() => { + data = new GetAttachmentsData(attachmentDetails); + }); + + it('sets all fields', function () { + expect(data.attachmentDetails).to.eql([ + { + file_name: 1, + file_type: 'type', + title: 'title', + description: 'descript', + key: 'key', + file_size: 'file_size' + }, + { + file_name: 2, + file_type: 'type', + title: 'title', + description: 'descript', + key: 'key', + file_size: 'file_size' + } + ]); + }); + }); +}); + +describe('GetReportAttachmentsData', () => { + describe('No values provided', () => { + it('sets attachmentDetails', function () { + const data: GetReportAttachmentsData = new GetReportAttachmentsData((null as unknown) as any[]); + + expect(data.attachmentDetails).to.eql([]); + }); + }); + + describe('Empty arrays as values provided', () => { + it('sets attachmentDetails', function () { + const data: GetReportAttachmentsData = new GetReportAttachmentsData([]); + + expect(data.attachmentDetails).to.eql([]); + }); + }); + + describe('some attachmentDetails asdasdsadsasd values provided', () => { + it('sets file_name', function () { + const attachmentDetails = [{ file_name: 1 }, { file_name: 2 }]; + + const data: GetReportAttachmentsData = new GetReportAttachmentsData(attachmentDetails); + expect(data.attachmentDetails).to.eql([ + { + file_name: 1, + title: undefined, + year: undefined, + description: undefined, + key: undefined, + file_size: undefined + }, + { + file_name: 2, + title: undefined, + year: undefined, + description: undefined, + key: undefined, + file_size: undefined + } + ]); + }); + }); + + describe('all attachmentDetails values provided', () => { + it('sets all fields', function () { + const attachmentDetails = [ + { + file_name: 1, + title: 'title', + year: '1', + description: 'descript', + file_size: 'size', + key: 'key', + authors: [{ author: 'author' }] + }, + { + file_name: 2, + file_type: 'type', + title: 'title', + year: '2', + description: 'descript', + file_size: 'size', + key: 'key', + authors: [{ author: 'author' }] + } + ]; + const data: GetReportAttachmentsData = new GetReportAttachmentsData(attachmentDetails); + + expect(data.attachmentDetails).to.eql([ + { + file_name: 1, + title: 'title', + year: '1', + description: 'descript', + key: 'key', + file_size: 'size', + authors: [{ author: 'author' }] + }, + { + file_name: 2, + title: 'title', + year: '2', + description: 'descript', + key: 'key', + file_size: 'size', + authors: [{ author: 'author' }] + } + ]); + }); + }); +}); diff --git a/api/src/models/project-view.ts b/api/src/models/project-view.ts index a577f15c42..6d9f2cad00 100644 --- a/api/src/models/project-view.ts +++ b/api/src/models/project-view.ts @@ -5,7 +5,6 @@ import { COMPLETION_STATUS } from '../constants/status'; export interface IGetProject { id: number; coordinator: GetCoordinatorData; - permit: GetPermitData; project: GetProjectData; objectives: GetObjectivesData; location: GetLocationData; @@ -29,7 +28,6 @@ export class GetProjectData { end_date: string; comments: string; completion_status: string; - publish_date: string; revision_count: number; constructor(projectData?: any, activityData?: any[]) { @@ -46,7 +44,6 @@ export class GetProjectData { moment(projectData.end_date).endOf('day').isBefore(moment()) && COMPLETION_STATUS.COMPLETED) || COMPLETION_STATUS.ACTIVE; - this.publish_date = String(projectData?.publish_date || ''); this.revision_count = projectData?.revision_count ?? null; } } @@ -93,33 +90,6 @@ export class GetCoordinatorData { } } -export interface IGetPermit { - permit_number: string; - permit_type: string; -} - -/** - * Pre-processes GET /projects/{id} permit data - * - * @export - * @class GetPermitData - */ -export class GetPermitData { - permits: IGetPermit[]; - - constructor(permitData?: any[]) { - this.permits = - (permitData?.length && - permitData.map((item: any) => { - return { - permit_number: item.number, - permit_type: item.type - }; - })) || - []; - } -} - /** * Pre-processes GET /projects/{id} location data * @@ -223,3 +193,80 @@ export class GetPartnershipsData { (stakeholder_partnerships?.length && stakeholder_partnerships.map((item: any) => item.partnership_name)) || []; } } + +interface IGetAttachmentsSource { + file_name: string; + file_type: string; + title: string; + description: string; + key: string; + file_size: string; +} + +/** + * Pre-processes GET /projects/{id} attachments data + * + * @export + * @class GetAttachmentsData + */ +export class GetAttachmentsData { + attachmentDetails: IGetAttachmentsSource[]; + + constructor(attachments?: any[]) { + this.attachmentDetails = + (attachments?.length && + attachments.map((item: any) => { + return { + file_name: item.file_name, + file_type: item.file_type, + title: item.title, + description: item.description, + key: item.key, + file_size: item.file_size + }; + })) || + []; + } +} + +interface IGetReportAttachmentsSource { + file_name: string; + title: string; + year: string; + description: string; + key: string; + file_size: string; + authors?: { author: string }[]; +} + +/** + * Pre-processes GET /projects/{id} report attachments data + * + * @export + * @class GetReportAttachmentsData + */ +export class GetReportAttachmentsData { + attachmentDetails: IGetReportAttachmentsSource[]; + + constructor(attachments?: any[]) { + this.attachmentDetails = + (attachments?.length && + attachments.map((item: any) => { + const attachmentItem = { + file_name: item.file_name, + title: item.title, + year: item.year, + description: item.description, + key: item.key, + file_size: item.file_size + }; + + if (item.authors?.length) { + attachmentItem['authors'] = item.authors; + } + + return attachmentItem; + })) || + []; + } +} diff --git a/api/src/models/public/project.test.ts b/api/src/models/public/project.test.ts deleted file mode 100644 index bc47b0c2e5..0000000000 --- a/api/src/models/public/project.test.ts +++ /dev/null @@ -1,105 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { GetPublicCoordinatorData } from './project'; - -describe('GetPublicCoordinatorData', () => { - describe('No values provided', () => { - let projectCoordinatorData: GetPublicCoordinatorData; - - before(() => { - projectCoordinatorData = new GetPublicCoordinatorData(null); - }); - - it('sets first_name', function () { - expect(projectCoordinatorData.first_name).to.equal(''); - }); - - it('sets last_name', function () { - expect(projectCoordinatorData.last_name).to.equal(''); - }); - - it('sets email_address', function () { - expect(projectCoordinatorData.email_address).to.equal(''); - }); - - it('sets coordinator_agency', function () { - expect(projectCoordinatorData.coordinator_agency).to.equal(''); - }); - - it('sets share_contact_details', function () { - expect(projectCoordinatorData.share_contact_details).to.equal('false'); - }); - }); - - describe('All values provided where coordinator public is true', () => { - let projectCoordinatorData: GetPublicCoordinatorData; - - const obj = { - coordinator_first_name: 'first', - coordinator_last_name: 'last', - coordinator_email_address: 'email@example.com', - coordinator_agency_name: 'agency', - coordinator_public: true - }; - - before(() => { - projectCoordinatorData = new GetPublicCoordinatorData(obj); - }); - - it('sets first_name', function () { - expect(projectCoordinatorData.first_name).to.equal(obj.coordinator_first_name); - }); - - it('sets last_name', function () { - expect(projectCoordinatorData.last_name).to.equal(obj.coordinator_last_name); - }); - - it('sets email_address', function () { - expect(projectCoordinatorData.email_address).to.equal(obj.coordinator_email_address); - }); - - it('sets coordinator_agency', function () { - expect(projectCoordinatorData.coordinator_agency).to.equal(obj.coordinator_agency_name); - }); - - it('sets share_contact_details', function () { - expect(projectCoordinatorData.share_contact_details).to.equal('true'); - }); - }); - - describe('All values provided where coordinator public is false', () => { - let projectCoordinatorData: GetPublicCoordinatorData; - - const obj = { - coordinator_first_name: 'first', - coordinator_last_name: 'last', - coordinator_email_address: 'email@example.com', - coordinator_agency_name: 'agency', - coordinator_public: false - }; - - before(() => { - projectCoordinatorData = new GetPublicCoordinatorData(obj); - }); - - it('sets first_name', function () { - expect(projectCoordinatorData.first_name).to.equal(''); - }); - - it('sets last_name', function () { - expect(projectCoordinatorData.last_name).to.equal(''); - }); - - it('sets email_address', function () { - expect(projectCoordinatorData.email_address).to.equal(''); - }); - - it('sets coordinator_agency', function () { - expect(projectCoordinatorData.coordinator_agency).to.equal(obj.coordinator_agency_name); - }); - - it('sets share_contact_details', function () { - expect(projectCoordinatorData.share_contact_details).to.equal('false'); - }); - }); -}); diff --git a/api/src/models/public/project.ts b/api/src/models/public/project.ts deleted file mode 100644 index e548491774..0000000000 --- a/api/src/models/public/project.ts +++ /dev/null @@ -1,58 +0,0 @@ -import moment from 'moment'; -import { getLogger } from '../../utils/logger'; - -const defaultLog = getLogger('models/public/project'); - -/** - * Pre-processes GET /projects/{id} coordinator data for public (published) projects - * - * @export - * @class GetPublicCoordinatorData - */ -export class GetPublicCoordinatorData { - first_name: string; - last_name: string; - email_address: string; - coordinator_agency: string; - share_contact_details: string; - - constructor(obj?: any) { - defaultLog.debug({ label: 'GetPublicCoordinatorData', message: 'params', obj }); - - const isCoordinatorDataPublic = obj?.coordinator_public; - - this.first_name = (isCoordinatorDataPublic && obj?.coordinator_first_name) || ''; - this.last_name = (isCoordinatorDataPublic && obj?.coordinator_last_name) || ''; - this.email_address = (isCoordinatorDataPublic && obj?.coordinator_email_address) || ''; - this.coordinator_agency = obj?.coordinator_agency_name || ''; - this.share_contact_details = obj?.coordinator_public ? 'true' : 'false'; - } -} - -/** - * Pre-processes GET public (published) project attachments data - * - * @export - * @class GetPublicAttachmentsData - */ -export class GetPublicAttachmentsData { - attachmentsList: any[]; - - constructor(attachmentsData?: any) { - defaultLog.debug({ label: 'GetPublicAttachmentsData', message: 'params', attachmentsData }); - - this.attachmentsList = - (attachmentsData?.length && - attachmentsData.map((item: any) => { - return { - id: item.id, - fileName: item.file_name, - fileType: item.file_type || 'Report', - lastModified: moment(item.update_date || item.create_date).toISOString(), - size: item.file_size, - securityToken: item.is_secured ? 'true' : 'false' - }; - })) || - []; - } -} diff --git a/api/src/models/summaryresults-create.ts b/api/src/models/summaryresults-create.ts index 34d8ecf144..e07eefafbc 100644 --- a/api/src/models/summaryresults-create.ts +++ b/api/src/models/summaryresults-create.ts @@ -10,38 +10,51 @@ const defaultLog = getLogger('models/summary-results-create'); */ export class PostSummaryDetails { study_area_id: string; + population_unit: string; + block_sample_unit_id: string; parameter: string; stratum: string; - parameter_value: number; - parameter_estimate: number; + observed: number; + estimated: number; + sightability_model: string; + sightability_correction_factor: number; standard_error: number; coefficient_variation: number; confidence_level_percent: number; - confidence_limit_upper: number; confidence_limit_lower: number; + confidence_limit_upper: number; total_area_survey_sqm: number; - kilometres_surveyed: number; - sightability_model: string; + area_flown: number; + total_kilometers_surveyed: number; + best_parameter_flag: string; outlier_blocks_removed: string; - analysis_method: string; + total_marked_animals_observed: number; + marked_animals_available: number; + parameter_comments: number; constructor(obj?: any) { defaultLog.debug({ label: 'PostSummaryDetails', message: 'params', obj }); - this.study_area_id = obj?.study_area_id || null; + this.population_unit = obj?.population_unit || null; + this.block_sample_unit_id = obj?.block_sample_unit_id || null; this.parameter = obj?.parameter || null; this.stratum = obj?.stratum || null; - this.parameter_value = obj?.parameter_value || null; - this.parameter_estimate = obj?.parameter_estimate || null; + this.observed = obj?.observed || null; + this.estimated = obj?.estimated || null; + this.sightability_model = obj?.sightability_model || null; + this.sightability_correction_factor = obj?.sightability_correction_factor || null; this.standard_error = obj?.standard_error || null; this.coefficient_variation = obj?.coefficient_variation || null; this.confidence_level_percent = obj?.confidence_level_percent || null; - this.confidence_limit_upper = obj?.confidence_limit_upper || null; this.confidence_limit_lower = obj?.confidence_limit_lower || null; - this.total_area_survey_sqm = obj?.area || null; - this.kilometres_surveyed = obj?.area_flown || null; - this.sightability_model = obj?.sightability_model || null; + this.confidence_limit_upper = obj?.confidence_limit_upper || null; + this.total_area_survey_sqm = obj?.total_area_survey_sqm || null; + this.area_flown = obj?.area_flown || null; + this.total_kilometers_surveyed = obj?.total_kilometers_surveyed || null; + this.best_parameter_flag = obj?.best_parameter_flag || null; this.outlier_blocks_removed = obj?.outlier_blocks_removed || null; - this.analysis_method = obj?.analysis_method || null; + this.total_marked_animals_observed = obj?.total_marked_animals_observed || null; + this.marked_animals_available = obj?.marked_animals_available || null; + this.parameter_comments = obj?.parameter_comments || null; } } diff --git a/api/src/models/survey-create.test.ts b/api/src/models/survey-create.test.ts index dbc7bbec8d..0024bd9876 100644 --- a/api/src/models/survey-create.test.ts +++ b/api/src/models/survey-create.test.ts @@ -218,12 +218,8 @@ describe('PostPermitData', () => { data = new PostPermitData(null); }); - it('sets permit_number', () => { - expect(data.permit_number).to.eql(null); - }); - - it('sets ancillary_species', () => { - expect(data.permit_type).to.eql(null); + it('sets permits', () => { + expect(data.permits).to.eql([]); }); }); @@ -231,8 +227,12 @@ describe('PostPermitData', () => { let data: PostPermitData; const obj = { - permit_number: '12345', - permit_type: 'permit_type' + permits: [ + { + permit_number: '12345', + permit_type: 'permit_type' + } + ] }; before(() => { @@ -240,11 +240,11 @@ describe('PostPermitData', () => { }); it('sets permit_number', () => { - expect(data.permit_number).to.equal(obj.permit_number); + expect(data.permits[0].permit_number).to.equal(obj.permits[0].permit_number); }); it('sets permit_type', () => { - expect(data.permit_type).to.equal(obj.permit_type); + expect(data.permits[0].permit_type).to.equal(obj.permits[0].permit_type); }); }); }); diff --git a/api/src/models/survey-create.ts b/api/src/models/survey-create.ts index 260f4c5edd..6b02e39cea 100644 --- a/api/src/models/survey-create.ts +++ b/api/src/models/survey-create.ts @@ -49,12 +49,10 @@ export class PostSpeciesData { } export class PostPermitData { - permit_number: string; - permit_type: string; + permits: { permit_number: string; permit_type: string }[]; constructor(obj?: any) { - this.permit_number = obj?.permit_number || null; - this.permit_type = obj?.permit_type || null; + this.permits = obj?.permits || []; } } diff --git a/api/src/models/survey-update.test.ts b/api/src/models/survey-update.test.ts index bab8234439..f7cf17c3f9 100644 --- a/api/src/models/survey-update.test.ts +++ b/api/src/models/survey-update.test.ts @@ -218,12 +218,8 @@ describe('PutPermitData', () => { data = new PutSurveyPermitData(null); }); - it('sets permit_number', () => { - expect(data.permit_number).to.eql(null); - }); - - it('sets ancillary_species', () => { - expect(data.permit_type).to.eql(null); + it('sets permits', () => { + expect(data.permits).to.eql([]); }); }); @@ -231,20 +227,29 @@ describe('PutPermitData', () => { let data: PutSurveyPermitData; const obj = { - permit_number: '12345', - permit_type: 'permit_type' + permits: [ + { + permit_id: 1, + permit_number: '12345', + permit_type: 'permit_type' + } + ] }; before(() => { data = new PutSurveyPermitData(obj); }); + it('sets permit_id', () => { + expect(data.permits[0].permit_id).to.equal(obj.permits[0].permit_id); + }); + it('sets permit_number', () => { - expect(data.permit_number).to.equal('12345'); + expect(data.permits[0].permit_number).to.equal(obj.permits[0].permit_number); }); it('sets permit_type', () => { - expect(data.permit_type).to.equal('permit_type'); + expect(data.permits[0].permit_type).to.equal(obj.permits[0].permit_type); }); }); }); diff --git a/api/src/models/survey-update.ts b/api/src/models/survey-update.ts index 0ca90a9b2d..b4c6f7a6c0 100644 --- a/api/src/models/survey-update.ts +++ b/api/src/models/survey-update.ts @@ -50,12 +50,10 @@ export class PutSurveySpeciesData { } export class PutSurveyPermitData { - permit_number: string; - permit_type: string; + permits: { permit_id?: number; permit_number: string; permit_type: string }[]; constructor(obj?: any) { - this.permit_number = obj?.permit_number || null; - this.permit_type = obj?.permit_type || null; + this.permits = obj?.permits || []; } } diff --git a/api/src/models/survey-view.test.ts b/api/src/models/survey-view.test.ts index a2012c765e..36f5b9b411 100644 --- a/api/src/models/survey-view.test.ts +++ b/api/src/models/survey-view.test.ts @@ -1,9 +1,12 @@ import { expect } from 'chai'; import { describe } from 'mocha'; +import { IPermitModel } from '../repositories/permit-repository'; import { GetAncillarySpeciesData, + GetAttachmentsData, GetFocalSpeciesData, GetPermitData, + GetReportAttachmentsData, GetSurveyData, GetSurveyFundingSources, GetSurveyLocationData, @@ -31,6 +34,10 @@ describe('GetSurveyData', () => { expect(data.start_date).to.equal(null); }); + it('sets geojson', () => { + expect(data.geometry).to.eql([]); + }); + it('sets biologist_first_name', () => { expect(data.biologist_first_name).to.equal(''); }); @@ -48,7 +55,9 @@ describe('GetSurveyData', () => { end_date: '2020/04/04', start_date: '2020/03/03', lead_first_name: 'first', - lead_last_name: 'last' + geojson: [{ data: 'data' }], + lead_last_name: 'last', + revision_count: 'count' }; before(() => { @@ -67,6 +76,10 @@ describe('GetSurveyData', () => { expect(data.start_date).to.equal(obj.start_date); }); + it('sets geojson', () => { + expect(data.geometry).to.equal(obj.geojson); + }); + it('sets biologist_first_name', () => { expect(data.biologist_first_name).to.equal(obj.lead_first_name); }); @@ -74,6 +87,10 @@ describe('GetSurveyData', () => { it('sets biologist_last_name', () => { expect(data.biologist_last_name).to.equal(obj.lead_last_name); }); + + it('sets revision_count', function () { + expect(data.revision_count).to.equal('count'); + }); }); }); @@ -82,7 +99,7 @@ describe('GetFocalSpeciesData', () => { let data: GetFocalSpeciesData; before(() => { - data = new GetFocalSpeciesData([]); + data = new GetFocalSpeciesData(); }); it('sets focal_species', () => { @@ -121,7 +138,7 @@ describe('GetAncillarySpeciesData', () => { let data: GetAncillarySpeciesData; before(() => { - data = new GetAncillarySpeciesData([]); + data = new GetAncillarySpeciesData(); }); it('sets ancillary_species', () => { @@ -160,36 +177,39 @@ describe('GetPermitData', () => { let data: GetPermitData; before(() => { - data = new GetPermitData(null); + data = new GetPermitData(undefined); }); - it('sets permit_number', () => { - expect(data.permit_number).to.equal(''); - }); - - it('sets ancillary_species', () => { - expect(data.permit_type).to.equal(''); + it('sets permits', () => { + expect(data.permits).to.eql([]); }); }); describe('All values provided', () => { let data: GetPermitData; - const obj = { - number: '12345', - type: 'permit_type' - }; + const obj = [ + { + permit_id: 1, + number: '12345', + type: 'permit_type' + } + ] as IPermitModel[]; before(() => { data = new GetPermitData(obj); }); + it('sets permit_id', () => { + expect(data.permits[0].permit_id).to.equal(obj[0].permit_id); + }); + it('sets permit_number', () => { - expect(data.permit_number).to.equal(obj.number); + expect(data.permits[0].permit_number).to.equal(obj[0].number); }); it('sets permit_type', () => { - expect(data.permit_type).to.equal(obj.type); + expect(data.permits[0].permit_type).to.equal(obj[0].type); }); }); }); @@ -199,7 +219,7 @@ describe('GetSurveyFundingSources', () => { let data: GetSurveyFundingSources; before(() => { - data = new GetSurveyFundingSources([]); + data = new GetSurveyFundingSources(); }); it('sets funding_sources', () => { @@ -405,7 +425,7 @@ describe('GetSurveyPurposeAndMethodologyData', () => { }); it('sets additional_details', () => { - expect(data.additional_details).to.equal(null); + expect(data.additional_details).to.equal(''); }); it('sets field_method_id', () => { @@ -419,10 +439,6 @@ describe('GetSurveyPurposeAndMethodologyData', () => { it('sets vantage_code_ids', () => { expect(data.vantage_code_ids).to.eql([]); }); - - it('sets surveyed_all_areas', () => { - expect(data.surveyed_all_areas).to.equal('false'); - }); }); describe('All values provided with first nations id', () => { @@ -434,7 +450,7 @@ describe('GetSurveyPurposeAndMethodologyData', () => { field_method_id: 2, ecological_season_id: 3, vantage_ids: [4, 5], - surveyed_all_areas: true + revision_count: 'count' }; before(() => { @@ -461,8 +477,205 @@ describe('GetSurveyPurposeAndMethodologyData', () => { expect(data.vantage_code_ids).to.eql(obj.vantage_ids); }); - it('sets surveyed_all_areas', () => { - expect(data.surveyed_all_areas).to.eql('true'); + it('sets revision_count', function () { + expect(data.revision_count).to.equal('count'); + }); + }); +}); + +describe('GetAttachmentsData', () => { + describe('No values provided', () => { + let data: GetAttachmentsData; + + before(() => { + data = new GetAttachmentsData((null as unknown) as any[]); + }); + + it('sets attachmentDetails', function () { + expect(data.attachmentDetails).to.eql([]); + }); + }); + + describe('Empty arrays as values provided', () => { + let data: GetAttachmentsData; + + before(() => { + data = new GetAttachmentsData([]); + }); + + it('sets attachmentDetails', function () { + expect(data.attachmentDetails).to.eql([]); + }); + }); + + describe('some attachmentDetails values provided', () => { + let data: GetAttachmentsData; + + const attachmentDetails = [{ file_name: 1 }, { file_name: 2 }]; + + before(() => { + data = new GetAttachmentsData(attachmentDetails); + }); + + it('sets file_name', function () { + expect(data.attachmentDetails).to.eql([ + { + file_name: 1, + file_type: undefined, + title: undefined, + description: undefined, + key: undefined, + file_size: undefined + }, + { + file_name: 2, + file_type: undefined, + title: undefined, + description: undefined, + key: undefined, + file_size: undefined + } + ]); + }); + }); + + describe('all attachmentDetails values provided', () => { + let data: GetAttachmentsData; + + const attachmentDetails = [ + { + file_name: 1, + file_type: 'type', + title: 'title', + description: 'descript', + file_size: 'file_size', + key: 'key' + }, + { + file_name: 2, + file_type: 'type', + title: 'title', + description: 'descript', + file_size: 'file_size', + key: 'key' + } + ]; + + before(() => { + data = new GetAttachmentsData(attachmentDetails); + }); + + it('sets all fields', function () { + expect(data.attachmentDetails).to.eql([ + { + file_name: 1, + file_type: 'type', + title: 'title', + description: 'descript', + key: 'key', + file_size: 'file_size' + }, + { + file_name: 2, + file_type: 'type', + title: 'title', + description: 'descript', + key: 'key', + file_size: 'file_size' + } + ]); + }); + }); +}); + +describe('GetReportAttachmentsData', () => { + describe('No values provided', () => { + it('sets attachmentDetails', function () { + const data: GetReportAttachmentsData = new GetReportAttachmentsData((null as unknown) as any[]); + + expect(data.attachmentDetails).to.eql([]); + }); + }); + + describe('Empty arrays as values provided', () => { + it('sets attachmentDetails', function () { + const data: GetReportAttachmentsData = new GetReportAttachmentsData([]); + + expect(data.attachmentDetails).to.eql([]); + }); + }); + + describe('some attachmentDetails asdasdsadsasd values provided', () => { + it('sets file_name', function () { + const attachmentDetails = [{ file_name: 1 }, { file_name: 2 }]; + + const data: GetReportAttachmentsData = new GetReportAttachmentsData(attachmentDetails); + expect(data.attachmentDetails).to.eql([ + { + file_name: 1, + title: undefined, + year: undefined, + description: undefined, + key: undefined, + file_size: undefined + }, + { + file_name: 2, + title: undefined, + year: undefined, + description: undefined, + key: undefined, + file_size: undefined + } + ]); + }); + }); + + describe('all attachmentDetails values provided', () => { + it('sets all fields', function () { + const attachmentDetails = [ + { + file_name: 1, + title: 'title', + year: '1', + description: 'descript', + file_size: 'size', + key: 'key', + authors: [{ author: 'author' }] + }, + { + file_name: 2, + file_type: 'type', + title: 'title', + year: '2', + description: 'descript', + file_size: 'size', + key: 'key', + authors: [{ author: 'author' }] + } + ]; + const data: GetReportAttachmentsData = new GetReportAttachmentsData(attachmentDetails); + + expect(data.attachmentDetails).to.eql([ + { + file_name: 1, + title: 'title', + year: '1', + description: 'descript', + key: 'key', + file_size: 'size', + authors: [{ author: 'author' }] + }, + { + file_name: 2, + title: 'title', + year: '2', + description: 'descript', + key: 'key', + file_size: 'size', + authors: [{ author: 'author' }] + } + ]); }); }); }); diff --git a/api/src/models/survey-view.ts b/api/src/models/survey-view.ts index ea863ffacd..9abc85d87d 100644 --- a/api/src/models/survey-view.ts +++ b/api/src/models/survey-view.ts @@ -1,4 +1,5 @@ import { Feature } from 'geojson'; +import { IPermitModel } from '../repositories/permit-repository'; export type SurveyObject = { survey_details: GetSurveyData; @@ -18,7 +19,6 @@ export class GetSurveyData { end_date: string; biologist_first_name: string; biologist_last_name: string; - publish_date: string; survey_area_name: string; geometry: Feature[]; revision_count: number; @@ -29,7 +29,6 @@ export class GetSurveyData { this.survey_name = obj?.name || ''; this.start_date = obj?.start_date || null; this.end_date = obj?.end_date || null; - this.publish_date = String(obj?.publish_date || ''); this.geometry = (obj?.geojson?.length && obj.geojson) || []; this.biologist_first_name = obj?.lead_first_name || ''; this.biologist_last_name = obj?.lead_last_name || ''; @@ -70,12 +69,19 @@ export class GetAncillarySpeciesData { } } export class GetPermitData { - permit_number: number; - permit_type: string; - - constructor(obj?: any) { - this.permit_number = obj?.number || ''; - this.permit_type = obj?.type || ''; + permits: { + permit_id: IPermitModel['permit_id']; + permit_number: IPermitModel['number']; + permit_type: IPermitModel['type']; + }[]; + + constructor(obj?: IPermitModel[]) { + this.permits = + obj?.map((item) => ({ + permit_id: item.permit_id, + permit_number: item.number, + permit_type: item.type + })) || []; } } @@ -86,15 +92,13 @@ export class GetSurveyPurposeAndMethodologyData { ecological_season_id: number; revision_count: number; vantage_code_ids: number[]; - surveyed_all_areas: string; constructor(obj?: any) { this.intended_outcome_id = obj?.intended_outcome_id || null; - this.additional_details = obj?.additional_details || null; + this.additional_details = obj?.additional_details || ''; this.field_method_id = obj?.field_method_id || null; this.ecological_season_id = obj?.ecological_season_id || null; this.vantage_code_ids = (obj?.vantage_ids?.length && obj.vantage_ids) || []; - this.surveyed_all_areas = (obj?.surveyed_all_areas && 'true') || 'false'; this.revision_count = obj?.revision_count ?? 0; } } @@ -168,3 +172,80 @@ export class GetSurveyLocationData { this.geometry = (obj?.geojson?.length && obj.geojson) || []; } } + +interface IGetAttachmentsSource { + file_name: string; + file_type: string; + title: string; + description: string; + key: string; + file_size: string; +} + +/** + * Pre-processes GET /surveys/{id} attachments data + * + * @export + * @class GetAttachmentsData + */ +export class GetAttachmentsData { + attachmentDetails: IGetAttachmentsSource[]; + + constructor(attachments?: any[]) { + this.attachmentDetails = + (attachments?.length && + attachments.map((item: any) => { + return { + file_name: item.file_name, + file_type: item.file_type, + title: item.title, + description: item.description, + key: item.key, + file_size: item.file_size + }; + })) || + []; + } +} + +interface IGetReportAttachmentsSource { + file_name: string; + title: string; + year: string; + description: string; + key: string; + file_size: string; + authors?: { author: string }[]; +} + +/** + * Pre-processes GET /surveys/{id} report attachments data + * + * @export + * @class GetReportAttachmentsData + */ +export class GetReportAttachmentsData { + attachmentDetails: IGetReportAttachmentsSource[]; + + constructor(attachments?: any[]) { + this.attachmentDetails = + (attachments?.length && + attachments.map((item: any) => { + const attachmentItem = { + file_name: item.file_name, + title: item.title, + year: item.year, + description: item.description, + key: item.key, + file_size: item.file_size + }; + + if (item.authors?.length) { + attachmentItem['authors'] = item.authors; + } + + return attachmentItem; + })) || + []; + } +} diff --git a/api/src/models/user.ts b/api/src/models/user.ts index 587b1288e7..98e63af509 100644 --- a/api/src/models/user.ts +++ b/api/src/models/user.ts @@ -1,19 +1,17 @@ -import { getLogger } from '../utils/logger'; - -const defaultLog = getLogger('models/user'); - export class UserObject { id: number; user_identifier: string; + user_guid: string | null; + identity_source: string; record_end_date: string; role_ids: number[]; role_names: string[]; constructor(obj?: any) { - defaultLog.debug({ label: 'UserObject', message: 'params', obj }); - this.id = obj?.system_user_id || null; this.user_identifier = obj?.user_identifier || null; + this.user_guid = obj?.user_guid || null; + this.identity_source = obj?.identity_source || null; this.record_end_date = obj?.record_end_date || null; this.role_ids = (obj?.role_ids?.length && obj.role_ids) || []; this.role_names = (obj?.role_names?.length && obj.role_names) || []; @@ -27,8 +25,6 @@ export class ProjectUserObject { project_role_names: string[]; constructor(obj?: any) { - defaultLog.debug({ label: 'ProjectUserObject', message: 'params', obj }); - this.project_id = obj?.project_id || null; this.system_user_id = obj?.system_user_id || null; this.project_role_ids = (obj?.project_role_ids?.length && obj.project_role_ids) || []; diff --git a/api/src/openapi/README.md b/api/src/openapi/README.md index 9903f10736..83f1da2c70 100644 --- a/api/src/openapi/README.md +++ b/api/src/openapi/README.md @@ -2,7 +2,7 @@ ## Whats the difference between [OpenAPI](https://swagger.io/specification/) and [JSON-Schema](https://json-schema.org/)? -OpenAPI and JSON-SChema are identical in many ways, but in general: +OpenAPI and JSON-Schema are identical in many ways, but in general: ### OpenAPI diff --git a/api/src/openapi/schemas/permit-no-sampling.test.ts b/api/src/openapi/schemas/permit-no-sampling.test.ts deleted file mode 100644 index 74c181de31..0000000000 --- a/api/src/openapi/schemas/permit-no-sampling.test.ts +++ /dev/null @@ -1,20 +0,0 @@ -import Ajv from 'ajv'; -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { permitNoSamplingPostBody, permitNoSamplingResponseBody } from './permit-no-sampling'; - -describe('permitNoSamplingPostBody', () => { - const ajv = new Ajv(); - - it('is valid openapi v3 schema', () => { - expect(ajv.validateSchema(permitNoSamplingPostBody)).to.be.true; - }); -}); - -describe('permitNoSamplingResponseBody', () => { - const ajv = new Ajv(); - - it('is valid openapi v3 schema', () => { - expect(ajv.validateSchema(permitNoSamplingResponseBody)).to.be.true; - }); -}); diff --git a/api/src/openapi/schemas/permit-no-sampling.ts b/api/src/openapi/schemas/permit-no-sampling.ts deleted file mode 100644 index e256cdbe93..0000000000 --- a/api/src/openapi/schemas/permit-no-sampling.ts +++ /dev/null @@ -1,72 +0,0 @@ -/** - * Permit no sampling endpoint post body openapi schema. - */ -export const permitNoSamplingPostBody = { - title: 'Non-Sampling Permit Post Object', - type: 'object', - required: ['coordinator', 'permit'], - properties: { - coordinator: { - title: 'Coordinator', - type: 'object', - required: ['first_name', 'last_name', 'email_address', 'coordinator_agency'], - properties: { - first_name: { - type: 'string' - }, - last_name: { - type: 'string' - }, - email_address: { - type: 'string' - }, - coordinator_agency: { - type: 'string' - }, - share_contact_details: { - type: 'string', - enum: ['true', 'false'] - } - } - }, - permit: { - title: 'Non-sampling permits', - type: 'object', - required: ['permits'], - properties: { - permits: { - type: 'array', - items: { - title: 'Non-sampling permit', - type: 'object', - required: ['permit_number'], - additionalProperties: true, - properties: { - permit_number: { - type: 'string' - } - } - } - } - } - } - } -}; - -/** - * Permit no sampling endpoint response body openapi schema. - */ -export const permitNoSamplingResponseBody = { - title: 'Permit no sampling Response Object', - type: 'object', - required: ['ids'], - properties: { - ids: { - type: 'array', - title: 'Permit no sampling ids', - items: { - type: 'number' - } - } - } -}; diff --git a/api/src/openapi/schemas/project.ts b/api/src/openapi/schemas/project.ts index 8c48824117..aba32f4c71 100644 --- a/api/src/openapi/schemas/project.ts +++ b/api/src/openapi/schemas/project.ts @@ -4,7 +4,7 @@ export const projectCreatePostRequestObject = { title: 'Project post request object', type: 'object', - required: ['coordinator', 'permit', 'project', 'location', 'iucn', 'funding'], + required: ['coordinator', 'project', 'location', 'iucn', 'funding'], properties: { coordinator: { title: 'Project coordinator', @@ -29,24 +29,6 @@ export const projectCreatePostRequestObject = { } } }, - permit: { - title: 'Project permits', - type: 'object', - properties: { - permits: { - type: 'array', - items: { - title: 'Project permit', - type: 'object', - properties: { - permit_number: { - type: 'string' - } - } - } - } - } - }, project: { title: 'Project details', type: 'object', @@ -175,7 +157,6 @@ const projectUpdateProperties = { revision_count: { type: 'number' } } }, - permit: { type: 'object', properties: {} }, project: { type: 'object', properties: {} }, objectives: { type: 'object', properties: {} }, location: { type: 'object', properties: {} }, diff --git a/api/src/paths/administrative-activity.test.ts b/api/src/paths/administrative-activity.test.ts index 21b2a6872a..2e153c07ea 100644 --- a/api/src/paths/administrative-activity.test.ts +++ b/api/src/paths/administrative-activity.test.ts @@ -4,7 +4,7 @@ import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import SQL from 'sql-template-strings'; import * as db from '../database/db'; -import { HTTPError } from '../errors/custom-error'; +import { HTTPError } from '../errors/http-error'; import administrative_queries from '../queries/administrative-activity'; import * as keycloak_utils from '../utils/keycloak-utils'; import { getMockDBConnection } from '../__mocks__/db'; diff --git a/api/src/paths/administrative-activity.ts b/api/src/paths/administrative-activity.ts index e5ebeb4964..ec5d6cecd6 100644 --- a/api/src/paths/administrative-activity.ts +++ b/api/src/paths/administrative-activity.ts @@ -2,7 +2,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { ACCESS_REQUEST_ADMIN_EMAIL } from '../constants/notifications'; import { getAPIUserDBConnection, IDBConnection } from '../database/db'; -import { HTTP400, HTTP500 } from '../errors/custom-error'; +import { HTTP400, HTTP500 } from '../errors/http-error'; import { administrativeActivityResponseObject, hasPendingAdministrativeActivitiesResponseObject diff --git a/api/src/paths/administrative-activity/system-access/{administrativeActivityId}/approve.test.ts b/api/src/paths/administrative-activity/system-access/{administrativeActivityId}/approve.test.ts index 853111a745..b109650665 100644 --- a/api/src/paths/administrative-activity/system-access/{administrativeActivityId}/approve.test.ts +++ b/api/src/paths/administrative-activity/system-access/{administrativeActivityId}/approve.test.ts @@ -3,7 +3,6 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../../database/db'; -import { HTTPError } from '../../../../errors/custom-error'; import { UserObject } from '../../../../models/user'; import { UserService } from '../../../../services/user-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../__mocks__/db'; @@ -18,26 +17,6 @@ describe('approveAccessRequest', () => { sinon.restore(); }); - it('throws an error if the identity source is not supported', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.body = { - userIdentifier: 1, - identitySource: 'fake-source', - roleIds: [1, 3] - }; - - const requestHandler = approve_request.approveAccessRequest(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (error) { - expect((error as HTTPError).status).to.equal(400); - expect((error as HTTPError).message).to.equal('Invalid user identity source'); - } - }); - it('re-throws any error that is thrown', async () => { const expectedError = new Error('test error'); @@ -89,6 +68,8 @@ describe('approveAccessRequest', () => { const mockSystemUser: UserObject = { id: systemUserId, user_identifier: '', + user_guid: 'aaaa', + identity_source: 'idir', record_end_date: '', role_ids: existingRoleIds, role_names: [] diff --git a/api/src/paths/administrative-activity/system-access/{administrativeActivityId}/approve.ts b/api/src/paths/administrative-activity/system-access/{administrativeActivityId}/approve.ts index ccb8282962..5d9cc20dac 100644 --- a/api/src/paths/administrative-activity/system-access/{administrativeActivityId}/approve.ts +++ b/api/src/paths/administrative-activity/system-access/{administrativeActivityId}/approve.ts @@ -1,13 +1,12 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { SYSTEM_IDENTITY_SOURCE } from '../../../../constants/database'; -import { EXTERNAL_BCEID_IDENTITY_SOURCES, EXTERNAL_IDIR_IDENTITY_SOURCES } from '../../../../constants/keycloak'; import { SYSTEM_ROLE } from '../../../../constants/roles'; import { getDBConnection } from '../../../../database/db'; -import { HTTP400 } from '../../../../errors/custom-error'; +import { HTTP400 } from '../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../request-handlers/security/authorization'; import { UserService } from '../../../../services/user-service'; -import { convertUserIdentitySource } from '../../../../utils/keycloak-utils'; +import { coerceUserIdentitySource } from '../../../../utils/keycloak-utils'; import { getLogger } from '../../../../utils/logger'; import { ADMINISTRATIVE_ACTIVITY_STATUS_TYPE } from '../../../administrative-activities'; import { updateAdministrativeActivity } from '../../../administrative-activity'; @@ -29,12 +28,7 @@ export const PUT: Operation = [ ]; const UniqueUserIdentitySources = Array.from( - new Set([ - SYSTEM_IDENTITY_SOURCE.IDIR, - SYSTEM_IDENTITY_SOURCE.BCEID, - ...EXTERNAL_IDIR_IDENTITY_SOURCES, - ...EXTERNAL_BCEID_IDENTITY_SOURCES - ]) + new Set([SYSTEM_IDENTITY_SOURCE.IDIR, SYSTEM_IDENTITY_SOURCE.BCEID_BASIC, SYSTEM_IDENTITY_SOURCE.BCEID_BUSINESS]) ); // Contains both uppercase and lowercase versions of the identity sources @@ -67,8 +61,12 @@ PUT.apiDoc = { 'application/json': { schema: { type: 'object', - required: ['userIdentifier', 'identitySource'], + required: ['userGuid', 'userIdentifier', 'identitySource'], properties: { + userGuid: { + type: 'string', + description: 'The GUID for the user.' + }, userIdentifier: { type: 'string', description: 'The user identifier for the user.' @@ -116,10 +114,11 @@ export function approveAccessRequest(): RequestHandler { return async (req, res) => { const administrativeActivityId = Number(req.params.administrativeActivityId); + const userGuid = req.body.userGuid; const userIdentifier = req.body.userIdentifier; // Convert identity sources that have multiple variations (ie: BCEID) into a single value supported by this app - const identitySource = convertUserIdentitySource(req.body.identitySource); + const identitySource = req.body.identitySource && coerceUserIdentitySource(req.body.identitySource); if (!identitySource) { throw new HTTP400('Invalid user identity source', [ @@ -137,7 +136,7 @@ export function approveAccessRequest(): RequestHandler { const userService = new UserService(connection); // Get the system user (adding or activating them if they already existed). - const systemUserObject = await userService.ensureSystemUser(userIdentifier, identitySource); + const systemUserObject = await userService.ensureSystemUser(userGuid, userIdentifier, identitySource); // Filter out any system roles that have already been added to the user const rolesIdsToAdd = roleIds.filter((roleId) => !systemUserObject.role_ids.includes(roleId)); diff --git a/api/src/paths/codes.test.ts b/api/src/paths/codes.test.ts index f26aec385a..8866448c32 100644 --- a/api/src/paths/codes.test.ts +++ b/api/src/paths/codes.test.ts @@ -3,7 +3,7 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../database/db'; -import { HTTPError } from '../errors/custom-error'; +import { HTTPError } from '../errors/http-error'; import { CodeService } from '../services/code-service'; import { getMockDBConnection } from '../__mocks__/db'; import * as codes from './codes'; diff --git a/api/src/paths/codes.ts b/api/src/paths/codes.ts index c6ceb9ac61..623f45cb02 100644 --- a/api/src/paths/codes.ts +++ b/api/src/paths/codes.ts @@ -1,7 +1,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { getAPIUserDBConnection } from '../database/db'; -import { HTTP500 } from '../errors/custom-error'; +import { HTTP500 } from '../errors/http-error'; import { CodeService } from '../services/code-service'; import { getLogger } from '../utils/logger'; diff --git a/api/src/paths/draft.test.ts b/api/src/paths/draft.test.ts index dedb8451c4..d5737ba31d 100644 --- a/api/src/paths/draft.test.ts +++ b/api/src/paths/draft.test.ts @@ -5,7 +5,7 @@ import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import SQL from 'sql-template-strings'; import * as db from '../database/db'; -import { HTTPError } from '../errors/custom-error'; +import { HTTPError } from '../errors/http-error'; import draft_queries from '../queries/project/draft'; import { getMockDBConnection } from '../__mocks__/db'; import * as draft from './draft'; diff --git a/api/src/paths/draft.ts b/api/src/paths/draft.ts index 9599a74735..dbd62494b8 100644 --- a/api/src/paths/draft.ts +++ b/api/src/paths/draft.ts @@ -1,8 +1,8 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; -import { PROJECT_ROLE } from '../constants/roles'; +import { SYSTEM_ROLE } from '../constants/roles'; import { getDBConnection } from '../database/db'; -import { HTTP400 } from '../errors/custom-error'; +import { HTTP400 } from '../errors/http-error'; import { draftResponseObject } from '../openapi/schemas/draft'; import { queries } from '../queries/queries'; import { authorizeRequestHandler } from '../request-handlers/security/authorization'; @@ -11,13 +11,12 @@ import { getLogger } from '../utils/logger'; const defaultLog = getLogger('paths/draft'); export const PUT: Operation = [ - authorizeRequestHandler((req) => { + authorizeRequestHandler(() => { return { and: [ { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD], - projectId: Number(req.params.projectId), - discriminator: 'ProjectRole' + validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR, SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' } ] }; @@ -26,13 +25,12 @@ export const PUT: Operation = [ ]; export const POST: Operation = [ - authorizeRequestHandler((req) => { + authorizeRequestHandler(() => { return { and: [ { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD], - projectId: Number(req.params.projectId), - discriminator: 'ProjectRole' + validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR, SYSTEM_ROLE.DATA_ADMINISTRATOR], + discriminator: 'SystemRole' } ] }; @@ -105,7 +103,7 @@ POST.apiDoc = { }; PUT.apiDoc = { - description: 'Update a Draft.', + description: 'Update a Draft', tags: ['draft'], security: [ { diff --git a/api/src/paths/draft/{draftId}/delete.test.ts b/api/src/paths/draft/{draftId}/delete.test.ts index 4029c607ba..edcd868d33 100644 --- a/api/src/paths/draft/{draftId}/delete.test.ts +++ b/api/src/paths/draft/{draftId}/delete.test.ts @@ -1,61 +1,23 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; +import { QueryResult } from 'pg'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../database/db'; -import { HTTPError } from '../../../errors/custom-error'; -import draft_queries from '../../../queries/project/draft'; +import { HTTPError } from '../../../errors/http-error'; +import { ProjectService } from '../../../services/project-service'; import { getMockDBConnection } from '../../../__mocks__/db'; -import * as deleteDraftProject from './delete'; +import * as del from './delete'; chai.use(sinonChai); -describe('delete a draft project', () => { - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - body: {}, - params: { - draftId: 1 - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - +describe('getRules', () => { afterEach(() => { sinon.restore(); }); - it('should throw a 400 error when no draftId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = deleteDraftProject.deleteDraft(); - await result( - { ...sampleReq, params: { ...sampleReq.params, draftId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `draftId`'); - } - }); - - it('should throw a 400 error when no sql statement returned for deleteDraftSQL', async () => { + it('should throw an error when a failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -63,38 +25,61 @@ describe('delete a draft project', () => { } }); - sinon.stub(draft_queries, 'deleteDraftSQL').returns(null); + const expectedError = new Error('cannot process request'); + sinon.stub(ProjectService.prototype, 'deleteDraft').rejects(expectedError); + + const sampleReq = { + keycloak_token: {}, + body: {}, + params: {} + } as any; try { - const result = deleteDraftProject.deleteDraft(); + const result = del.deleteDraft(); await result(sampleReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL delete statement'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should return the row count of the removed draft project on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rowCount: 1 }); - + it('should succeed with valid data', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(draft_queries, 'deleteDraftSQL').returns(SQL`something`); - - const result = deleteDraftProject.deleteDraft(); + const sampleReq = { + keycloak_token: {}, + body: {}, + params: {} + } as any; + + const deleteDraftStub = sinon + .stub(ProjectService.prototype, 'deleteDraft') + .resolves(({ rowCount: 1 } as unknown) as QueryResult); + + const expectedResponse = 1; + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; - await result(sampleReq, sampleRes as any, (null as unknown) as any); + const result = del.deleteDraft(); - expect(actualResult).to.eql(1); + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(expectedResponse); + expect(deleteDraftStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/draft/{draftId}/delete.ts b/api/src/paths/draft/{draftId}/delete.ts index 1638bb1f2a..fe5dba16e6 100644 --- a/api/src/paths/draft/{draftId}/delete.ts +++ b/api/src/paths/draft/{draftId}/delete.ts @@ -2,9 +2,8 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { SYSTEM_ROLE } from '../../../constants/roles'; import { getDBConnection } from '../../../database/db'; -import { HTTP400 } from '../../../errors/custom-error'; -import { queries } from '../../../queries/queries'; import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; +import { ProjectService } from '../../../services/project-service'; import { getLogger } from '../../../utils/logger'; const defaultLog = getLogger('/api/draft/{draftId}/delete'); @@ -14,7 +13,7 @@ export const DELETE: Operation = [ return { and: [ { - validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR], + validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR, SYSTEM_ROLE.DATA_ADMINISTRATOR], discriminator: 'SystemRole' } ] @@ -36,7 +35,8 @@ DELETE.apiDoc = { in: 'path', name: 'draftId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -74,26 +74,18 @@ export function deleteDraft(): RequestHandler { return async (req, res) => { defaultLog.debug({ label: 'Delete draft', message: 'params', req_params: req.params }); - if (!req.params.draftId) { - throw new HTTP400('Missing required path param `draftId`'); - } - const connection = getDBConnection(req['keycloak_token']); try { await connection.open(); - const deleteDraftSQLStatement = queries.project.draft.deleteDraftSQL(Number(req.params.draftId)); - - if (!deleteDraftSQLStatement) { - throw new HTTP400('Failed to build SQL delete statement'); - } + const projectService = new ProjectService(connection); - const result = await connection.query(deleteDraftSQLStatement.text, deleteDraftSQLStatement.values); + const response = await projectService.deleteDraft(Number(req.params.draftId)); await connection.commit(); - return res.status(200).json(result && result.rowCount); + return res.status(200).json(response && response.rowCount); } catch (error) { defaultLog.error({ label: 'deleteDraft', message: 'error', error }); await connection.rollback(); diff --git a/api/src/paths/draft/{draftId}/get.test.ts b/api/src/paths/draft/{draftId}/get.test.ts index af6b6dec5d..3c7da1b210 100644 --- a/api/src/paths/draft/{draftId}/get.test.ts +++ b/api/src/paths/draft/{draftId}/get.test.ts @@ -2,43 +2,21 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../database/db'; -import { HTTPError } from '../../../errors/custom-error'; -import draft_queries from '../../../queries/project/draft'; +import { HTTPError } from '../../../errors/http-error'; +import { ProjectService } from '../../../services/project-service'; import { getMockDBConnection } from '../../../__mocks__/db'; -import * as viewDraftProject from './get'; +import * as get from './get'; chai.use(sinonChai); -describe('gets a draft project', () => { - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - body: {}, - params: { - draftId: 1 - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - +describe('getRules', () => { afterEach(() => { sinon.restore(); }); - it('should throw a 400 error when no sql statement returned for getDraftSQL', async () => { + it('should throw an error when a failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -46,60 +24,67 @@ describe('gets a draft project', () => { } }); - sinon.stub(draft_queries, 'getDraftSQL').returns(null); + const expectedError = new Error('cannot process request'); + sinon.stub(ProjectService.prototype, 'getSingleDraft').rejects(expectedError); + + const sampleReq = { + keycloak_token: {}, + body: {}, + params: {} + } as any; try { - const result = viewDraftProject.getSingleDraft(); + const result = get.getSingleDraft(); await result(sampleReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should return the draft project on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [{ id: 1 }] }); - + it('should succeed with valid data', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(draft_queries, 'getDraftSQL').returns(SQL`something`); - - const result = viewDraftProject.getSingleDraft(); + const sampleReq = { + keycloak_token: {}, + body: {}, + params: {} + } as any; - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.eql({ id: 1 }); - }); - - it('should return null if the draft project does not exist', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: undefined }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery + const getSingleDraftStub = sinon.stub(ProjectService.prototype, 'getSingleDraft').resolves({ + id: 1, + name: 'string', + data: { any: 1 } }); - sinon.stub(draft_queries, 'getDraftSQL').returns(SQL`something`); - - const result = viewDraftProject.getSingleDraft(); + const expectedResponse = { + id: 1, + name: 'string', + data: { any: 1 } + }; + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; - await result(sampleReq, sampleRes as any, (null as unknown) as any); + const result = get.getSingleDraft(); - expect(actualResult).to.eql(null); + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(expectedResponse); + expect(getSingleDraftStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/draft/{draftId}/get.ts b/api/src/paths/draft/{draftId}/get.ts index ab7a09c07d..e2c21d5bef 100644 --- a/api/src/paths/draft/{draftId}/get.ts +++ b/api/src/paths/draft/{draftId}/get.ts @@ -2,10 +2,9 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { SYSTEM_ROLE } from '../../../constants/roles'; import { getDBConnection } from '../../../database/db'; -import { HTTP400 } from '../../../errors/custom-error'; import { draftGetResponseObject } from '../../../openapi/schemas/draft'; -import { queries } from '../../../queries/queries'; import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; +import { ProjectService } from '../../../services/project-service'; import { getLogger } from '../../../utils/logger'; const defaultLog = getLogger('paths/draft/{draftId}'); @@ -15,7 +14,7 @@ export const GET: Operation = [ return { and: [ { - validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR], + validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR, SYSTEM_ROLE.DATA_ADMINISTRATOR], discriminator: 'SystemRole' } ] @@ -37,7 +36,8 @@ GET.apiDoc = { in: 'path', name: 'draftId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -81,21 +81,15 @@ export function getSingleDraft(): RequestHandler { const connection = getDBConnection(req['keycloak_token']); try { - const getDraftSQLStatement = queries.project.draft.getDraftSQL(Number(req.params.draftId)); - - if (!getDraftSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - await connection.open(); - const draftResponse = await connection.query(getDraftSQLStatement.text, getDraftSQLStatement.values); + const projectService = new ProjectService(connection); - await connection.commit(); + const response = await projectService.getSingleDraft(Number(req.params.draftId)); - const draftResult = (draftResponse && draftResponse.rows && draftResponse.rows[0]) || null; + await connection.commit(); - return res.status(200).json(draftResult); + return res.status(200).json(response); } catch (error) { defaultLog.error({ label: 'getSingleDraft', message: 'error', error }); throw error; diff --git a/api/src/paths/drafts.test.ts b/api/src/paths/drafts.test.ts index 63764eaf88..3c1fde444b 100644 --- a/api/src/paths/drafts.test.ts +++ b/api/src/paths/drafts.test.ts @@ -4,7 +4,7 @@ import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import SQL from 'sql-template-strings'; import * as db from '../database/db'; -import { HTTPError } from '../errors/custom-error'; +import { HTTPError } from '../errors/http-error'; import draft_queries from '../queries/project/draft'; import { getMockDBConnection } from '../__mocks__/db'; import * as drafts from './drafts'; diff --git a/api/src/paths/drafts.ts b/api/src/paths/drafts.ts index 78b5236eb9..2e47745fb7 100644 --- a/api/src/paths/drafts.ts +++ b/api/src/paths/drafts.ts @@ -1,7 +1,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { getDBConnection } from '../database/db'; -import { HTTP400 } from '../errors/custom-error'; +import { HTTP400 } from '../errors/http-error'; import { draftResponseObject } from '../openapi/schemas/draft'; import { queries } from '../queries/queries'; import { authorizeRequestHandler } from '../request-handlers/security/authorization'; diff --git a/api/src/paths/dwc/eml.test.ts b/api/src/paths/dwc/eml.test.ts deleted file mode 100644 index faf27db3e7..0000000000 --- a/api/src/paths/dwc/eml.test.ts +++ /dev/null @@ -1,59 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import * as db from '../../database/db'; -import { HTTPError } from '../../errors/custom-error'; -import { EmlService } from '../../services/eml-service'; -import { getMockDBConnection, getRequestHandlerMocks } from '../../__mocks__/db'; -import { getProjectEml } from './eml'; - -chai.use(sinonChai); - -describe('getProjectEml', () => { - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 400 error when no projectId is provided', async () => { - const dbConnectionObj = getMockDBConnection(); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.query = { projectId: undefined }; - - try { - await getProjectEml()(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - } - }); - - it('should throw an error when buildProjectEml fails', async () => { - const dbConnectionObj = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon.stub(EmlService.prototype, 'buildProjectEml').rejects(new Error('a test error')); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1' - }; - - try { - const requestHandler = getProjectEml(); - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect(dbConnectionObj.rollback).to.have.been.called; - expect(dbConnectionObj.release).to.have.been.called; - expect((actualError as HTTPError).message).to.equal('a test error'); - } - }); -}); diff --git a/api/src/paths/dwc/eml.ts b/api/src/paths/dwc/eml.ts deleted file mode 100644 index 73daaec1a9..0000000000 --- a/api/src/paths/dwc/eml.ts +++ /dev/null @@ -1,145 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE, SYSTEM_ROLE } from '../../constants/roles'; -import { getDBConnection } from '../../database/db'; -import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; -import { EmlService } from '../../services/eml-service'; -import { getLogger } from '../../utils/logger'; - -const defaultLog = getLogger('paths/project/{projectId}/export/eml'); - -export const GET: Operation = [ - authorizeRequestHandler((req) => { - return { - or: [ - { - validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.DATA_ADMINISTRATOR], - discriminator: 'SystemRole' - }, - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR], - projectId: Number(req.query.projectId), - discriminator: 'ProjectRole' - } - ] - }; - }), - getProjectEml() -]; - -GET.apiDoc = { - description: 'Produces an Ecological Metadata Language (EML) extract for a target data package.', - tags: ['eml', 'dwc'], - security: [ - { - Bearer: [] - } - ], - parameters: [ - { - in: 'query', - name: 'projectId', - schema: { - type: 'integer', - minimum: 1 - }, - required: true - }, - { - in: 'query', - name: 'surveyId', - schema: { - type: 'array', - items: { - type: 'integer', - minimum: 1 - } - }, - description: 'Specify which surveys to include in the EML. Defaults to all surveys if none specified.' - }, - { - in: 'query', - name: 'includeSensitive', - schema: { - type: 'string', - enum: ['true', 'false'], - default: 'false' - }, - description: 'Specify if sensitive metadata should be included in the EML. Defaults to false if not specified.' - } - ], - responses: { - 200: { - description: 'Ecological Metadata Language (EML) extract production OK', - content: { - 'application/json': { - schema: { - type: 'object', - required: ['eml'], - properties: { - eml: { - type: 'string', - description: 'Project EML data in XML format' - } - } - }, - encoding: { - eml: { - contentType: 'application/xml; charset=utf-8' - } - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function getProjectEml(): RequestHandler { - return async (req, res) => { - const projectId = Number(req.query.projectId); - - const surveyIds = (req.query.surveyId as string[] | undefined)?.map((item) => Number(item)); - - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - const emlService = new EmlService({ projectId: projectId }, connection); - - const xmlData = await emlService.buildProjectEml({ - includeSensitiveData: req.query.includeSensitive === 'true' || false, - surveyIds: surveyIds - }); - - await connection.commit(); - - res.setHeader('Access-Control-Expose-Headers', 'Content-Disposition'); - res.attachment(`project_${projectId}_eml.xml`); - res.contentType('application/xml'); - - return res.status(200).json({ eml: xmlData }); - } catch (error) { - defaultLog.error({ label: 'getProjectEml', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/dwc/metadata.test.ts b/api/src/paths/dwc/metadata.test.ts new file mode 100644 index 0000000000..ffd83bea57 --- /dev/null +++ b/api/src/paths/dwc/metadata.test.ts @@ -0,0 +1,88 @@ +import chai, { expect } from 'chai'; +import { GeoJsonProperties } from 'geojson'; +import { describe } from 'mocha'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import * as db from '../../database/db'; +import { HTTPError } from '../../errors/http-error'; +import { OccurrenceService } from '../../services/occurrence-service'; +import { getMockDBConnection } from '../../__mocks__/db'; +import * as metadata from './metadata'; + +chai.use(sinonChai); + +describe('getSpatialMetadataBySubmissionSpatialComponentIds', () => { + const dbConnectionObj = getMockDBConnection(); + + const sampleReq = { + keycloak_token: {}, + body: { + occurrence_submission_id: null + } + } as any; + + let actualResult: any = null; + + const sampleRes = { + status: () => { + return { + json: (result: any) => { + actualResult = result; + } + }; + } + }; + + afterEach(() => { + sinon.restore(); + }); + + it('should throw an error when failed to get metadata', async () => { + sinon.stub(db, 'getDBConnection').returns({ + ...dbConnectionObj, + systemUserId: () => { + return 20; + } + }); + const expectedError = new Error('cannot process request'); + sinon + .stub(OccurrenceService.prototype, 'findSpatialMetadataBySubmissionSpatialComponentIds') + .rejects(expectedError); + + try { + const result = metadata.getSpatialMetadataBySubmissionSpatialComponentIds(); + + await result( + { ...sampleReq, query: { submissionSpatialComponentIds: [1] } }, + (null as unknown) as any, + (null as unknown) as any + ); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTPError).message).to.equal(expectedError.message); + } + }); + + it('should return the occurrences view data on success', async () => { + sinon.stub(db, 'getDBConnection').returns({ + ...dbConnectionObj, + systemUserId: () => { + return 20; + } + }); + + sinon + .stub(OccurrenceService.prototype, 'findSpatialMetadataBySubmissionSpatialComponentIds') + .resolves([({ id: 1 } as unknown) as GeoJsonProperties]); + + const result = metadata.getSpatialMetadataBySubmissionSpatialComponentIds(); + + await result( + { ...sampleReq, query: { submissionSpatialComponentIds: [1] } }, + sampleRes as any, + (null as unknown) as any + ); + + expect(actualResult).to.be.eql([{ id: 1 }]); + }); +}); diff --git a/api/src/paths/dwc/metadata.ts b/api/src/paths/dwc/metadata.ts new file mode 100644 index 0000000000..5c434c135a --- /dev/null +++ b/api/src/paths/dwc/metadata.ts @@ -0,0 +1,100 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { getDBConnection } from '../../database/db'; +import { OccurrenceService } from '../../services/occurrence-service'; +import { getLogger } from '../../utils/logger'; + +const defaultLog = getLogger('paths/dwc/metadata'); + +export const GET: Operation = [getSpatialMetadataBySubmissionSpatialComponentIds()]; + +GET.apiDoc = { + description: 'Retrieves spatial component metadata based on submission spatial component id', + tags: ['spatial'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + description: 'spatial component submission ids', + in: 'query', + name: 'submissionSpatialComponentIds', + schema: { + type: 'array', + items: { + type: 'number', + minimum: 1 + } + }, + required: true + } + ], + responses: { + 200: { + description: 'Spatial metadata response object.', + content: { + 'application/json': { + schema: { + type: 'array', + items: { + type: 'object' + } + } + } + } + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/401' + }, + 409: { + $ref: '#/components/responses/409' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * Retrieves dataset metadata from Elastic Search. + * + * @returns {RequestHandler} + */ +export function getSpatialMetadataBySubmissionSpatialComponentIds(): RequestHandler { + return async (req, res) => { + const submissionSpatialComponentIds = ((req.query.submissionSpatialComponentIds || []) as string[]).map(Number); + + const connection = getDBConnection(req['keycloak_token']); + + try { + await connection.open(); + + const occurrenceService = new OccurrenceService(connection); + + const response = await occurrenceService.findSpatialMetadataBySubmissionSpatialComponentIds( + submissionSpatialComponentIds + ); + + await connection.commit(); + + res.status(200).json(response); + } catch (error) { + defaultLog.error({ label: 'getSpatialMetadataBySubmissionSpatialComponentIds', message: 'error', error }); + await connection.rollback(); + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/dwc/process.test.ts b/api/src/paths/dwc/process.test.ts new file mode 100644 index 0000000000..8d9451c966 --- /dev/null +++ b/api/src/paths/dwc/process.test.ts @@ -0,0 +1,99 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import * as db from '../../database/db'; +import { ErrorService } from '../../services/error-service'; +import { ValidationService } from '../../services/validation-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../__mocks__/db'; +import * as process from './process'; + +chai.use(sinonChai); + +describe('dwc/process', () => { + describe('process dwc file', () => { + afterEach(() => { + sinon.restore(); + }); + + it('returns a 200 if req.body.occurrence_submission_id exists', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.body = { + occurrence_submission_id: '123-456-789' + }; + mockReq['keycloak_token'] = 'token'; + + const processFileStub = sinon.stub(ValidationService.prototype, 'processDWCFile').resolves(); + + const requestHandler = process.processDWCFile(); + await requestHandler(mockReq, mockRes, mockNext); + expect(mockRes.statusValue).to.equal(200); + expect(processFileStub).to.have.been.calledOnceWith(mockReq.body.occurrence_submission_id); + expect(mockRes.jsonValue).to.eql({ status: 'success' }); + }); + + it('catches an error on processDWCFile', async () => { + const dbConnectionObj = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const processFileStub = sinon + .stub(ValidationService.prototype, 'processDWCFile') + .throws(new Error('test processDWCFile error')); + const errorServiceStub = sinon.stub(ErrorService.prototype, 'insertSubmissionStatus').resolves(); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq['keycloak_token'] = 'token'; + + mockReq.body = { + occurrence_submission_id: '123-456-789' + }; + + const requestHandler = process.processDWCFile(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(processFileStub).to.have.been.calledOnce; + expect(errorServiceStub).to.have.been.calledOnce; + expect(dbConnectionObj.rollback).to.have.been.calledOnce; + expect(dbConnectionObj.release).to.have.been.calledOnce; + expect((actualError as Error).message).to.equal('test processDWCFile error'); + } + }); + + it('catches an error on insertSubmissionStatus', async () => { + const dbConnectionObj = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const processFileStub = sinon + .stub(ValidationService.prototype, 'processDWCFile') + .throws(new Error('test processDWCFile error')); + const errorServiceStub = sinon + .stub(ErrorService.prototype, 'insertSubmissionStatus') + .throws(new Error('test insertSubmissionStatus error')); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq['keycloak_token'] = 'token'; + + mockReq.body = { + occurrence_submission_id: '123-456-789' + }; + + const requestHandler = process.processDWCFile(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(processFileStub).to.have.been.calledOnce; + expect(errorServiceStub).to.have.been.calledOnce; + expect(dbConnectionObj.rollback).to.have.been.calledOnce; + expect(dbConnectionObj.release).to.have.been.calledOnce; + expect((actualError as Error).message).to.equal('test insertSubmissionStatus error'); + } + }); + }); +}); diff --git a/api/src/paths/dwc/process.ts b/api/src/paths/dwc/process.ts new file mode 100644 index 0000000000..05f6dff781 --- /dev/null +++ b/api/src/paths/dwc/process.ts @@ -0,0 +1,131 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { PROJECT_ROLE } from '../../constants/roles'; +import { SUBMISSION_STATUS_TYPE } from '../../constants/status'; +import { getDBConnection } from '../../database/db'; +import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; +import { ErrorService } from '../../services/error-service'; +import { ValidationService } from '../../services/validation-service'; +import { getLogger } from '../../utils/logger'; + +const defaultLog = getLogger('paths/dwc/process'); + +export const POST: Operation = [ + authorizeRequestHandler((req) => { + return { + and: [ + { + validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR], + projectId: Number(req.body.project_id), + discriminator: 'ProjectRole' + } + ] + }; + }), + processDWCFile() +]; + +export const getValidateAPIDoc = (basicDescription: string, successDescription: string, tags: string[]) => { + return { + description: basicDescription, + tags: tags, + security: [ + { + Bearer: [] + } + ], + requestBody: { + description: 'Request body', + content: { + 'application/json': { + schema: { + type: 'object', + required: ['occurrence_submission_id'], + properties: { + occurrence_submission_id: { + description: 'A survey occurrence submission ID', + type: 'integer', + minimum: 1 + } + } + } + } + } + }, + responses: { + 200: { + description: successDescription, + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + status: { + type: 'string' + }, + reason: { + type: 'string' + } + } + } + } + } + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } + }; +}; + +POST.apiDoc = { + ...getValidateAPIDoc( + 'Validates and processes a Darwin Core (DWC) Archive survey observation submission.', + 'Validate and process a Darwin Core (DWC) Archive survey observation submission OK', + ['survey', 'observation', 'dwc'] + ) +}; + +export function processDWCFile(): RequestHandler { + return async (req, res) => { + const submissionId = req.body.occurrence_submission_id; + + res.status(200).json({ status: 'success' }); + + const connection = getDBConnection(req['keycloak_token']); + try { + await connection.open(); + + const service = new ValidationService(connection); + + await service.processDWCFile(submissionId); + + await connection.commit(); + } catch (error: any) { + defaultLog.error({ label: 'persistParseErrors', message: 'error', error }); + + // Unexpected error occurred, rolling DB back to safe state + await connection.rollback(); + + // We still want to track that the submission failed to present to the user + const errorService = new ErrorService(connection); + await errorService.insertSubmissionStatus(submissionId, SUBMISSION_STATUS_TYPE.SYSTEM_ERROR); + await connection.commit(); + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/dwc/scrape-occurrences.ts b/api/src/paths/dwc/scrape-occurrences.ts deleted file mode 100644 index ac9ef5a07c..0000000000 --- a/api/src/paths/dwc/scrape-occurrences.ts +++ /dev/null @@ -1,275 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE } from '../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../database/db'; -import { HTTP400 } from '../../errors/custom-error'; -import { PostOccurrence } from '../../models/occurrence-create'; -import { queries } from '../../queries/queries'; -import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; -import { getLogger } from '../../utils/logger'; -import { DWCArchive } from '../../utils/media/dwc/dwc-archive-file'; -import { getOccurrenceSubmission, getS3File, prepDWCArchive, sendResponse } from './validate'; - -const defaultLog = getLogger('paths/dwc/scrape-occurrences'); - -export const POST: Operation = [ - authorizeRequestHandler((req) => { - return { - and: [ - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR], - projectId: Number(req.body.project_id), - discriminator: 'ProjectRole' - } - ] - }; - }), - getOccurrenceSubmission(), - getSubmissionOutputS3Key(), - getS3File(), - prepDWCArchive(), - scrapeAndUploadOccurrences(), - sendResponse() -]; - -POST.apiDoc = { - description: 'Scrape information from file into occurrence table.', - tags: ['scrape', 'occurrence'], - security: [ - { - Bearer: [] - } - ], - requestBody: { - description: 'Request body', - content: { - 'application/json': { - schema: { - type: 'object', - required: ['occurrence_submission_id'], - properties: { - project_id: { - type: 'number' - }, - occurrence_submission_id: { - description: 'A survey occurrence submission ID', - type: 'number', - example: 1 - } - } - } - } - } - }, - responses: { - 200: { - description: 'Successfully scraped and uploaded occurrence information.', - content: { - 'application/json': { - schema: { - type: 'object', - required: ['status'], - properties: { - status: { - type: 'string', - enum: ['success', 'failed'] - }, - reason: { - type: 'string' - } - } - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/401' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function getSubmissionOutputS3Key(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'getSubmissionOutputS3Key', message: 'params', files: req.body }); - const occurrence_submission = req['occurrence_submission']; - - req['s3Key'] = occurrence_submission.output_key; - - next(); - }; -} - -export function scrapeAndUploadOccurrences(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'scrapeAndUploadOccurrences', message: 'params', files: req.body }); - - const occurrenceSubmissionId = req.body.occurrence_submission_id; - const dwcArchive: DWCArchive = req['dwcArchive']; - - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - const { - occurrenceRows, - occurrenceIdHeader, - associatedTaxaHeader, - eventRows, - lifeStageHeader, - sexHeader, - individualCountHeader, - organismQuantityHeader, - organismQuantityTypeHeader, - occurrenceHeaders, - eventIdHeader, - eventDateHeader, - eventVerbatimCoordinatesHeader, - taxonRows, - taxonIdHeader, - vernacularNameHeader - } = getHeadersAndRowsFromFile(dwcArchive); - - const scrapedOccurrences = occurrenceRows?.map((row: any) => { - const occurrenceId = row[occurrenceIdHeader]; - const associatedTaxa = row[associatedTaxaHeader]; - const lifeStage = row[lifeStageHeader]; - const sex = row[sexHeader]; - const individualCount = row[individualCountHeader]; - const organismQuantity = row[organismQuantityHeader]; - const organismQuantityType = row[organismQuantityTypeHeader]; - - const data = { headers: occurrenceHeaders, rows: row }; - - let verbatimCoordinates; - let eventDate; - - eventRows?.forEach((eventRow: any) => { - if (eventRow[eventIdHeader] === occurrenceId) { - eventDate = eventRow[eventDateHeader]; - verbatimCoordinates = eventRow[eventVerbatimCoordinatesHeader]; - } - }); - - let vernacularName; - - taxonRows?.forEach((taxonRow: any) => { - if (taxonRow[taxonIdHeader] === occurrenceId) { - vernacularName = taxonRow[vernacularNameHeader]; - } - }); - - return new PostOccurrence({ - associatedTaxa: associatedTaxa, - lifeStage: lifeStage, - sex: sex, - individualCount: individualCount, - vernacularName: vernacularName, - data, - verbatimCoordinates: verbatimCoordinates, - organismQuantity: organismQuantity, - organismQuantityType: organismQuantityType, - eventDate: eventDate - }); - }); - - await Promise.all( - scrapedOccurrences?.map(async (scrapedOccurrence: any) => { - uploadScrapedOccurrence(occurrenceSubmissionId, scrapedOccurrence, connection); - }) || [] - ); - - await connection.commit(); - - next(); - } catch (error) { - defaultLog.error({ label: 'scrapeAndUploadOccurrences', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} - -/** - * Upload scraped occurrence data. - * - * @param {number} occurrenceSubmissionId - * @param {any} scrapedOccurrence - * @param {IDBConnection} connection - * @return {*} - */ -export const uploadScrapedOccurrence = async ( - occurrenceSubmissionId: number, - scrapedOccurrence: PostOccurrence, - connection: IDBConnection -) => { - const sqlStatement = queries.occurrence.postOccurrenceSQL(occurrenceSubmissionId, scrapedOccurrence); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL post statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to insert occurrence data'); - } -}; - -const getHeadersAndRowsFromFile = (dwcArchive: DWCArchive) => { - const eventHeaders = dwcArchive.worksheets.event?.getHeaders(); - const eventRows = dwcArchive.worksheets.event?.getRows(); - - const eventIdHeader = eventHeaders?.indexOf('id') as number; - const eventVerbatimCoordinatesHeader = eventHeaders?.indexOf('verbatimCoordinates') as number; - const eventDateHeader = eventHeaders?.indexOf('eventDate') as number; - - const occurrenceHeaders = dwcArchive.worksheets.occurrence?.getHeaders(); - const occurrenceRows = dwcArchive.worksheets.occurrence?.getRows(); - - const occurrenceIdHeader = occurrenceHeaders?.indexOf('id') as number; - const associatedTaxaHeader = occurrenceHeaders?.indexOf('associatedTaxa') as number; - const lifeStageHeader = occurrenceHeaders?.indexOf('lifeStage') as number; - const sexHeader = occurrenceHeaders?.indexOf('sex') as number; - const individualCountHeader = occurrenceHeaders?.indexOf('individualCount') as number; - const organismQuantityHeader = occurrenceHeaders?.indexOf('organismQuantity') as number; - const organismQuantityTypeHeader = occurrenceHeaders?.indexOf('organismQuantityType') as number; - - const taxonHeaders = dwcArchive.worksheets.taxon?.getHeaders(); - const taxonRows = dwcArchive.worksheets.taxon?.getRows(); - const taxonIdHeader = taxonHeaders?.indexOf('id') as number; - const vernacularNameHeader = taxonHeaders?.indexOf('vernacularName') as number; - - return { - occurrenceRows, - occurrenceIdHeader, - associatedTaxaHeader, - eventRows, - lifeStageHeader, - sexHeader, - individualCountHeader, - organismQuantityHeader, - organismQuantityTypeHeader, - occurrenceHeaders, - eventIdHeader, - eventDateHeader, - eventVerbatimCoordinatesHeader, - taxonRows, - taxonIdHeader, - vernacularNameHeader - }; -}; diff --git a/api/src/paths/dwc/validate.test.ts b/api/src/paths/dwc/validate.test.ts deleted file mode 100644 index 59ba5f74a4..0000000000 --- a/api/src/paths/dwc/validate.test.ts +++ /dev/null @@ -1,226 +0,0 @@ -import { GetObjectOutput } from 'aws-sdk/clients/s3'; -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import * as db from '../../database/db'; -import { HTTPError } from '../../errors/custom-error'; -import survey_queries from '../../queries/survey'; -import * as file_utils from '../../utils/file-utils'; -import { ArchiveFile } from '../../utils/media/media-file'; -import * as media_utils from '../../utils/media/media-utils'; -import { getMockDBConnection } from '../../__mocks__/db'; -import * as validate from './validate'; - -chai.use(sinonChai); - -const dbConnectionObj = getMockDBConnection({ - systemUserId: () => { - return 20; - } -}); - -const sampleReq = { - keycloak_token: {}, - body: { - occurrence_submission_id: 1 - } -} as any; - -describe('getOccurrenceSubmission', () => { - const sampleReq = { - keycloak_token: {}, - body: { - occurrence_submission_id: 1 - } - } as any; - - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 400 error when no occurrence submission id is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = validate.getOccurrenceSubmission(); - await result( - { ...sampleReq, body: { ...sampleReq.body, occurrence_submission_id: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required body param `occurrence_submission_id`.'); - } - }); - - it('should throw a 400 error when no sql statement returned for getSurveyOccurrenceSubmissionSQL', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(survey_queries, 'getSurveyOccurrenceSubmissionSQL').returns(null); - - try { - const result = validate.getOccurrenceSubmission(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - } - }); - - it('should throw a 400 error when no rows returned', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rows: [] - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery - }); - - sinon.stub(survey_queries, 'getSurveyOccurrenceSubmissionSQL').returns(SQL`something`); - - try { - const result = validate.getOccurrenceSubmission(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to get survey occurrence submission'); - } - }); - - // TODO update this test as teh s3 key is not part of the `getOccurrenceSubmission` step now - it('should set occurrence_submission in the request on success', async () => { - const nextSpy = sinon.spy(); - const mockQuery = sinon.stub(); - - const expectedRecord = { id: 123, input_file_name: 'someFile', input_key: 'somekey' }; - - mockQuery.resolves({ - rows: [expectedRecord] - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery - }); - - sinon.stub(survey_queries, 'getSurveyOccurrenceSubmissionSQL').returns(SQL`something`); - - const result = validate.getOccurrenceSubmission(); - await result(sampleReq, (null as unknown) as any, nextSpy as any); - - expect(sampleReq.occurrence_submission).to.eql(expectedRecord); - expect(nextSpy).to.have.been.called; - }); -}); - -describe('getS3File', () => { - const updatedSampleReq = { ...sampleReq, s3Key: 'somekey' }; - - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 500 error when no file in S3', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(file_utils, 'getFileFromS3').resolves(undefined); - - try { - const result = validate.getS3File(); - await result(updatedSampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(500); - expect((actualError as HTTPError).message).to.equal('Failed to get file from S3'); - } - }); - - it('should set the s3 file in the request on success', async () => { - const file = { - fieldname: 'media', - originalname: 'test.txt', - encoding: '7bit', - mimetype: 'text/plain', - size: 340 - }; - - const nextSpy = sinon.spy(); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(file_utils, 'getFileFromS3').resolves(file as GetObjectOutput); - - const result = validate.getS3File(); - await result(sampleReq, (null as unknown) as any, nextSpy as any); - - expect(sampleReq.s3File).to.eql(file); - expect(nextSpy).to.have.been.called; - }); -}); - -describe('getOccurrenceSubmissionInputS3Key', () => { - it('sets the occurrence submission input key and calls next', async () => { - const nextSpy = sinon.spy(); - - const sampleRequest = { - occurrence_submission: { - input_key: 'key' - } - } as any; - - const result = validate.getOccurrenceSubmissionInputS3Key(); - await result(sampleRequest, (null as unknown) as any, nextSpy as any); - - expect(sampleRequest.s3Key).to.eql(sampleRequest.occurrence_submission.input_key); - expect(nextSpy).to.have.been.called; - }); -}); - -describe('prepDWCArchive', () => { - const sampleRequest = { - keycloak_token: {}, - s3File: { - fieldname: 'media', - originalname: 'test.txt', - encoding: '7bit', - mimetype: 'text/plain', - size: 340 - } - } as any; - - afterEach(() => { - sinon.restore(); - }); - - it('should set parseError when failed to parse s3File', async () => { - const nextSpy = sinon.spy(); - - sinon.stub(media_utils, 'parseUnknownMedia').returns(null); - - const result = validate.prepDWCArchive(); - await result(sampleRequest, (null as unknown) as any, nextSpy as any); - - expect(sampleRequest.parseError).to.eql('Failed to parse submission, file was empty'); - expect(nextSpy).to.have.been.called; - }); - - it('should set parseError when not a valid xlsx csv file', async () => { - const nextSpy = sinon.spy(); - - sinon.stub(media_utils, 'parseUnknownMedia').returns(('not a csv file' as unknown) as ArchiveFile); - - const result = validate.prepDWCArchive(); - await result(sampleRequest, (null as unknown) as any, nextSpy as any); - - expect(sampleRequest.parseError).to.eql('Failed to parse submission, not a valid DwC Archive Zip file'); - expect(nextSpy).to.have.been.called; - }); -}); diff --git a/api/src/paths/dwc/validate.ts b/api/src/paths/dwc/validate.ts deleted file mode 100644 index d16c9e7a3e..0000000000 --- a/api/src/paths/dwc/validate.ts +++ /dev/null @@ -1,545 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE } from '../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../database/db'; -import { HTTP400, HTTP500 } from '../../errors/custom-error'; -import { queries } from '../../queries/queries'; -import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; -import { getFileFromS3 } from '../../utils/file-utils'; -import { getLogger } from '../../utils/logger'; -import { ICsvState, IHeaderError, IRowError } from '../../utils/media/csv/csv-file'; -import { DWCArchive } from '../../utils/media/dwc/dwc-archive-file'; -import { ArchiveFile, IMediaState } from '../../utils/media/media-file'; -import { parseUnknownMedia } from '../../utils/media/media-utils'; -import { ValidationSchemaParser } from '../../utils/media/validation/validation-schema-parser'; - -const defaultLog = getLogger('paths/dwc/validate'); - -export const POST: Operation = [ - authorizeRequestHandler((req) => { - return { - and: [ - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR], - projectId: Number(req.body.project_id), - discriminator: 'ProjectRole' - } - ] - }; - }), - getOccurrenceSubmission(), - getOccurrenceSubmissionInputS3Key(), - getS3File(), - prepDWCArchive(), - persistParseErrors(), - getValidationSchema(), - getValidationRules(), - validateDWCArchive(), - persistValidationResults({ initialSubmissionStatusType: 'Darwin Core Validated' }), - updateOccurrenceSubmission(), - sendResponse() -]; - -export const getValidateAPIDoc = (basicDescription: string, successDescription: string, tags: string[]) => { - return { - description: basicDescription, - tags: tags, - security: [ - { - Bearer: [] - } - ], - requestBody: { - description: 'Request body', - content: { - 'application/json': { - schema: { - type: 'object', - required: ['project_id', 'occurrence_submission_id'], - properties: { - project_id: { - type: 'number' - }, - occurrence_submission_id: { - description: 'A survey occurrence submission ID', - type: 'number', - example: 1 - } - } - } - } - } - }, - responses: { - 200: { - description: successDescription, - content: { - 'application/json': { - schema: { - type: 'object', - properties: { - status: { - type: 'string' - }, - reason: { - type: 'string' - } - } - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } - }; -}; - -POST.apiDoc = { - ...getValidateAPIDoc( - 'Validates a Darwin Core (DWC) Archive survey observation submission.', - 'Validate Darwin Core (DWC) Archive survey observation submission OK', - ['survey', 'observation', 'dwc'] - ) -}; - -export function getOccurrenceSubmission(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'getOccurrenceSubmission', message: 'params', files: req.body }); - - const connection = getDBConnection(req['keycloak_token']); - - const occurrenceSubmissionId = req.body.occurrence_submission_id; - - if (!occurrenceSubmissionId) { - throw new HTTP400('Missing required body param `occurrence_submission_id`.'); - } - - try { - const sqlStatement = queries.survey.getSurveyOccurrenceSubmissionSQL(occurrenceSubmissionId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - await connection.open(); - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - await connection.commit(); - - if (!response || !response.rows.length) { - throw new HTTP400('Failed to get survey occurrence submission'); - } - - req['occurrence_submission'] = response.rows[0]; - - next(); - } catch (error) { - defaultLog.error({ label: 'getOccurrenceSubmission', message: 'error', error }); - throw error; - } finally { - connection.release(); - } - }; -} - -export function getOccurrenceSubmissionInputS3Key(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'getSubmissionS3Key', message: 'params', files: req.body }); - const occurrence_submission = req['occurrence_submission']; - - req['s3Key'] = occurrence_submission.input_key; - - next(); - }; -} - -export function getS3File(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'getS3File', message: 'params', files: req.body }); - - try { - const s3Key = req['s3Key']; - - const s3File = await getFileFromS3(s3Key); - - if (!s3File) { - throw new HTTP500('Failed to get file from S3'); - } - - req['s3File'] = s3File; - - next(); - } catch (error) { - defaultLog.error({ label: 'getS3File', message: 'error', error }); - throw error; - } - }; -} - -export function prepDWCArchive(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'prepDWCArchive', message: 's3File' }); - - try { - const s3File = req['s3File']; - - const parsedMedia = parseUnknownMedia(s3File); - - if (!parsedMedia) { - req['parseError'] = 'Failed to parse submission, file was empty'; - - return next(); - } - - if (!(parsedMedia instanceof ArchiveFile)) { - req['parseError'] = 'Failed to parse submission, not a valid DwC Archive Zip file'; - - return next(); - } - - const dwcArchive = new DWCArchive(parsedMedia); - - req['dwcArchive'] = dwcArchive; - - next(); - } catch (error) { - defaultLog.error({ label: 'prepDWCArchive', message: 'error', error }); - throw error; - } - }; -} - -export function persistParseErrors(): RequestHandler { - return async (req, res, next) => { - const parseError = req['parseError']; - - if (!parseError) { - // no errors to persist, skip to next step - return next(); - } - - defaultLog.debug({ label: 'persistParseErrors', message: 'parseError', parseError }); - - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - const submissionStatusId = await insertSubmissionStatus( - req.body.occurrence_submission_id, - 'Rejected', - connection - ); - - await insertSubmissionMessage(submissionStatusId, 'Error', parseError, 'Miscellaneous', connection); - - await connection.commit(); - - // archive is not parsable, don't continue to next step and return early - return res.status(200).json({ status: 'failed' }); - } catch (error) { - defaultLog.error({ label: 'persistParseErrors', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} - -function getValidationSchema(): RequestHandler { - return async (req, res, next) => { - req['validationSchema'] = {}; - - next(); - }; -} - -export function getValidationRules(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'getValidationRules', message: 's3File' }); - - try { - const validationSchema: JSON = req['validationSchema']; - - const validationSchemaParser = new ValidationSchemaParser(validationSchema); - - req['validationSchemaParser'] = validationSchemaParser; - - next(); - } catch (error) { - defaultLog.error({ label: 'getValidationRules', message: 'error', error }); - throw error; - } - }; -} - -function validateDWCArchive(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'validateDWCArchive', message: 'dwcArchive' }); - - try { - const dwcArchive: DWCArchive = req['dwcArchive']; - - const validationSchemaParser: ValidationSchemaParser = req['validationSchemaParser']; - - const mediaState: IMediaState = dwcArchive.isMediaValid(validationSchemaParser); - - req['mediaState'] = mediaState; - - if (!mediaState.isValid) { - // The file itself is invalid, skip remaining validation - return next(); - } - - const csvState: ICsvState[] = dwcArchive.isContentValid(validationSchemaParser); - - req['csvState'] = csvState; - - next(); - } catch (error) { - defaultLog.error({ label: 'validateDWCArchive', message: 'error', error }); - throw error; - } - }; -} - -export function generateHeaderErrorMessage(fileName: string, headerError: IHeaderError): string { - return `${fileName} - ${headerError.message} - Column: ${headerError.col}`; -} - -export function generateRowErrorMessage(fileName: string, rowError: IRowError): string { - return `${fileName} - ${rowError.message} - Column: ${rowError.col} - Row: ${rowError.row}`; -} - -export function persistValidationResults(statusTypeObject: any): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'persistValidationResults', message: 'validationResults' }); - - const connection = getDBConnection(req['keycloak_token']); - - try { - const mediaState: IMediaState = req['mediaState']; - const csvState: ICsvState[] = req['csvState']; - - await connection.open(); - - let submissionStatusType = statusTypeObject.initialSubmissionStatusType; - if (!mediaState.isValid || csvState?.some((item) => !item.isValid)) { - // At least 1 error exists - submissionStatusType = 'Rejected'; - } - - const submissionStatusId = await insertSubmissionStatus( - req.body.occurrence_submission_id, - submissionStatusType, - connection - ); - - const promises: Promise[] = []; - - mediaState.fileErrors?.forEach((fileError) => { - promises.push( - insertSubmissionMessage(submissionStatusId, 'Error', `${fileError}`, 'Miscellaneous', connection) - ); - }); - - csvState?.forEach((csvStateItem) => { - csvStateItem.headerErrors?.forEach((headerError) => { - promises.push( - insertSubmissionMessage( - submissionStatusId, - 'Error', - generateHeaderErrorMessage(csvStateItem.fileName, headerError), - headerError.errorCode, - connection - ) - ); - }); - - csvStateItem.rowErrors?.forEach((rowError) => { - promises.push( - insertSubmissionMessage( - submissionStatusId, - 'Error', - generateRowErrorMessage(csvStateItem.fileName, rowError), - rowError.errorCode, - connection - ) - ); - }); - }); - - await Promise.all(promises); - - await connection.commit(); - - if (!mediaState.isValid || csvState?.some((item) => !item.isValid)) { - // At least 1 error exists, skip remaining steps - return res.status(200).json({ status: 'failed' }); - } - - return next(); - } catch (error) { - defaultLog.error({ label: 'persistValidationResults', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} - -export function updateOccurrenceSubmission(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'updateOccurrenceSubmission', message: 'Update output file name and output key' }); - - const dwcArchive: DWCArchive = req['dwcArchive']; - const inputFileName = dwcArchive.rawFile.name; - const s3Key: string = req['s3Key']; - - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - // Update occurrence submission record to include the DWC output file name and s3 key (which in this case are the - // same as the input file name and s3 key, as it is already a DWC zip) - await updateSurveyOccurrenceSubmissionWithOutputKey( - req.body.occurrence_submission_id, - inputFileName, - s3Key, - connection - ); - - await connection.commit(); - - next(); - } catch (error) { - defaultLog.debug({ label: 'updateOccurrenceSubmission', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} - -export function sendResponse(): RequestHandler { - return async (req, res) => { - return res.status(200).json({ status: 'success' }); - }; -} - -/** - * Insert a record into the submission_status table. - * - * @param {number} occurrenceSubmissionId - * @param {string} submissionStatusType - * @param {IDBConnection} connection - * @return {*} {Promise} - */ -export const insertSubmissionStatus = async ( - occurrenceSubmissionId: number, - submissionStatusType: string, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.survey.insertOccurrenceSubmissionStatusSQL(occurrenceSubmissionId, submissionStatusType); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new HTTP400('Failed to insert survey submission status data'); - } - - return result.id; -}; - -/** - * Insert a record into the submission_message table. - * - * @param {number} submissionStatusId - * @param {string} submissionMessageType - * @param {string} message - * @param {IDBConnection} connection - * @return {*} {Promise} - */ -export const insertSubmissionMessage = async ( - submissionStatusId: number, - submissionMessageType: string, - message: string, - errorCode: string, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.survey.insertOccurrenceSubmissionMessageSQL( - submissionStatusId, - submissionMessageType, - message, - errorCode - ); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to insert survey submission message data'); - } -}; - -/** - * Update existing `occurrence_submission` record with outputKey and outputFileName. - * - * @param {number} submissionId - * @param {string} outputFileName - * @param {string} outputKey - * @param {IDBConnection} connection - * @return {*} {Promise} - */ -export const updateSurveyOccurrenceSubmissionWithOutputKey = async ( - submissionId: number, - outputFileName: string, - outputKey: string, - connection: IDBConnection -): Promise => { - const updateSqlStatement = queries.survey.updateSurveyOccurrenceSubmissionSQL({ - submissionId, - outputFileName, - outputKey - }); - - if (!updateSqlStatement) { - throw new HTTP400('Failed to build SQL update statement'); - } - - const updateResponse = await connection.query(updateSqlStatement.text, updateSqlStatement.values); - - if (!updateResponse || !updateResponse.rowCount) { - throw new HTTP400('Failed to update survey occurrence submission record'); - } - - return updateResponse; -}; diff --git a/api/src/paths/dwc/view-occurrences.test.ts b/api/src/paths/dwc/view-occurrences.test.ts index 07f78bf942..ef85d7b1d7 100644 --- a/api/src/paths/dwc/view-occurrences.test.ts +++ b/api/src/paths/dwc/view-occurrences.test.ts @@ -2,10 +2,10 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../database/db'; -import { HTTPError } from '../../errors/custom-error'; -import occurrence_queries from '../../queries/occurrence'; +import { HTTPError } from '../../errors/http-error'; +import { ErrorService } from '../../services/error-service'; +import { OccurrenceService } from '../../services/occurrence-service'; import { getMockDBConnection } from '../../__mocks__/db'; import * as view_occurrences from './view-occurrences'; @@ -37,68 +37,17 @@ describe('getOccurrencesForView', () => { sinon.restore(); }); - it('should throw a 400 error when no occurrence submission id in request body', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - try { - const result = view_occurrences.getOccurrencesForView(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal( - 'Missing required request body param `occurrence_submission_id`' - ); - } - }); - - it('should throw an error when failed to build SQL get occurrences for view statement', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(occurrence_queries, 'getOccurrencesForViewSQL').returns(null); - - try { - const result = view_occurrences.getOccurrencesForView(); - - await result( - { ...sampleReq, body: { occurrence_submission_id: 1 } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get occurrences for view statement'); - } - }); - it('should throw an error when failed to get occurrences view data', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rows: null - }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); + const expectedError = new Error('cannot process request'); + sinon.stub(OccurrenceService.prototype, 'getOccurrences').rejects(expectedError); - sinon.stub(occurrence_queries, 'getOccurrencesForViewSQL').returns(SQL`something`); + sinon.stub(ErrorService.prototype, 'insertSubmissionStatus').resolves(); try { const result = view_occurrences.getOccurrencesForView(); @@ -110,8 +59,7 @@ describe('getOccurrencesForView', () => { ); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to get occurrences view data'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); @@ -128,47 +76,19 @@ describe('getOccurrencesForView', () => { organismquantitytype: 'Q-type', eventdate: '2020/04/04' }; - - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rows: [data] - }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(occurrence_queries, 'getOccurrencesForViewSQL').returns(SQL`something`); + sinon.stub(OccurrenceService.prototype, 'getOccurrences').resolves([data]); const result = view_occurrences.getOccurrencesForView(); await result({ ...sampleReq, body: { occurrence_submission_id: 1 } }, sampleRes as any, (null as unknown) as any); - expect(actualResult).to.be.eql([ - { - geometry: { - type: 'Feature', - geometry: { - type: 'Point', - coordinates: [50.7, 60.9] - }, - properties: {} - }, - taxonId: data.taxonid, - occurrenceId: data.occurrence_id, - individualCount: Number(data.individualcount), - lifeStage: data.lifestage, - sex: data.sex, - organismQuantity: Number(data.organismquantity), - organismQuantityType: data.organismquantitytype, - vernacularName: data.vernacularname, - eventDate: data.eventdate - } - ]); + expect(actualResult).to.be.eql([data]); }); }); diff --git a/api/src/paths/dwc/view-occurrences.ts b/api/src/paths/dwc/view-occurrences.ts index 23da6e2ac2..0575f40a8e 100644 --- a/api/src/paths/dwc/view-occurrences.ts +++ b/api/src/paths/dwc/view-occurrences.ts @@ -1,11 +1,11 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../constants/roles'; +import { SUBMISSION_STATUS_TYPE } from '../../constants/status'; import { getDBConnection } from '../../database/db'; -import { HTTP400 } from '../../errors/custom-error'; -import { GetOccurrencesViewData } from '../../models/occurrence-view'; -import { queries } from '../../queries/queries'; import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; +import { ErrorService } from '../../services/error-service'; +import { OccurrenceService } from '../../services/occurrence-service'; import { getLogger } from '../../utils/logger'; const defaultLog = getLogger('paths/dwc/view-occurrences'); @@ -41,13 +41,10 @@ POST.apiDoc = { type: 'object', required: ['occurrence_submission_id'], properties: { - project_id: { - type: 'number' - }, occurrence_submission_id: { description: 'A survey occurrence submission ID', - type: 'number', - example: 1 + type: 'integer', + minimum: 1 } } } @@ -93,33 +90,25 @@ POST.apiDoc = { export function getOccurrencesForView(): RequestHandler { return async (req, res) => { const connection = getDBConnection(req['keycloak_token']); - - if (!req.body || !req.body.occurrence_submission_id) { - throw new HTTP400('Missing required request body param `occurrence_submission_id`'); - } + const submissionId = req.body.occurrence_submission_id; try { await connection.open(); - - const sqlStatement = queries.occurrence.getOccurrencesForViewSQL(Number(req.body.occurrence_submission_id)); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL get occurrences for view statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rows) { - throw new HTTP400('Failed to get occurrences view data'); - } - - const result = new GetOccurrencesViewData(response.rows); - + const service = new OccurrenceService(connection); + const occurrenceData = await service.getOccurrences(req.body.occurrence_submission_id); await connection.commit(); - return res.status(200).json(result.occurrences); + return res.status(200).json(occurrenceData); } catch (error) { defaultLog.error({ label: 'getOccurrencesForView', message: 'error', error }); + + // Unexpected error occurred, rolling DB back to safe state + await connection.rollback(); + + // We still want to track that the submission failed to present to the user + const errorService = new ErrorService(connection); + await errorService.insertSubmissionStatus(submissionId, SUBMISSION_STATUS_TYPE.SYSTEM_ERROR); + await connection.commit(); throw error; } finally { connection.release(); diff --git a/api/src/paths/gcnotify/send.test.ts b/api/src/paths/gcnotify/send.test.ts index c80f4a4d0c..6089dc8d25 100644 --- a/api/src/paths/gcnotify/send.test.ts +++ b/api/src/paths/gcnotify/send.test.ts @@ -3,7 +3,6 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import { HTTPError } from '../../errors/custom-error'; import { getRequestHandlerMocks } from '../../__mocks__/db'; import * as notify from './send'; @@ -43,125 +42,6 @@ describe('gcnotify', () => { } }; - it('should throw a 400 error when no req body', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = sampleReq.params; - mockReq.body = null; - - try { - const requestHandler = notify.sendNotification(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required param: body'); - } - }); - - it('should throw a 400 error when no recipient', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = sampleReq.params; - mockReq.body = { ...sampleReq.body, recipient: null }; - - try { - const requestHandler = notify.sendNotification(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required body param: recipient'); - } - }); - - it('should throw a 400 error when no message', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = sampleReq.params; - mockReq.body = { ...sampleReq.body, message: null }; - - try { - const requestHandler = notify.sendNotification(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required body param: message'); - } - }); - - it('should throw a 400 error when no message.header', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = sampleReq.params; - mockReq.body = { ...sampleReq.body, message: { ...sampleReq.body.message, header: null } }; - - try { - const requestHandler = notify.sendNotification(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required body param: message.header'); - } - }); - - it('should throw a 400 error when no message.body1', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = sampleReq.params; - mockReq.body = { ...sampleReq.body, message: { ...sampleReq.body.message, body1: null } }; - - try { - const requestHandler = notify.sendNotification(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required body param: message.body1'); - } - }); - - it('should throw a 400 error when no message.body2', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = sampleReq.params; - mockReq.body = { ...sampleReq.body, message: { ...sampleReq.body.message, body2: null } }; - - try { - const requestHandler = notify.sendNotification(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required body param: message.body2'); - } - }); - - it('should throw a 400 error when no message.footer', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = sampleReq.params; - mockReq.body = { ...sampleReq.body, message: { ...sampleReq.body.message, footer: null } }; - - try { - const requestHandler = notify.sendNotification(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required body param: message.footer'); - } - }); - it('sends email notification and returns 200 on success', async () => { const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); diff --git a/api/src/paths/gcnotify/send.ts b/api/src/paths/gcnotify/send.ts index 451d3f94d2..7eb0dd5c9e 100644 --- a/api/src/paths/gcnotify/send.ts +++ b/api/src/paths/gcnotify/send.ts @@ -1,14 +1,14 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { SYSTEM_ROLE } from '../../constants/roles'; -import { HTTP400 } from '../../errors/custom-error'; -import { IgcNotifyPostReturn } from '../../models/gcnotify'; import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; -import { GCNotifyService } from '../../services/gcnotify-service'; +import { GCNotifyService, IgcNotifyPostReturn } from '../../services/gcnotify-service'; import { getLogger } from '../../utils/logger'; const defaultLog = getLogger('paths/gcnotify'); +const APP_HOST = process.env.APP_HOST; + export const POST: Operation = [ authorizeRequestHandler(() => { return { @@ -42,17 +42,7 @@ POST.apiDoc = { properties: { recipient: { type: 'object', - oneOf: [ - { - required: ['emailAddress'] - }, - { - required: ['phoneNumber'] - }, - { - required: ['userId'] - } - ], + required: ['emailAddress', 'userId'], properties: { emailAddress: { type: 'string' @@ -107,10 +97,12 @@ POST.apiDoc = { type: 'string' }, reference: { - type: 'string' + type: 'string', + nullable: true }, scheduled_for: { - type: 'string' + type: 'string', + nullable: true }, template: { type: 'object' @@ -149,35 +141,7 @@ POST.apiDoc = { export function sendNotification(): RequestHandler { return async (req, res) => { const recipient = req.body?.recipient || null; - const message = req.body?.message || null; - - if (!req.body) { - throw new HTTP400('Missing required param: body'); - } - - if (!recipient) { - throw new HTTP400('Missing required body param: recipient'); - } - - if (!message) { - throw new HTTP400('Missing required body param: message'); - } - - if (!message.header) { - throw new HTTP400('Missing required body param: message.header'); - } - - if (!message.body1) { - throw new HTTP400('Missing required body param: message.body1'); - } - - if (!message.body2) { - throw new HTTP400('Missing required body param: message.body2'); - } - - if (!message.footer) { - throw new HTTP400('Missing required body param: message.footer'); - } + const message = { ...req.body?.message, footer: `To access the site, [${APP_HOST}](${APP_HOST})` } || null; try { const gcnotifyService = new GCNotifyService(); @@ -191,9 +155,10 @@ export function sendNotification(): RequestHandler { response = await gcnotifyService.sendPhoneNumberGCNotification(recipient.phoneNumber, message); } - if (recipient.userId) { - defaultLog.error({ label: 'send gcnotify', message: 'email and sms from Id not implemented yet' }); - } + //TODO: send an email or sms depending on users ID and data + // if (recipient.userId) { + // defaultLog.error({ label: 'send gcnotify', message: 'email and sms from Id not implemented yet' }); + // } return res.status(200).json(response); } catch (error) { diff --git a/api/src/paths/logger.test.ts b/api/src/paths/logger.test.ts index d4bf107e56..c8161fa46e 100644 --- a/api/src/paths/logger.test.ts +++ b/api/src/paths/logger.test.ts @@ -1,6 +1,6 @@ import { expect } from 'chai'; import { describe } from 'mocha'; -import { HTTPError } from '../errors/custom-error'; +import { HTTPError } from '../errors/http-error'; import * as logger from './logger'; describe('logger', () => { diff --git a/api/src/paths/logger.ts b/api/src/paths/logger.ts index 676d2554a0..7826c31839 100644 --- a/api/src/paths/logger.ts +++ b/api/src/paths/logger.ts @@ -1,7 +1,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { SYSTEM_ROLE } from '../constants/roles'; -import { HTTP400 } from '../errors/custom-error'; +import { HTTP400 } from '../errors/http-error'; import { authorizeRequestHandler } from '../request-handlers/security/authorization'; import { setLogLevel, WinstonLogLevel, WinstonLogLevels } from '../utils/logger'; diff --git a/api/src/paths/permit/create-no-sampling.test.ts b/api/src/paths/permit/create-no-sampling.test.ts deleted file mode 100644 index 94dac152d7..0000000000 --- a/api/src/paths/permit/create-no-sampling.test.ts +++ /dev/null @@ -1,60 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import * as db from '../../database/db'; -import { HTTPError } from '../../errors/custom-error'; -import { PermitService } from '../../services/permit-service'; -import { getMockDBConnection, getRequestHandlerMocks } from '../../__mocks__/db'; -import { createNoSamplePermits } from './create-no-sampling'; - -chai.use(sinonChai); - -describe('create-no-sampling', () => { - describe('createNoSamplePermits', () => { - afterEach(() => { - sinon.restore(); - }); - - it('catches error, calls rollback, and re-throws error', async () => { - const dbConnectionObj = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon.stub(PermitService.prototype, 'createNoSamplePermits').rejects(new Error('a test error')); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - try { - const requestHandler = createNoSamplePermits(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect(dbConnectionObj.rollback).to.have.been.called; - expect(dbConnectionObj.release).to.have.been.called; - expect((actualError as HTTPError).message).to.equal('a test error'); - } - }); - - it('creates a new non sample permit', async () => { - const dbConnectionObj = getMockDBConnection(); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon.stub(PermitService.prototype, 'createNoSamplePermits').resolves([1]); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - try { - const requestHandler = createNoSamplePermits(); - - await requestHandler(mockReq, mockRes, mockNext); - } catch (actualError) { - expect.fail(); - } - - expect(mockRes.statusValue).to.equal(200); - expect(mockRes.jsonValue).to.eql({ ids: [1] }); - }); - }); -}); diff --git a/api/src/paths/permit/create-no-sampling.ts b/api/src/paths/permit/create-no-sampling.ts deleted file mode 100644 index 643985536a..0000000000 --- a/api/src/paths/permit/create-no-sampling.ts +++ /dev/null @@ -1,104 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE, SYSTEM_ROLE } from '../../constants/roles'; -import { getDBConnection } from '../../database/db'; -import { permitNoSamplingPostBody, permitNoSamplingResponseBody } from '../../openapi/schemas/permit-no-sampling'; -import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; -import { PermitService } from '../../services/permit-service'; -import { getLogger } from '../../utils/logger'; - -const defaultLog = getLogger('/api/permit/create-no-sampling'); - -export const POST: Operation = [ - authorizeRequestHandler((req) => { - return { - or: [ - { - validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR], - discriminator: 'SystemRole' - }, - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD], - projectId: Number(req.params.projectId), - discriminator: 'ProjectRole' - } - ] - }; - }), - createNoSamplePermits() -]; - -POST.apiDoc = { - description: 'Creates new no sample permit records.', - tags: ['no-sample-permit'], - security: [ - { - Bearer: [] - } - ], - requestBody: { - description: 'No sample permits post request object.', - content: { - 'application/json': { - schema: { - ...(permitNoSamplingPostBody as object) - } - } - } - }, - responses: { - 200: { - description: 'No sample permits response object.', - content: { - 'application/json': { - schema: { - ...(permitNoSamplingResponseBody as object) - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -/** - * Creates new no sample permit records. - * - * @returns {RequestHandler} - */ -export function createNoSamplePermits(): RequestHandler { - return async (req, res) => { - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - const permitService = new PermitService(connection); - - const result = await permitService.createNoSamplePermits(req.body); - - await connection.commit(); - - return res.status(200).json({ ids: result }); - } catch (error) { - defaultLog.error({ label: 'createNoSamplePermits', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/permit/get-no-sampling.test.ts b/api/src/paths/permit/get-no-sampling.test.ts deleted file mode 100644 index 585132107e..0000000000 --- a/api/src/paths/permit/get-no-sampling.test.ts +++ /dev/null @@ -1,62 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import * as db from '../../database/db'; -import { HTTPError } from '../../errors/custom-error'; -import { PermitService } from '../../services/permit-service'; -import { getMockDBConnection, getRequestHandlerMocks } from '../../__mocks__/db'; -import { getNonSamplingPermits } from './get-no-sampling'; - -chai.use(sinonChai); - -describe('get-no-sampling', () => { - describe('getNonSamplingPermits', () => { - afterEach(() => { - sinon.restore(); - }); - - it('catches error, calls rollback, and re-throws error', async () => { - const dbConnectionObj = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon.stub(PermitService.prototype, 'getNonSamplingPermits').rejects(new Error('a test error')); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - try { - const requestHandler = getNonSamplingPermits(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect(dbConnectionObj.rollback).to.have.been.called; - expect(dbConnectionObj.release).to.have.been.called; - expect((actualError as HTTPError).message).to.equal('a test error'); - } - }); - - it('gets non sample permits', async () => { - const dbConnectionObj = getMockDBConnection(); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon - .stub(PermitService.prototype, 'getNonSamplingPermits') - .resolves([{ permit_id: '1', number: '2', type: '3' }]); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - try { - const requestHandler = getNonSamplingPermits(); - - await requestHandler(mockReq, mockRes, mockNext); - } catch (actualError) { - expect.fail(); - } - - expect(mockRes.statusValue).to.equal(200); - expect(mockRes.jsonValue).to.eql([{ permit_id: '1', number: '2', type: '3' }]); - }); - }); -}); diff --git a/api/src/paths/permit/get-no-sampling.ts b/api/src/paths/permit/get-no-sampling.ts deleted file mode 100644 index ac0a7d583e..0000000000 --- a/api/src/paths/permit/get-no-sampling.ts +++ /dev/null @@ -1,107 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE, SYSTEM_ROLE } from '../../constants/roles'; -import { getDBConnection } from '../../database/db'; -import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; -import { PermitService } from '../../services/permit-service'; -import { getLogger } from '../../utils/logger'; - -const defaultLog = getLogger('/api/permit/get-no-sampling'); - -export const GET: Operation = [ - authorizeRequestHandler((req) => { - return { - or: [ - { - validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR], - discriminator: 'SystemRole' - }, - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR, PROJECT_ROLE.PROJECT_VIEWER], - projectId: Number(req.params.projectId), - discriminator: 'ProjectRole' - } - ] - }; - }), - getNonSamplingPermits() -]; - -GET.apiDoc = { - description: 'Fetches a list of non-sampling permits.', - tags: ['non-sampling-permits'], - security: [ - { - Bearer: [] - } - ], - responses: { - 200: { - description: 'Non-sampling permits get response array.', - content: { - 'application/json': { - schema: { - type: 'array', - items: { - title: 'Non-sampling permit Get Response Object', - type: 'object', - properties: { - id: { - type: 'number' - }, - number: { - type: 'string' - }, - type: { - type: 'string' - } - } - }, - description: 'Non-sampling permits' - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function getNonSamplingPermits(): RequestHandler { - return async (req, res) => { - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - const systemUserId = connection.systemUserId(); - - const permitService = new PermitService(connection); - - const getNonSamplingPermitsData = await permitService.getNonSamplingPermits(systemUserId); - - await connection.commit(); - - return res.status(200).json(getNonSamplingPermitsData); - } catch (error) { - defaultLog.error({ label: 'getNonSamplingPermits', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/permit/list.test.ts b/api/src/paths/permit/list.test.ts deleted file mode 100644 index 9214217a58..0000000000 --- a/api/src/paths/permit/list.test.ts +++ /dev/null @@ -1,64 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import * as db from '../../database/db'; -import { HTTPError } from '../../errors/custom-error'; -import { PermitService } from '../../services/permit-service'; -import { getMockDBConnection, getRequestHandlerMocks } from '../../__mocks__/db'; -import { getAllPermits } from './list'; - -chai.use(sinonChai); - -describe('permit-list', () => { - describe('getAllPermits', () => { - afterEach(() => { - sinon.restore(); - }); - - it('catches error, calls rollback, and re-throws error', async () => { - const dbConnectionObj = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon.stub(PermitService.prototype, 'getAllPermits').rejects(new Error('a test error')); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - try { - const requestHandler = getAllPermits(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect(dbConnectionObj.rollback).to.have.been.called; - expect(dbConnectionObj.release).to.have.been.called; - expect((actualError as HTTPError).message).to.equal('a test error'); - } - }); - - it('gets non sample permits', async () => { - const dbConnectionObj = getMockDBConnection(); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon - .stub(PermitService.prototype, 'getAllPermits') - .resolves([{ id: '1', number: '2', type: '3', coordinator_agency: '4', project_name: '5' }]); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - try { - const requestHandler = getAllPermits(); - - await requestHandler(mockReq, mockRes, mockNext); - } catch (actualError) { - expect.fail(); - } - - expect(mockRes.statusValue).to.equal(200); - expect(mockRes.jsonValue).to.eql([ - { id: '1', number: '2', type: '3', coordinator_agency: '4', project_name: '5' } - ]); - }); - }); -}); diff --git a/api/src/paths/permit/list.ts b/api/src/paths/permit/list.ts deleted file mode 100644 index 42621829b7..0000000000 --- a/api/src/paths/permit/list.ts +++ /dev/null @@ -1,111 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE, SYSTEM_ROLE } from '../../constants/roles'; -import { getDBConnection } from '../../database/db'; -import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; -import { PermitService } from '../../services/permit-service'; -import { getLogger } from '../../utils/logger'; - -const defaultLog = getLogger('/api/permits/list'); - -export const GET: Operation = [ - authorizeRequestHandler((req) => { - return { - or: [ - { - validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR], - discriminator: 'SystemRole' - }, - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR, PROJECT_ROLE.PROJECT_VIEWER], - projectId: Number(req.params.projectId), - discriminator: 'ProjectRole' - } - ] - }; - }), - getAllPermits() -]; - -GET.apiDoc = { - description: 'Fetches a list of all permits by system user id.', - tags: ['permits'], - security: [ - { - Bearer: [] - } - ], - responses: { - 200: { - description: 'Permits get response array.', - content: { - 'application/json': { - schema: { - type: 'array', - items: { - title: 'Permit Get Response Object', - type: 'object', - properties: { - number: { - type: 'string' - }, - type: { - type: 'string' - }, - coordinator_agency: { - type: 'string' - }, - project_name: { - type: 'string', - nullable: true - } - } - }, - description: 'All permits in the permits table for the appropriate system user' - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function getAllPermits(): RequestHandler { - return async (req, res) => { - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - const systemUserId = connection.systemUserId(); - - const permitService = new PermitService(connection); - - const getPermitsData = await permitService.getAllPermits(systemUserId); - - await connection.commit(); - - return res.status(200).json(getPermitsData); - } catch (error) { - defaultLog.error({ label: 'getAllPermits', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/project/create.test.ts b/api/src/paths/project/create.test.ts index 246391f48e..8a6f33a0b1 100644 --- a/api/src/paths/project/create.test.ts +++ b/api/src/paths/project/create.test.ts @@ -4,7 +4,8 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../database/db'; -import { HTTPError } from '../../errors/custom-error'; +import { HTTPError } from '../../errors/http-error'; +import { PlatformService } from '../../services/platform-service'; import { ProjectService } from '../../services/project-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../__mocks__/db'; import { createProject, POST } from './create'; @@ -32,6 +33,8 @@ describe('create', () => { sinon.stub(ProjectService.prototype, 'createProject').resolves(1); + sinon.stub(PlatformService.prototype, 'submitDwCAMetadataPackage').resolves(); + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); try { diff --git a/api/src/paths/project/create.ts b/api/src/paths/project/create.ts index 970bfb7453..bb363f8d79 100644 --- a/api/src/paths/project/create.ts +++ b/api/src/paths/project/create.ts @@ -5,6 +5,7 @@ import { getDBConnection } from '../../database/db'; import { PostProjectObject } from '../../models/project-create'; import { projectCreatePostRequestObject, projectIdResponseObject } from '../../openapi/schemas/project'; import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; +import { PlatformService } from '../../services/platform-service'; import { ProjectService } from '../../services/project-service'; import { getLogger } from '../../utils/logger'; @@ -15,7 +16,7 @@ export const POST: Operation = [ return { and: [ { - validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR], + validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR, SYSTEM_ROLE.DATA_ADMINISTRATOR], discriminator: 'SystemRole' } ] @@ -89,6 +90,14 @@ export function createProject(): RequestHandler { const projectId = await projectService.createProject(sanitizedProjectPostData); + try { + const platformService = new PlatformService(connection); + await platformService.submitDwCAMetadataPackage(projectId); + } catch (error) { + // Don't fail the rest of the endpoint if submitting metadata fails + defaultLog.error({ label: 'createProject->submitDwCAMetadataPackage', message: 'error', error }); + } + await connection.commit(); return res.status(200).json({ id: projectId }); diff --git a/api/src/paths/project/list.test.ts b/api/src/paths/project/list.test.ts index 98b9e9b379..c191e280a6 100644 --- a/api/src/paths/project/list.test.ts +++ b/api/src/paths/project/list.test.ts @@ -5,7 +5,7 @@ import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import { SYSTEM_ROLE } from '../../constants/roles'; import * as db from '../../database/db'; -import { HTTPError } from '../../errors/custom-error'; +import { HTTPError } from '../../errors/http-error'; import * as authorization from '../../request-handlers/security/authorization'; import { ProjectService } from '../../services/project-service'; import { getMockDBConnection } from '../../__mocks__/db'; diff --git a/api/src/paths/project/list.ts b/api/src/paths/project/list.ts index b47fda903e..779368af8d 100644 --- a/api/src/paths/project/list.ts +++ b/api/src/paths/project/list.ts @@ -40,10 +40,6 @@ GET.apiDoc = { type: 'string', nullable: true }, - permit_number: { - type: 'string', - nullable: true - }, project_type: { type: 'string', nullable: true diff --git a/api/src/paths/project/{projectId}/attachments/list.test.ts b/api/src/paths/project/{projectId}/attachments/list.test.ts index 69b39fac03..f9c9f0c6fd 100644 --- a/api/src/paths/project/{projectId}/attachments/list.test.ts +++ b/api/src/paths/project/{projectId}/attachments/list.test.ts @@ -2,43 +2,21 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../database/db'; -import { HTTPError } from '../../../../errors/custom-error'; -import project_queries from '../../../../queries/project'; -import { getMockDBConnection } from '../../../../__mocks__/db'; -import * as listAttachments from './list'; - +import { HTTPError } from '../../../../errors/http-error'; +import { GetAttachmentsData } from '../../../../models/project-survey-attachments'; +import { AttachmentService } from '../../../../services/attachment-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../../../__mocks__/db'; +import * as list from './list'; chai.use(sinonChai); -describe('lists the project attachments', () => { - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - body: {}, - params: { - projectId: 1 - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - +describe('getAttachments', () => { afterEach(() => { sinon.restore(); }); - it('should throw a 400 error when no sql statement returned for getProjectAttachmentsSQL', async () => { + it('should throw an error when a failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -46,189 +24,60 @@ describe('lists the project attachments', () => { } }); - sinon.stub(project_queries, 'getProjectAttachmentsSQL').returns(null); + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + const expectedError = new Error('cannot process request'); + + sinon.stub(AttachmentService.prototype, 'getProjectAttachments').rejects(expectedError); try { - const result = listAttachments.getAttachments(); + const result = list.getAttachments(); - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); + await result(mockReq, mockRes, mockNext); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should return a list of project attachments where the lastModified is the create_date', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ - rows: [ - { - id: 13, - file_name: 'name1', - create_date: '2020-01-01', - update_date: '', - file_size: 50, - file_type: 'type', - security_token: 'token123' - } - ] - }) - .onSecondCall() - .resolves({ - rows: [ - { - id: 134, - file_name: 'name2', - create_date: '2020-01-01', - update_date: '', - file_size: 50, - security_token: 'token123' - } - ] - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(project_queries, 'getProjectAttachmentsSQL').returns(SQL`something`); - - const result = listAttachments.getAttachments(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.be.an('object'); - expect(actualResult).to.have.property('attachmentsList'); - - expect(actualResult.attachmentsList).to.be.an('array'); - expect(actualResult.attachmentsList).to.have.length(2); - - expect(actualResult.attachmentsList[0].fileName).to.equal('name1'); - expect(actualResult.attachmentsList[0].fileType).to.equal('type'); - expect(actualResult.attachmentsList[0].id).to.equal(13); - expect(actualResult.attachmentsList[0].lastModified).to.match(new RegExp('2020-01-01T.*')); - expect(actualResult.attachmentsList[0].size).to.equal(50); - expect(actualResult.attachmentsList[0].securityToken).to.equal('token123'); - - expect(actualResult.attachmentsList[1].fileName).to.equal('name2'); - expect(actualResult.attachmentsList[1].fileType).to.equal('Report'); - expect(actualResult.attachmentsList[1].id).to.equal(134); - expect(actualResult.attachmentsList[1].lastModified).to.match(new RegExp('2020-01-01T.*')); - expect(actualResult.attachmentsList[1].size).to.equal(50); - expect(actualResult.attachmentsList[1].securityToken).to.equal('token123'); - }); - - it('should return a list of project attachments where the lastModified is the update_date', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ - rows: [ - { - id: 13, - file_name: 'name1', - create_date: '2020-01-01', - update_date: '2020-01-02', - file_size: 50, - file_type: 'type', - security_token: 'token123' - } - ] - }) - .onSecondCall() - .resolves({ - rows: [ - { - id: 134, - file_name: 'name2', - create_date: '2020-01-01', - update_date: '2020-01-02', - file_size: 50, - security_token: 'token123' - } - ] - }); - + it('should succeed with valid params', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(project_queries, 'getProjectAttachmentsSQL').returns(SQL`something`); - - const result = listAttachments.getAttachments(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.be.an('object'); - expect(actualResult).to.have.property('attachmentsList'); + const getProjectAttachmentsStub = sinon.stub(AttachmentService.prototype, 'getProjectAttachments').resolves([]); + sinon.stub(AttachmentService.prototype, 'getProjectReportAttachments').resolves([]); - expect(actualResult.attachmentsList).to.be.an('array'); - expect(actualResult.attachmentsList).to.have.length(2); + const expectedResponse = new GetAttachmentsData([], []); - expect(actualResult.attachmentsList[0].fileName).to.equal('name1'); - expect(actualResult.attachmentsList[0].fileType).to.equal('type'); - expect(actualResult.attachmentsList[0].id).to.equal(13); - expect(actualResult.attachmentsList[0].lastModified).to.match(new RegExp('2020-01-02T.*')); - expect(actualResult.attachmentsList[0].size).to.equal(50); - expect(actualResult.attachmentsList[0].securityToken).to.equal('token123'); - - expect(actualResult.attachmentsList[1].fileName).to.equal('name2'); - expect(actualResult.attachmentsList[1].fileType).to.equal('Report'); - expect(actualResult.attachmentsList[1].id).to.equal(134); - expect(actualResult.attachmentsList[1].lastModified).to.match(new RegExp('2020-01-02T.*')); - expect(actualResult.attachmentsList[1].size).to.equal(50); - expect(actualResult.attachmentsList[1].securityToken).to.equal('token123'); - }); - - it('should return null if the project has no attachments, on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: undefined }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + const mockReq = { + keycloak_token: {}, + params: { + projectId: 1, + attachmentId: 2 }, - query: mockQuery - }); - - sinon.stub(project_queries, 'getProjectAttachmentsSQL').returns(SQL`something`); - - const result = listAttachments.getAttachments(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.be.null; - }); + body: {} + }; + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; - it('should throw a 400 error when no projectId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + const result = list.getAttachments(); - try { - const result = listAttachments.getAttachments(); - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } + await result((mockReq as unknown) as any, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(expectedResponse); + expect(getProjectAttachmentsStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/attachments/list.ts b/api/src/paths/project/{projectId}/attachments/list.ts index db16dfca84..202631f773 100644 --- a/api/src/paths/project/{projectId}/attachments/list.ts +++ b/api/src/paths/project/{projectId}/attachments/list.ts @@ -2,10 +2,9 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../constants/roles'; import { getDBConnection } from '../../../../database/db'; -import { HTTP400 } from '../../../../errors/custom-error'; import { GetAttachmentsData } from '../../../../models/project-survey-attachments'; -import { queries } from '../../../../queries/queries'; import { authorizeRequestHandler } from '../../../../request-handlers/security/authorization'; +import { AttachmentService } from '../../../../services/attachment-service'; import { getLogger } from '../../../../utils/logger'; const defaultLog = getLogger('/api/project/{projectId}/attachments/list'); @@ -38,7 +37,8 @@ GET.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -55,7 +55,7 @@ GET.apiDoc = { type: 'array', items: { type: 'object', - required: ['id', 'fileName', 'fileType', 'lastModified', 'securityToken', 'size'], + required: ['id', 'fileName', 'fileType', 'lastModified', 'size'], properties: { id: { type: 'number' @@ -69,11 +69,6 @@ GET.apiDoc = { lastModified: { type: 'string' }, - securityToken: { - description: 'The security token of the attachment', - type: 'string', - nullable: true - }, size: { type: 'number' } @@ -98,43 +93,20 @@ export function getAttachments(): RequestHandler { return async (req, res) => { defaultLog.debug({ label: 'Get attachments list', message: 'params', req_params: req.params }); - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - const connection = getDBConnection(req['keycloak_token']); + const projectId = Number(req.params.projectId); try { - const getProjectAttachmentsSQLStatement = queries.project.getProjectAttachmentsSQL(Number(req.params.projectId)); - const getProjectReportAttachmentsSQLStatement = queries.project.getProjectReportAttachmentsSQL( - Number(req.params.projectId) - ); - - if (!getProjectAttachmentsSQLStatement || !getProjectReportAttachmentsSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - await connection.open(); - const attachmentsData = await connection.query( - getProjectAttachmentsSQLStatement.text, - getProjectAttachmentsSQLStatement.values - ); + const attachmentService = new AttachmentService(connection); - const reportAttachmentsData = await connection.query( - getProjectReportAttachmentsSQLStatement.text, - getProjectReportAttachmentsSQLStatement.values - ); + const attachmentsData = await attachmentService.getProjectAttachments(projectId); + const reportAttachmentsData = await attachmentService.getProjectReportAttachments(projectId); await connection.commit(); - const getAttachmentsData = - (attachmentsData && - reportAttachmentsData && - attachmentsData.rows && - reportAttachmentsData.rows && - new GetAttachmentsData([...attachmentsData.rows, ...reportAttachmentsData.rows])) || - null; + const getAttachmentsData = new GetAttachmentsData(attachmentsData, reportAttachmentsData); return res.status(200).json(getAttachmentsData); } catch (error) { diff --git a/api/src/paths/project/{projectId}/attachments/report/upload.test.ts b/api/src/paths/project/{projectId}/attachments/report/upload.test.ts index 726f4fb84f..252c96b481 100644 --- a/api/src/paths/project/{projectId}/attachments/report/upload.test.ts +++ b/api/src/paths/project/{projectId}/attachments/report/upload.test.ts @@ -3,7 +3,8 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; +import { HTTPError } from '../../../../../errors/http-error'; +import { AttachmentService } from '../../../../../services/attachment-service'; import * as file_utils from '../../../../../utils/file-utils'; import { getMockDBConnection } from '../../../../../__mocks__/db'; import * as upload from './upload'; @@ -37,36 +38,6 @@ describe('uploadMedia', () => { } } as any; - let actualResult: any = null; - - const mockRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - } as any; - - it('should throw an error when projectId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = upload.uploadMedia(); - - await result( - { ...mockReq, params: { ...mockReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing projectId'); - } - }); - it('should throw an error when files are missing', async () => { sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); @@ -81,7 +52,7 @@ describe('uploadMedia', () => { } }); - it('should throw a 400 error when file format incorrect', async () => { + it('should throw an error when file format incorrect', async () => { sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -89,20 +60,20 @@ describe('uploadMedia', () => { } }); - sinon.stub(file_utils, 'scanFileForVirus').resolves(true); + sinon.stub(file_utils, 'scanFileForVirus').resolves(false); try { const result = upload.uploadMedia(); - await result({ ...mockReq, files: ['file1'] }, (null as unknown) as any, (null as unknown) as any); + await result(mockReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); + expect((actualError as HTTPError).message).to.equal('Malicious content detected, upload cancelled'); } }); - it('should throw a 400 error when file contains malicious content', async () => { + it('should throw an error if failure occurs', async () => { sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -110,22 +81,22 @@ describe('uploadMedia', () => { } }); - sinon.stub(file_utils, 'uploadFileToS3').resolves({ Key: '1/1/test.txt' } as any); - sinon.stub(upload, 'upsertProjectReportAttachment').resolves({ id: 1, revision_count: 0, key: 'key' }); - sinon.stub(file_utils, 'scanFileForVirus').resolves(false); + sinon.stub(file_utils, 'scanFileForVirus').resolves(true); + + const expectedError = new Error('cannot process request'); + sinon.stub(AttachmentService.prototype, 'upsertProjectReportAttachment').rejects(expectedError); try { const result = upload.uploadMedia(); - await result(mockReq, mockRes as any, (null as unknown) as any); + await result(mockReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Malicious content detected, upload cancelled'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should return id and revision_count on success (with username and email) when attachmentType is Other', async () => { + it('should succeed with valid params', async () => { sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -134,13 +105,29 @@ describe('uploadMedia', () => { }); sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - sinon.stub(file_utils, 'uploadFileToS3').resolves({ Key: '1/1/test.txt' } as any); - sinon.stub(upload, 'upsertProjectReportAttachment').resolves({ id: 1, revision_count: 0, key: 'key' }); + sinon.stub(file_utils, 'uploadFileToS3').resolves(); + + const expectedResponse = { attachmentId: 1, revision_count: 1 }; + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; - const result = upload.uploadMedia(); + const upsertProjectReportAttachmentStub = sinon + .stub(AttachmentService.prototype, 'upsertProjectReportAttachment') + .resolves({ id: 1, revision_count: 1, key: 'string' }); - await result(mockReq, mockRes as any, (null as unknown) as any); + const result = upload.uploadMedia(); - expect(actualResult).to.eql({ attachmentId: 1, revision_count: 0 }); + await result(mockReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(expectedResponse); + expect(upsertProjectReportAttachmentStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/attachments/report/upload.ts b/api/src/paths/project/{projectId}/attachments/report/upload.ts index 228e0b0a02..d408010ed8 100644 --- a/api/src/paths/project/{projectId}/attachments/report/upload.ts +++ b/api/src/paths/project/{projectId}/attachments/report/upload.ts @@ -1,16 +1,11 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../../../../database/db'; -import { HTTP400 } from '../../../../../errors/custom-error'; -import { - IReportAttachmentAuthor, - PostReportAttachmentMetadata, - PutReportAttachmentMetadata -} from '../../../../../models/project-survey-attachments'; -import { queries } from '../../../../../queries/queries'; +import { getDBConnection } from '../../../../../database/db'; +import { HTTP400 } from '../../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; -import { generateS3FileKey, scanFileForVirus, uploadFileToS3 } from '../../../../../utils/file-utils'; +import { AttachmentService } from '../../../../../services/attachment-service'; +import { scanFileForVirus, uploadFileToS3 } from '../../../../../utils/file-utils'; import { getLogger } from '../../../../../utils/logger'; const defaultLog = getLogger('/api/project/{projectId}/attachments/upload'); @@ -41,6 +36,10 @@ POST.apiDoc = { { in: 'path', name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, required: true } ], @@ -135,10 +134,6 @@ export function uploadMedia(): RequestHandler { return async (req, res) => { const rawMediaArray: Express.Multer.File[] = req.files as Express.Multer.File[]; - if (!req.params.projectId) { - throw new HTTP400('Missing projectId'); - } - if (!rawMediaArray || !rawMediaArray.length) { // no media objects included, skipping media upload step throw new HTTP400('Missing upload data'); @@ -164,12 +159,13 @@ export function uploadMedia(): RequestHandler { throw new HTTP400('Malicious content detected, upload cancelled'); } + const attachmentService = new AttachmentService(connection); + //Upsert a report attachment - const upsertResult = await upsertProjectReportAttachment( + const upsertResult = await attachmentService.upsertProjectReportAttachment( rawMediaFile, Number(req.params.projectId), - req.body.attachmentMeta, - connection + req.body.attachmentMeta ); // Upload file to S3 @@ -193,137 +189,3 @@ export function uploadMedia(): RequestHandler { } }; } - -export const upsertProjectReportAttachment = async ( - file: Express.Multer.File, - projectId: number, - attachmentMeta: any, - connection: IDBConnection -): Promise<{ id: number; revision_count: number; key: string }> => { - const getSqlStatement = queries.project.getProjectReportAttachmentByFileNameSQL(projectId, file.originalname); - - if (!getSqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const key = generateS3FileKey({ projectId: projectId, fileName: file.originalname, folder: 'reports' }); - - const getResponse = await connection.query(getSqlStatement.text, getSqlStatement.values); - - let metadata; - let attachmentResult: { id: number; revision_count: number }; - - if (getResponse && getResponse.rowCount > 0) { - // Existing attachment with matching name found, update it - metadata = new PutReportAttachmentMetadata(attachmentMeta); - attachmentResult = await updateProjectReportAttachment(file, projectId, metadata, connection); - } else { - // No matching attachment found, insert new attachment - metadata = new PostReportAttachmentMetadata(attachmentMeta); - attachmentResult = await insertProjectReportAttachment( - file, - projectId, - new PostReportAttachmentMetadata(attachmentMeta), - key, - connection - ); - } - - // Delete any existing attachment author records - await deleteProjectReportAttachmentAuthors(attachmentResult.id, connection); - - const promises = []; - - // Insert any new attachment author records - promises.push( - metadata.authors.map((author) => insertProjectReportAttachmentAuthor(attachmentResult.id, author, connection)) - ); - - await Promise.all(promises); - - return { ...attachmentResult, key }; -}; - -export const insertProjectReportAttachment = async ( - file: Express.Multer.File, - projectId: number, - attachmentMeta: PostReportAttachmentMetadata, - key: string, - connection: IDBConnection -): Promise<{ id: number; revision_count: number }> => { - const sqlStatement = queries.project.postProjectReportAttachmentSQL( - file.originalname, - file.size, - projectId, - key, - attachmentMeta - ); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to insert project attachment data'); - } - - return response.rows[0]; -}; - -export const updateProjectReportAttachment = async ( - file: Express.Multer.File, - projectId: number, - attachmentMeta: PutReportAttachmentMetadata, - connection: IDBConnection -): Promise<{ id: number; revision_count: number }> => { - const sqlStatement = queries.project.putProjectReportAttachmentSQL(projectId, file.originalname, attachmentMeta); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL update statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to update project attachment data'); - } - - return response.rows[0]; -}; - -export const deleteProjectReportAttachmentAuthors = async ( - attachmentId: number, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.project.deleteProjectReportAttachmentAuthorsSQL(attachmentId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL delete attachment report authors statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response) { - throw new HTTP400('Failed to delete attachment report authors records'); - } -}; - -export const insertProjectReportAttachmentAuthor = async ( - attachmentId: number, - author: IReportAttachmentAuthor, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.project.insertProjectReportAttachmentAuthorSQL(attachmentId, author); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert attachment report author statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to insert attachment report author record'); - } -}; diff --git a/api/src/paths/project/{projectId}/attachments/upload.test.ts b/api/src/paths/project/{projectId}/attachments/upload.test.ts index 966f8cabef..1c636e75d1 100644 --- a/api/src/paths/project/{projectId}/attachments/upload.test.ts +++ b/api/src/paths/project/{projectId}/attachments/upload.test.ts @@ -3,7 +3,8 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../../database/db'; -import { HTTPError } from '../../../../errors/custom-error'; +import { HTTPError } from '../../../../errors/http-error'; +import { AttachmentService } from '../../../../services/attachment-service'; import * as file_utils from '../../../../utils/file-utils'; import { getMockDBConnection } from '../../../../__mocks__/db'; import * as upload from './upload'; @@ -35,36 +36,6 @@ describe('uploadMedia', () => { body: {} } as any; - let actualResult: any = null; - - const mockRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - } as any; - - it('should throw an error when projectId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = upload.uploadMedia(); - - await result( - { ...mockReq, params: { ...mockReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing projectId'); - } - }); - it('should throw an error when files are missing', async () => { sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); @@ -79,7 +50,7 @@ describe('uploadMedia', () => { } }); - it('should throw a 400 error when file format incorrect', async () => { + it('should throw an error when file format incorrect', async () => { sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -87,20 +58,20 @@ describe('uploadMedia', () => { } }); - sinon.stub(file_utils, 'scanFileForVirus').resolves(true); + sinon.stub(file_utils, 'scanFileForVirus').resolves(false); try { const result = upload.uploadMedia(); - await result({ ...mockReq, files: ['file1'] }, (null as unknown) as any, (null as unknown) as any); + await result(mockReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); + expect((actualError as HTTPError).message).to.equal('Malicious content detected, upload cancelled'); } }); - it('should throw a 400 error when file contains malicious content', async () => { + it('should throw an error if failure occurs', async () => { sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -108,9 +79,10 @@ describe('uploadMedia', () => { } }); - sinon.stub(file_utils, 'uploadFileToS3').resolves({ Key: '1/1/test.txt' } as any); - sinon.stub(upload, 'upsertProjectAttachment').resolves({ id: 1, revision_count: 0, key: 'key' }); - sinon.stub(file_utils, 'scanFileForVirus').resolves(false); + sinon.stub(file_utils, 'scanFileForVirus').resolves(true); + + const expectedError = new Error('cannot process request'); + sinon.stub(AttachmentService.prototype, 'upsertProjectAttachment').rejects(expectedError); try { const result = upload.uploadMedia(); @@ -118,12 +90,11 @@ describe('uploadMedia', () => { await result(mockReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Malicious content detected, upload cancelled'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should return id and revision_count on success (with username and email) with valid parameters', async () => { + it('should succeed with valid params', async () => { sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -132,13 +103,29 @@ describe('uploadMedia', () => { }); sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - sinon.stub(file_utils, 'uploadFileToS3').resolves({ Key: '1/1/test.txt' } as any); - sinon.stub(upload, 'upsertProjectAttachment').resolves({ id: 1, revision_count: 0, key: 'key' }); + sinon.stub(file_utils, 'uploadFileToS3').resolves(); + + const expectedResponse = { attachmentId: 1, revision_count: 1 }; + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; - const result = upload.uploadMedia(); + const upsertProjectAttachmentStub = sinon + .stub(AttachmentService.prototype, 'upsertProjectAttachment') + .resolves({ id: 1, revision_count: 1, key: 'string' }); - await result(mockReq, mockRes as any, (null as unknown) as any); + const result = upload.uploadMedia(); - expect(actualResult).to.eql({ attachmentId: 1, revision_count: 0 }); + await result(mockReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(expectedResponse); + expect(upsertProjectAttachmentStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/attachments/upload.ts b/api/src/paths/project/{projectId}/attachments/upload.ts index 729212fec9..eb2a8a484c 100644 --- a/api/src/paths/project/{projectId}/attachments/upload.ts +++ b/api/src/paths/project/{projectId}/attachments/upload.ts @@ -2,11 +2,11 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { ATTACHMENT_TYPE } from '../../../../constants/attachments'; import { PROJECT_ROLE } from '../../../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../../../database/db'; -import { HTTP400 } from '../../../../errors/custom-error'; -import { queries } from '../../../../queries/queries'; +import { getDBConnection } from '../../../../database/db'; +import { HTTP400 } from '../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../request-handlers/security/authorization'; -import { generateS3FileKey, scanFileForVirus, uploadFileToS3 } from '../../../../utils/file-utils'; +import { AttachmentService } from '../../../../services/attachment-service'; +import { scanFileForVirus, uploadFileToS3 } from '../../../../utils/file-utils'; import { getLogger } from '../../../../utils/logger'; const defaultLog = getLogger('/api/project/{projectId}/attachments/upload'); @@ -37,6 +37,10 @@ POST.apiDoc = { { in: 'path', name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, required: true } ], @@ -107,17 +111,11 @@ export function uploadMedia(): RequestHandler { return async (req, res) => { const rawMediaArray: Express.Multer.File[] = req.files as Express.Multer.File[]; - if (!req.params.projectId) { - throw new HTTP400('Missing projectId'); - } - if (!rawMediaArray || !rawMediaArray.length) { // no media objects included, skipping media upload step throw new HTTP400('Missing upload data'); } - if (!req.body) { - throw new HTTP400('Missing request body'); - } + const rawMediaFile: Express.Multer.File = rawMediaArray[0]; defaultLog.debug({ @@ -138,11 +136,12 @@ export function uploadMedia(): RequestHandler { throw new HTTP400('Malicious content detected, upload cancelled'); } - const upsertResult = await upsertProjectAttachment( + const attachmentService = new AttachmentService(connection); + + const upsertResult = await attachmentService.upsertProjectAttachment( rawMediaFile, Number(req.params.projectId), - ATTACHMENT_TYPE.OTHER, - connection + ATTACHMENT_TYPE.OTHER ); // Upload file to S3 @@ -166,81 +165,3 @@ export function uploadMedia(): RequestHandler { } }; } - -export const upsertProjectAttachment = async ( - file: Express.Multer.File, - projectId: number, - attachmentType: string, - connection: IDBConnection -): Promise<{ id: number; revision_count: number; key: string }> => { - const getSqlStatement = queries.project.getProjectAttachmentByFileNameSQL(projectId, file.originalname); - - if (!getSqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const key = generateS3FileKey({ projectId: projectId, fileName: file.originalname }); - - const getResponse = await connection.query(getSqlStatement.text, getSqlStatement.values); - - let attachmentResult: { id: number; revision_count: number }; - - if (getResponse && getResponse.rowCount > 0) { - // Existing attachment with matching name found, update it - attachmentResult = await updateProjectAttachment(file, projectId, attachmentType, connection); - } else { - // No matching attachment found, insert new attachment - attachmentResult = await insertProjectAttachment(file, projectId, attachmentType, key, connection); - } - - return { ...attachmentResult, key }; -}; - -export const insertProjectAttachment = async ( - file: Express.Multer.File, - projectId: number, - attachmentType: string, - key: string, - connection: IDBConnection -): Promise<{ id: number; revision_count: number }> => { - const sqlStatement = queries.project.postProjectAttachmentSQL( - file.originalname, - file.size, - attachmentType, - projectId, - key - ); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to insert project attachment data'); - } - - return response.rows[0]; -}; - -export const updateProjectAttachment = async ( - file: Express.Multer.File, - projectId: number, - attachmentType: string, - connection: IDBConnection -): Promise<{ id: number; revision_count: number }> => { - const sqlStatement = queries.project.putProjectAttachmentSQL(projectId, file.originalname, attachmentType); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL update statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to update project attachment data'); - } - - return response.rows[0]; -}; diff --git a/api/src/paths/project/{projectId}/attachments/{attachmentId}/delete.test.ts b/api/src/paths/project/{projectId}/attachments/{attachmentId}/delete.test.ts index 9bf9679478..89c946a80b 100644 --- a/api/src/paths/project/{projectId}/attachments/{attachmentId}/delete.test.ts +++ b/api/src/paths/project/{projectId}/attachments/{attachmentId}/delete.test.ts @@ -1,16 +1,14 @@ -import { DeleteObjectOutput } from 'aws-sdk/clients/s3'; +import { S3 } from 'aws-sdk'; import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; -import project_queries from '../../../../../queries/project'; -import security_queries from '../../../../../queries/security'; +import { HTTPError } from '../../../../../errors/http-error'; +import { AttachmentService } from '../../../../../services/attachment-service'; import * as file_utils from '../../../../../utils/file-utils'; import { getMockDBConnection } from '../../../../../__mocks__/db'; -import * as delete_attachment from './delete'; +import * as deleteAttachment from './delete'; chai.use(sinonChai); @@ -19,90 +17,8 @@ describe('deleteAttachment', () => { sinon.restore(); }); - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1, - attachmentId: 2 - }, - body: { - attachmentType: 'Image', - securityToken: 'token' - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - }, - send: () => { - // do nothing - } - }; - } - }; - - it('should throw an error when projectId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = delete_attachment.deleteAttachment(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw an error when attachmentId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = delete_attachment.deleteAttachment(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, attachmentId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should throw an error when attachmentType is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = delete_attachment.deleteAttachment(); - - await result( - { ...sampleReq, body: { ...sampleReq.body, attachmentType: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required body param `attachmentType`'); - } - }); - - it('should throw a 400 error when no sql statement returned for unsecureAttachmentRecordSQL', async () => { + it('should throw an error when a failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -110,159 +26,121 @@ describe('deleteAttachment', () => { } }); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(null); - - try { - const result = delete_attachment.deleteAttachment(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL unsecure record statement'); - } - }); - - it('should throw a 400 error when fails to unsecure attachment record', async () => { - const mockQuery = sinon.stub(); - - mockQuery.onFirstCall().resolves({ rowCount: null }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - - try { - const result = delete_attachment.deleteAttachment(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to unsecure record'); - } - }); - - it('should throw a 400 error when no sql statement returned for deleteProjectAttachmentSQL', async () => { - const mockQuery = sinon.stub(); - - mockQuery.onFirstCall().resolves({ rowCount: 1 }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - sinon.stub(project_queries, 'deleteProjectAttachmentSQL').returns(null); + const expectedError = new Error('cannot process request'); + const deleteProjectReportAttachmentAuthorsStub = sinon + .stub(AttachmentService.prototype, 'deleteProjectReportAttachmentAuthors') + .rejects(expectedError); + + const sampleReq = { + keycloak_token: {}, + body: { attachmentType: 'Report' }, + params: { + projectId: 1, + attachmentId: 2 + } + } as any; try { - const result = delete_attachment.deleteAttachment(); + const result = deleteAttachment.deleteAttachment(); await result(sampleReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL delete project attachment statement'); + expect(deleteProjectReportAttachmentAuthorsStub).to.be.calledOnce; + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should return null when deleting file from S3 fails', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ rowCount: 1 }) - .onSecondCall() - .resolves({ rowCount: 1, rows: [{ key: 's3Key' }] }); - + it('should delete Project `Report` Attachment', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery - }); - - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - sinon.stub(project_queries, 'deleteProjectAttachmentSQL').returns(SQL`some query`); - sinon.stub(file_utils, 'deleteFileFromS3').resolves(null); - - const result = delete_attachment.deleteAttachment(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.equal(null); - }); - - it('should return null response on success when type is not Report', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ rowCount: 1 }) - .onSecondCall() - .resolves({ rows: [{ key: 's3Key' }], rowCount: 1 }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery + } }); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - sinon.stub(project_queries, 'deleteProjectAttachmentSQL').returns(SQL`some query`); - sinon.stub(file_utils, 'deleteFileFromS3').resolves('non null response' as DeleteObjectOutput); - - const result = delete_attachment.deleteAttachment(); + const sampleReq = { + keycloak_token: {}, + body: { attachmentType: 'Report' }, + params: { + projectId: 1, + attachmentId: 2 + } + } as any; + + const deleteProjectReportAttachmentAuthorsStub = sinon + .stub(AttachmentService.prototype, 'deleteProjectReportAttachmentAuthors') + .resolves(); + + const deleteProjectReportAttachmentStub = sinon + .stub(AttachmentService.prototype, 'deleteProjectReportAttachment') + .resolves({ key: 'string' }); + + const fileUtilsStub = sinon + .stub(file_utils, 'deleteFileFromS3') + .resolves((true as unknown) as S3.DeleteObjectOutput); + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + send: (response: any) => { + actualResult = response; + } + }; + } + }; - await result(sampleReq, sampleRes as any, (null as unknown) as any); + const result = deleteAttachment.deleteAttachment(); - expect(actualResult).to.equal(null); + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(undefined); + expect(deleteProjectReportAttachmentAuthorsStub).to.be.calledOnce; + expect(deleteProjectReportAttachmentStub).to.be.calledOnce; + expect(fileUtilsStub).to.be.calledOnce; }); - it('should return null response on success when type is Report', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ rowCount: 1 }) - .onSecondCall() - .resolves({ rowCount: 1 }) - .onThirdCall() - .resolves({ rows: [{ key: 's3Key' }], rowCount: 1 }); - + it('should delete Project Attachment', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - sinon.stub(project_queries, 'deleteProjectReportAttachmentSQL').returns(SQL`some query`); - sinon.stub(file_utils, 'deleteFileFromS3').resolves('non null response' as DeleteObjectOutput); - - const result = delete_attachment.deleteAttachment(); + const sampleReq = { + keycloak_token: {}, + body: { attachmentType: 'Attachment' }, + params: { + projectId: 1, + attachmentId: 2 + } + } as any; + + const deleteProjectAttachmentStub = sinon + .stub(AttachmentService.prototype, 'deleteProjectAttachment') + .resolves({ key: 'string' }); + + const fileUtilsStub = sinon.stub(file_utils, 'deleteFileFromS3').resolves(); + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; - await result( - { ...sampleReq, body: { ...sampleReq.body, attachmentType: 'Report' } }, - sampleRes as any, - (null as unknown) as any - ); + const result = deleteAttachment.deleteAttachment(); - expect(actualResult).to.equal(null); + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(null); + expect(deleteProjectAttachmentStub).to.be.calledOnce; + expect(fileUtilsStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/attachments/{attachmentId}/delete.ts b/api/src/paths/project/{projectId}/attachments/{attachmentId}/delete.ts index a8bc75f46b..6d52f5aa20 100644 --- a/api/src/paths/project/{projectId}/attachments/{attachmentId}/delete.ts +++ b/api/src/paths/project/{projectId}/attachments/{attachmentId}/delete.ts @@ -2,14 +2,12 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { ATTACHMENT_TYPE } from '../../../../../constants/attachments'; import { PROJECT_ROLE } from '../../../../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../../../../database/db'; -import { HTTP400 } from '../../../../../errors/custom-error'; -import { queries } from '../../../../../queries/queries'; +import { getDBConnection } from '../../../../../database/db'; import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; +import { AttachmentService } from '../../../../../services/attachment-service'; import { deleteFileFromS3 } from '../../../../../utils/file-utils'; import { getLogger } from '../../../../../utils/logger'; import { attachmentApiDocObject } from '../../../../../utils/shared-api-docs'; -import { deleteProjectReportAttachmentAuthors } from '../report/upload'; const defaultLog = getLogger('/api/project/{projectId}/attachments/{attachmentId}/delete'); @@ -38,7 +36,8 @@ POST.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -46,7 +45,8 @@ POST.apiDoc = { in: 'path', name: 'attachmentId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -57,14 +57,10 @@ POST.apiDoc = { 'application/json': { schema: { type: 'object', - required: ['attachmentType', 'securityToken'], + required: ['attachmentType'], properties: { attachmentType: { type: 'string' - }, - securityToken: { - type: 'string', - nullable: true } } } @@ -97,35 +93,20 @@ export function deleteAttachment(): RequestHandler { return async (req, res) => { defaultLog.debug({ label: 'Delete attachment', message: 'params', req_params: req.params }); - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - - if (!req.body || !req.body.attachmentType) { - throw new HTTP400('Missing required body param `attachmentType`'); - } - const connection = getDBConnection(req['keycloak_token']); try { await connection.open(); - // If the attachment record is currently secured, need to unsecure it prior to deleting it - if (req.body.securityToken) { - await unsecureProjectAttachmentRecord(req.body.securityToken, req.body.attachmentType, connection); - } + const attachmentService = new AttachmentService(connection); let deleteResult: { key: string }; if (req.body.attachmentType === ATTACHMENT_TYPE.REPORT) { - await deleteProjectReportAttachmentAuthors(Number(req.params.attachmentId), connection); + await attachmentService.deleteProjectReportAttachmentAuthors(Number(req.params.attachmentId)); - deleteResult = await deleteProjectReportAttachment(Number(req.params.attachmentId), connection); + deleteResult = await attachmentService.deleteProjectReportAttachment(Number(req.params.attachmentId)); } else { - deleteResult = await deleteProjectAttachment(Number(req.params.attachmentId), connection); + deleteResult = await attachmentService.deleteProjectAttachment(Number(req.params.attachmentId)); } await connection.commit(); @@ -146,65 +127,3 @@ export function deleteAttachment(): RequestHandler { } }; } - -const unsecureProjectAttachmentRecord = async ( - securityToken: any, - attachmentType: string, - connection: IDBConnection -): Promise => { - const unsecureRecordSQLStatement = - attachmentType === 'Report' - ? queries.security.unsecureAttachmentRecordSQL('project_report_attachment', securityToken) - : queries.security.unsecureAttachmentRecordSQL('project_attachment', securityToken); - - if (!unsecureRecordSQLStatement) { - throw new HTTP400('Failed to build SQL unsecure record statement'); - } - - const unsecureRecordSQLResponse = await connection.query( - unsecureRecordSQLStatement.text, - unsecureRecordSQLStatement.values - ); - - if (!unsecureRecordSQLResponse || !unsecureRecordSQLResponse.rowCount) { - throw new HTTP400('Failed to unsecure record'); - } -}; - -export const deleteProjectAttachment = async ( - attachmentId: number, - connection: IDBConnection -): Promise<{ key: string }> => { - const sqlStatement = queries.project.deleteProjectAttachmentSQL(attachmentId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL delete project attachment statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to delete project attachment record'); - } - - return response.rows[0]; -}; - -export const deleteProjectReportAttachment = async ( - attachmentId: number, - connection: IDBConnection -): Promise<{ key: string }> => { - const sqlStatement = queries.project.deleteProjectReportAttachmentSQL(attachmentId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL delete project report attachment statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to delete project attachment report record'); - } - - return response.rows[0]; -}; diff --git a/api/src/paths/project/{projectId}/attachments/{attachmentId}/getSignedUrl.test.ts b/api/src/paths/project/{projectId}/attachments/{attachmentId}/getSignedUrl.test.ts index 27d275e478..789fdb865c 100644 --- a/api/src/paths/project/{projectId}/attachments/{attachmentId}/getSignedUrl.test.ts +++ b/api/src/paths/project/{projectId}/attachments/{attachmentId}/getSignedUrl.test.ts @@ -2,11 +2,9 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import { ATTACHMENT_TYPE } from '../../../../../constants/attachments'; import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; -import project_queries from '../../../../../queries/project'; +import { HTTPError } from '../../../../../errors/http-error'; +import { AttachmentService } from '../../../../../services/attachment-service'; import * as file_utils from '../../../../../utils/file-utils'; import { getMockDBConnection } from '../../../../../__mocks__/db'; import * as get_signed_url from './getSignedUrl'; @@ -18,100 +16,25 @@ describe('getProjectAttachmentSignedURL', () => { sinon.restore(); }); - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1, - attachmentId: 2 - }, - query: { - attachmentType: 'Other' - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - it('should throw an error when projectId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = get_signed_url.getProjectAttachmentSignedURL(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw an error when attachmentId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = get_signed_url.getProjectAttachmentSignedURL(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, attachmentId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should return null when getting signed url from S3 fails', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [{ key: 's3Key' }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(project_queries, 'getProjectAttachmentS3KeySQL').returns(SQL`some query`); - sinon.stub(file_utils, 'getS3SignedURL').resolves(null); - - const result = get_signed_url.getProjectAttachmentSignedURL(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.equal(null); - }); - - describe('non report attachments', () => { - it('should throw a 400 error when no sql statement returned', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + describe('report attachments', () => { + it('should throw an error when a failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const expectedError = new Error('cannot process request'); + sinon.stub(AttachmentService.prototype, 'getProjectReportAttachmentS3Key').rejects(expectedError); + + const sampleReq = { + keycloak_token: {}, + body: { attachments: [], security_ids: [] }, + params: { + projectId: 1, + attachmentId: 1 + }, + query: { + attachmentType: 'Report' } - }); - - sinon.stub(project_queries, 'getProjectAttachmentS3KeySQL').returns(null); + } as any; try { const result = get_signed_url.getProjectAttachmentSignedURL(); @@ -119,96 +42,96 @@ describe('getProjectAttachmentSignedURL', () => { await result(sampleReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build attachment S3 key SQLstatement'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); it('should return the signed url response on success', async () => { - const mockQuery = sinon.stub(); + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const getProjectReportAttachmentS3KeyStub = sinon + .stub(AttachmentService.prototype, 'getProjectReportAttachmentS3Key') + .resolves('key'); + + const sampleReq = { + keycloak_token: {}, + body: { attachments: [], security_ids: [] }, + params: { + projectId: 1, + attachmentId: 1 + }, + query: { + attachmentType: 'Report' + } + } as any; - mockQuery.resolves({ rows: [{ key: 's3Key' }] }); + const getS3SignedURLStub = sinon.stub(file_utils, 'getS3SignedURL').resolves('myurlsigned.com'); - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); + let actualResult: any = null; - sinon.stub(project_queries, 'getProjectAttachmentS3KeySQL').returns(SQL`some query`); - sinon.stub(file_utils, 'getS3SignedURL').resolves('myurlsigned.com'); + const sampleRes = { + status: () => { + return { + json: (result: any) => { + actualResult = result; + } + }; + } + }; const result = get_signed_url.getProjectAttachmentSignedURL(); await result(sampleReq, sampleRes as any, (null as unknown) as any); expect(actualResult).to.eql('myurlsigned.com'); + expect(getProjectReportAttachmentS3KeyStub).to.be.calledOnce; + expect(getS3SignedURLStub).to.be.calledOnce; }); }); - describe('report attachments', () => { - it('should throw a 400 error when no sql statement returned', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + describe('non report attachments', () => { + it('should return the signed url response on success', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const getProjectAttachmentS3KeyStub = sinon + .stub(AttachmentService.prototype, 'getProjectAttachmentS3Key') + .resolves('key'); + + const sampleReq = { + keycloak_token: {}, + body: { attachments: [], security_ids: [] }, + params: { + projectId: 1, + attachmentId: 1 + }, + query: { + attachmentType: 'Other' } - }); + } as any; - sinon.stub(project_queries, 'getProjectReportAttachmentS3KeySQL').returns(null); + const getS3SignedURLStub = sinon.stub(file_utils, 'getS3SignedURL').resolves(); - try { - const result = get_signed_url.getProjectAttachmentSignedURL(); + let actualResult: any = null; - await result( - { - ...sampleReq, - query: { - attachmentType: ATTACHMENT_TYPE.REPORT + const sampleRes = { + status: () => { + return { + json: (result: any) => { + actualResult = result; } - }, - sampleRes as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build report attachment S3 key SQLstatement'); - } - }); - - it('should return the signed url response on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [{ key: 's3Key' }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(project_queries, 'getProjectReportAttachmentS3KeySQL').returns(SQL`some query`); - sinon.stub(file_utils, 'getS3SignedURL').resolves('myurlsigned.com'); + }; + } + }; const result = get_signed_url.getProjectAttachmentSignedURL(); - await result( - { - ...sampleReq, - query: { - attachmentType: ATTACHMENT_TYPE.REPORT - } - }, - sampleRes as any, - (null as unknown) as any - ); + await result(sampleReq, sampleRes as any, (null as unknown) as any); - expect(actualResult).to.eql('myurlsigned.com'); + expect(actualResult).to.eql(null); + expect(getProjectAttachmentS3KeyStub).to.be.calledOnce; + expect(getS3SignedURLStub).to.be.calledOnce; }); }); }); diff --git a/api/src/paths/project/{projectId}/attachments/{attachmentId}/getSignedUrl.ts b/api/src/paths/project/{projectId}/attachments/{attachmentId}/getSignedUrl.ts index 85ec18be88..75ad5c4724 100644 --- a/api/src/paths/project/{projectId}/attachments/{attachmentId}/getSignedUrl.ts +++ b/api/src/paths/project/{projectId}/attachments/{attachmentId}/getSignedUrl.ts @@ -2,10 +2,9 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { ATTACHMENT_TYPE } from '../../../../../constants/attachments'; import { PROJECT_ROLE } from '../../../../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../../../../database/db'; -import { HTTP400 } from '../../../../../errors/custom-error'; -import { queries } from '../../../../../queries/queries'; +import { getDBConnection } from '../../../../../database/db'; import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; +import { AttachmentService } from '../../../../../services/attachment-service'; import { getS3SignedURL } from '../../../../../utils/file-utils'; import { getLogger } from '../../../../../utils/logger'; @@ -39,7 +38,8 @@ GET.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -47,7 +47,8 @@ GET.apiDoc = { in: 'path', name: 'attachmentId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -100,36 +101,23 @@ export function getProjectAttachmentSignedURL(): RequestHandler { req_body: req.body }); - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - - if (!req.query.attachmentType) { - throw new HTTP400('Missing required query param `attachmentType`'); - } - const connection = getDBConnection(req['keycloak_token']); try { await connection.open(); let s3Key; + const attachmentService = new AttachmentService(connection); if (req.query.attachmentType === ATTACHMENT_TYPE.REPORT) { - s3Key = await getProjectReportAttachmentS3Key( + s3Key = await attachmentService.getProjectReportAttachmentS3Key( Number(req.params.projectId), - Number(req.params.attachmentId), - connection + Number(req.params.attachmentId) ); } else { - s3Key = await getProjectAttachmentS3Key( + s3Key = await attachmentService.getProjectAttachmentS3Key( Number(req.params.projectId), - Number(req.params.attachmentId), - connection + Number(req.params.attachmentId) ); } @@ -151,43 +139,3 @@ export function getProjectAttachmentSignedURL(): RequestHandler { } }; } - -export const getProjectAttachmentS3Key = async ( - projectId: number, - attachmentId: number, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.project.getProjectAttachmentS3KeySQL(projectId, attachmentId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build attachment S3 key SQLstatement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to get attachment S3 key'); - } - - return response.rows[0].key; -}; - -export const getProjectReportAttachmentS3Key = async ( - projectId: number, - attachmentId: number, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.project.getProjectReportAttachmentS3KeySQL(projectId, attachmentId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build report attachment S3 key SQLstatement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to get attachment S3 key'); - } - - return response.rows[0].key; -}; diff --git a/api/src/paths/project/{projectId}/attachments/{attachmentId}/makeSecure.test.ts b/api/src/paths/project/{projectId}/attachments/{attachmentId}/makeSecure.test.ts deleted file mode 100644 index ffc78b7555..0000000000 --- a/api/src/paths/project/{projectId}/attachments/{attachmentId}/makeSecure.test.ts +++ /dev/null @@ -1,171 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; -import security_queries from '../../../../../queries/security'; -import { getMockDBConnection } from '../../../../../__mocks__/db'; -import * as makeSecure from './makeSecure'; - -chai.use(sinonChai); - -describe('makeProjectAttachmentSecure', () => { - afterEach(() => { - sinon.restore(); - }); - - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1, - attachmentId: 2 - }, - body: { - attachmentType: 'Image' - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - it('should throw an error when projectId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeSecure.makeProjectAttachmentSecure(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw an error when attachmentId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeSecure.makeProjectAttachmentSecure(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, attachmentId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should throw an error when attachmentType is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeSecure.makeProjectAttachmentSecure(); - - await result( - { ...sampleReq, body: { ...sampleReq.body, attachmentType: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required body param `attachmentType`'); - } - }); - - it('should throw an error when fails to build secureAttachmentRecordSQL statement', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(security_queries, 'secureAttachmentRecordSQL').returns(null); - - try { - const result = makeSecure.makeProjectAttachmentSecure(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL secure record statement'); - } - }); - - it('should throw an error when fails to secure record', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: null - }); - - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, query: mockQuery }); - sinon.stub(security_queries, 'secureAttachmentRecordSQL').returns(SQL`something`); - - try { - const result = makeSecure.makeProjectAttachmentSecure(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to secure record'); - } - }); - - it('should work on success when type is not Report', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: 1 - }); - - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, query: mockQuery }); - sinon.stub(security_queries, 'secureAttachmentRecordSQL').returns(SQL`something`); - - const result = makeSecure.makeProjectAttachmentSecure(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.equal(1); - }); - - it('should work on success when type is Report', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: 1 - }); - - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, query: mockQuery }); - sinon.stub(security_queries, 'secureAttachmentRecordSQL').returns(SQL`something`); - - const result = makeSecure.makeProjectAttachmentSecure(); - - await result( - { ...sampleReq, body: { ...sampleReq.body, attachmentType: 'Report' } }, - sampleRes as any, - (null as unknown) as any - ); - - expect(actualResult).to.equal(1); - }); -}); diff --git a/api/src/paths/project/{projectId}/attachments/{attachmentId}/makeSecure.ts b/api/src/paths/project/{projectId}/attachments/{attachmentId}/makeSecure.ts deleted file mode 100644 index 8a48ee0858..0000000000 --- a/api/src/paths/project/{projectId}/attachments/{attachmentId}/makeSecure.ts +++ /dev/null @@ -1,161 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE } from '../../../../../constants/roles'; -import { getDBConnection } from '../../../../../database/db'; -import { HTTP400 } from '../../../../../errors/custom-error'; -import { queries } from '../../../../../queries/queries'; -import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; -import { getLogger } from '../../../../../utils/logger'; - -const defaultLog = getLogger('/api/project/{projectId}/attachments/{attachmentId}/makeSecure'); - -export const PUT: Operation = [ - authorizeRequestHandler((req) => { - return { - and: [ - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR], - projectId: Number(req.params.projectId), - discriminator: 'ProjectRole' - } - ] - }; - }), - makeProjectAttachmentSecure() -]; - -PUT.apiDoc = { - description: 'Make security status of a project attachment secure.', - tags: ['attachment', 'security_status'], - security: [ - { - Bearer: [] - } - ], - parameters: [ - { - in: 'path', - name: 'projectId', - schema: { - type: 'number' - }, - required: true - }, - { - in: 'path', - name: 'attachmentId', - schema: { - type: 'number' - }, - required: true - } - ], - requestBody: { - description: 'Current attachment type for project attachment.', - content: { - 'application/json': { - schema: { - type: 'object', - required: ['attachmentType'], - properties: { - attachmentType: { - type: 'string' - } - } - } - } - } - }, - responses: { - 200: { - description: 'Project attachment make secure security status response.', - content: { - 'application/json': { - schema: { - title: 'Row count of record for which security status has been made secure', - type: 'number' - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function makeProjectAttachmentSecure(): RequestHandler { - return async (req, res) => { - defaultLog.debug({ - label: 'Make security status of a project attachment secure', - message: 'params', - req_params: req.params - }); - - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - - if (!req.body || !req.body.attachmentType) { - throw new HTTP400('Missing required body param `attachmentType`'); - } - - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - const secureRecordSQLStatement = - req.body.attachmentType === 'Report' - ? queries.security.secureAttachmentRecordSQL( - Number(req.params.attachmentId), - 'project_report_attachment', - Number(req.params.projectId) - ) - : queries.security.secureAttachmentRecordSQL( - Number(req.params.attachmentId), - 'project_attachment', - Number(req.params.projectId) - ); - - if (!secureRecordSQLStatement) { - throw new HTTP400('Failed to build SQL secure record statement'); - } - - const secureRecordSQLResponse = await connection.query( - secureRecordSQLStatement.text, - secureRecordSQLStatement.values - ); - - if (!secureRecordSQLResponse || !secureRecordSQLResponse.rowCount) { - throw new HTTP400('Failed to secure record'); - } - - await connection.commit(); - - return res.status(200).json(1); - } catch (error) { - defaultLog.error({ label: 'makeProjectAttachmentSecure', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/project/{projectId}/attachments/{attachmentId}/makeUnsecure.test.ts b/api/src/paths/project/{projectId}/attachments/{attachmentId}/makeUnsecure.test.ts deleted file mode 100644 index b2a5c6ae95..0000000000 --- a/api/src/paths/project/{projectId}/attachments/{attachmentId}/makeUnsecure.test.ts +++ /dev/null @@ -1,204 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; -import security_queries from '../../../../../queries/security'; -import { getMockDBConnection } from '../../../../../__mocks__/db'; -import * as makeUnsecure from './makeUnsecure'; - -chai.use(sinonChai); - -describe('makeProjectAttachmentUnsecure', () => { - afterEach(() => { - sinon.restore(); - }); - - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1, - attachmentId: 2 - }, - body: { - securityToken: 'sometoken', - attachmentType: 'Image' - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - it('should throw an error when projectId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeUnsecure.makeProjectAttachmentUnsecure(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw an error when attachmentId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeUnsecure.makeProjectAttachmentUnsecure(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, attachmentId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should throw an error when request body is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeUnsecure.makeProjectAttachmentUnsecure(); - - await result({ ...sampleReq, body: null }, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required request body'); - } - }); - - it('should throw an error when attachmentType is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeUnsecure.makeProjectAttachmentUnsecure(); - - await result( - { ...sampleReq, body: { attachmentType: null, securityToken: 'sometoken' } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required request body'); - } - }); - - it('should throw an error when securityToken is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeUnsecure.makeProjectAttachmentUnsecure(); - - await result( - { ...sampleReq, body: { attachmentType: 'Image', securityToken: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required request body'); - } - }); - - it('should throw an error when fails to build unsecureRecordSQL statement', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(null); - - try { - const result = makeUnsecure.makeProjectAttachmentUnsecure(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL unsecure record statement'); - } - }); - - it('should throw an error when fails to unsecure record', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: null - }); - - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, query: mockQuery }); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - - try { - const result = makeUnsecure.makeProjectAttachmentUnsecure(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to unsecure record'); - } - }); - - it('should work on success when type is not Report', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: 1 - }); - - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, query: mockQuery }); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - - const result = makeUnsecure.makeProjectAttachmentUnsecure(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.equal(1); - }); - - it('should work on success when type is Report', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: 1 - }); - - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, query: mockQuery }); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - - const result = makeUnsecure.makeProjectAttachmentUnsecure(); - - await result( - { ...sampleReq, body: { ...sampleReq.body, attachmentType: 'Report' } }, - sampleRes as any, - (null as unknown) as any - ); - - expect(actualResult).to.equal(1); - }); -}); diff --git a/api/src/paths/project/{projectId}/attachments/{attachmentId}/makeUnsecure.ts b/api/src/paths/project/{projectId}/attachments/{attachmentId}/makeUnsecure.ts deleted file mode 100644 index 808a95aa9f..0000000000 --- a/api/src/paths/project/{projectId}/attachments/{attachmentId}/makeUnsecure.ts +++ /dev/null @@ -1,156 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE } from '../../../../../constants/roles'; -import { getDBConnection } from '../../../../../database/db'; -import { HTTP400 } from '../../../../../errors/custom-error'; -import { queries } from '../../../../../queries/queries'; -import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; -import { getLogger } from '../../../../../utils/logger'; - -const defaultLog = getLogger('/api/project/{projectId}/attachments/{attachmentId}/makeUnsecure'); - -export const PUT: Operation = [ - authorizeRequestHandler((req) => { - return { - and: [ - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR], - projectId: Number(req.params.projectId), - discriminator: 'ProjectRole' - } - ] - }; - }), - makeProjectAttachmentUnsecure() -]; - -PUT.apiDoc = { - description: 'Make security status of a project attachment unsecure.', - tags: ['attachment', 'security_status'], - security: [ - { - Bearer: [] - } - ], - parameters: [ - { - in: 'path', - name: 'projectId', - schema: { - type: 'number' - }, - required: true - }, - { - in: 'path', - name: 'attachmentId', - schema: { - type: 'number' - }, - required: true - } - ], - requestBody: { - description: 'Current security token value and attachment type for project attachment.', - content: { - 'application/json': { - schema: { - type: 'object', - required: ['attachmentType', 'securityToken'], - properties: { - attachmentType: { - type: 'string' - }, - securityToken: { - type: 'string' - } - } - } - } - } - }, - responses: { - 200: { - description: 'Project attachment make unsecure security status response.', - content: { - 'application/json': { - schema: { - title: 'Row count of record for which security status has been made unsecure', - type: 'number' - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function makeProjectAttachmentUnsecure(): RequestHandler { - return async (req, res) => { - defaultLog.debug({ - label: 'Make security status of a project attachment unsecure', - message: 'params', - req_params: req.params - }); - - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - - if (!req.body || !req.body.attachmentType || !req.body.securityToken) { - throw new HTTP400('Missing required request body'); - } - - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - const unsecureRecordSQLStatement = - req.body.attachmentType === 'Report' - ? queries.security.unsecureAttachmentRecordSQL('project_report_attachment', req.body.securityToken) - : queries.security.unsecureAttachmentRecordSQL('project_attachment', req.body.securityToken); - - if (!unsecureRecordSQLStatement) { - throw new HTTP400('Failed to build SQL unsecure record statement'); - } - - const unsecureRecordSQLResponse = await connection.query( - unsecureRecordSQLStatement.text, - unsecureRecordSQLStatement.values - ); - - if (!unsecureRecordSQLResponse || !unsecureRecordSQLResponse.rowCount) { - throw new HTTP400('Failed to unsecure record'); - } - - await connection.commit(); - - return res.status(200).json(1); - } catch (error) { - defaultLog.error({ label: 'makeProjectAttachmentUnsecure', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/get.test.ts b/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/get.test.ts index 63d00b9428..ed18370798 100644 --- a/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/get.test.ts +++ b/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/get.test.ts @@ -2,160 +2,91 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../database/db'; -import { HTTPError } from '../../../../../../errors/custom-error'; -import project_queries from '../../../../../../queries/project'; +import { HTTPError } from '../../../../../../errors/http-error'; +import { + IProjectReportAttachment, + IReportAttachmentAuthor +} from '../../../../../../repositories/attachment-repository'; +import { AttachmentService } from '../../../../../../services/attachment-service'; import { getMockDBConnection } from '../../../../../../__mocks__/db'; -import * as get_project_metadata from './get'; +import * as get from './get'; chai.use(sinonChai); -describe('gets metadata for a project report', () => { - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - body: {}, - params: { - projectId: 1, - attachmentId: 1 - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - +describe('getProjectReportDetails', () => { afterEach(() => { sinon.restore(); }); - it('should throw a 400 error when no projectId is provided', async () => { + it('should throw an error if failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - try { - const result = get_project_metadata.getProjectReportMetaData(); - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw a 400 error when no attachmentId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = get_project_metadata.getProjectReportMetaData(); - await result( - { ...sampleReq, params: { ...sampleReq.params, attachmentId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should throw a 400 error when no sql statement returned for getProjectReportAttachmentSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(project_queries, 'getProjectReportAttachmentSQL').returns(null); - - try { - const result = get_project_metadata.getProjectReportMetaData(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build metadata SQLStatement'); - } - }); - - it('should throw a 400 error when no sql statement returned for getProjectReportAuthorsSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); + const mockReq = { + keycloak_token: {}, + params: { + projectId: 1, + attachmentId: 2 + }, + body: {} + } as any; - sinon.stub(project_queries, 'getProjectReportAuthorsSQL').returns(null); + const expectedError = new Error('cannot process request'); + sinon.stub(AttachmentService.prototype, 'getProjectReportAttachmentById').rejects(expectedError); try { - const result = get_project_metadata.getProjectReportMetaData(); + const result = get.getProjectReportDetails(); - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); + await result(mockReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build metadata SQLStatement'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should return a project report metadata, on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.onCall(0).resolves({ - rowCount: 1, - rows: [ - { - attachment_id: 1, - title: 'My report', - update_date: '2020-10-10', - description: 'some description', - year_published: 2020, - revision_count: '1' - } - ] - }); - mockQuery.onCall(1).resolves({ rowCount: 1, rows: [{ first_name: 'John', last_name: 'Smith' }] }); + it('should succeed with valid params', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + const mockReq = { + keycloak_token: {}, + params: { + projectId: 1, + attachmentId: 2 }, - query: mockQuery - }); - - sinon.stub(project_queries, 'getProjectReportAttachmentSQL').returns(SQL`something`); - sinon.stub(project_queries, 'getProjectReportAuthorsSQL').returns(SQL`something`); - - const result = get_project_metadata.getProjectReportMetaData(); + body: {} + } as any; + + const getProjectReportAttachmentByIdStub = sinon + .stub(AttachmentService.prototype, 'getProjectReportAttachmentById') + .resolves(({ report: 1 } as unknown) as IProjectReportAttachment); + + const getProjectReportAttachmentAuthorsStub = sinon + .stub(AttachmentService.prototype, 'getProjectReportAttachmentAuthors') + .resolves([({ author: 2 } as unknown) as IReportAttachmentAuthor]); + + const expectedResponse = { + metadata: { report: 1 }, + authors: [{ author: 2 }] + }; + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; - await result(sampleReq, sampleRes as any, (null as unknown) as any); + const result = get.getProjectReportDetails(); + await result(mockReq, (sampleRes as unknown) as any, (null as unknown) as any); - expect(actualResult).to.be.eql({ - attachment_id: 1, - title: 'My report', - last_modified: '2020-10-10', - description: 'some description', - year_published: 2020, - revision_count: '1', - authors: [{ first_name: 'John', last_name: 'Smith' }] - }); + expect(actualResult).to.eql(expectedResponse); + expect(getProjectReportAttachmentByIdStub).to.be.calledOnce; + expect(getProjectReportAttachmentAuthorsStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/get.ts b/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/get.ts index fbcc1b7773..e55e573128 100644 --- a/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/get.ts +++ b/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/get.ts @@ -1,11 +1,9 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; -import { PROJECT_ROLE, SYSTEM_ROLE } from '../../../../../../constants/roles'; +import { PROJECT_ROLE } from '../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../database/db'; -import { HTTP400 } from '../../../../../../errors/custom-error'; -import { GetReportAttachmentMetadata } from '../../../../../../models/project-survey-attachments'; -import { queries } from '../../../../../../queries/queries'; import { authorizeRequestHandler } from '../../../../../../request-handlers/security/authorization'; +import { AttachmentService } from '../../../../../../services/attachment-service'; import { getLogger } from '../../../../../../utils/logger'; const defaultLog = getLogger('/api/project/{projectId}/attachments/{attachmentId}/getSignedUrl'); @@ -22,7 +20,7 @@ export const GET: Operation = [ ] }; }), - getProjectReportMetaData() + getProjectReportDetails() ]; GET.apiDoc = { @@ -30,7 +28,7 @@ GET.apiDoc = { tags: ['attachment'], security: [ { - Bearer: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR] + Bearer: [] } ], parameters: [ @@ -38,7 +36,8 @@ GET.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -46,7 +45,8 @@ GET.apiDoc = { in: 'path', name: 'attachmentId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -59,39 +59,38 @@ GET.apiDoc = { schema: { title: 'metadata get response object', type: 'object', - required: [ - 'attachment_id', - 'title', - 'last_modified', - 'description', - 'year_published', - 'revision_count', - 'authors' - ], + required: ['metadata', 'authors'], properties: { - attachment_id: { - description: 'Report metadata attachment id', - type: 'number' - }, - title: { - description: 'Report metadata attachment title ', - type: 'string' - }, - last_modified: { - description: 'Report metadata last modified', - type: 'string' - }, - description: { - description: 'Report metadata description', - type: 'string' - }, - year_published: { - description: 'Report metadata year published', - type: 'number' - }, - revision_count: { - description: 'Report metadata revision count', - type: 'number' + metadata: { + description: 'Report metadata general information object', + type: 'object', + required: ['id', 'title', 'last_modified', 'description', 'year_published', 'revision_count'], + properties: { + id: { + description: 'Report metadata attachment id', + type: 'number' + }, + title: { + description: 'Report metadata attachment title ', + type: 'string' + }, + last_modified: { + description: 'Report metadata last modified', + type: 'string' + }, + description: { + description: 'Report metadata description', + type: 'string' + }, + year_published: { + description: 'Report metadata year published', + type: 'number' + }, + revision_count: { + description: 'Report metadata revision count', + type: 'number' + } + } }, authors: { description: 'Report metadata author object', @@ -132,62 +131,41 @@ GET.apiDoc = { } }; -export function getProjectReportMetaData(): RequestHandler { +export function getProjectReportDetails(): RequestHandler { return async (req, res) => { defaultLog.debug({ - label: 'getProjectReportMetaData', + label: 'getProjectReportDetails', message: 'params', req_params: req.params, req_query: req.query }); - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - const connection = getDBConnection(req['keycloak_token']); try { - const getProjectReportAttachmentSQLStatement = queries.project.getProjectReportAttachmentSQL( + await connection.open(); + + const attachmentService = new AttachmentService(connection); + + const projectReportAttachment = await attachmentService.getProjectReportAttachmentById( Number(req.params.projectId), Number(req.params.attachmentId) ); - const getProjectReportAuthorsSQLStatement = queries.project.getProjectReportAuthorsSQL( + const projectReportAuthors = await attachmentService.getProjectReportAttachmentAuthors( Number(req.params.attachmentId) ); - if (!getProjectReportAttachmentSQLStatement || !getProjectReportAuthorsSQLStatement) { - throw new HTTP400('Failed to build metadata SQLStatement'); - } - - await connection.open(); - - const reportMetaData = await connection.query( - getProjectReportAttachmentSQLStatement.text, - getProjectReportAttachmentSQLStatement.values - ); - - const reportAuthorsData = await connection.query( - getProjectReportAuthorsSQLStatement.text, - getProjectReportAuthorsSQLStatement.values - ); - await connection.commit(); - const getReportMetaData = reportMetaData && reportMetaData.rows[0]; - - const getReportAuthorsData = reportAuthorsData && reportAuthorsData.rows; - - const reportMetaObj = new GetReportAttachmentMetadata(getReportMetaData, getReportAuthorsData); + const reportDetails = { + metadata: projectReportAttachment, + authors: projectReportAuthors + }; - return res.status(200).json(reportMetaObj); + return res.status(200).json(reportDetails); } catch (error) { - defaultLog.error({ label: 'getReportMetadata', message: 'error', error }); + defaultLog.error({ label: 'getProjectReportDetails', message: 'error', error }); await connection.rollback(); throw error; } finally { diff --git a/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/update.test.ts b/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/update.test.ts index 1b5e845ccd..f1a4f70246 100644 --- a/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/update.test.ts +++ b/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/update.test.ts @@ -2,11 +2,10 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../database/db'; -import { HTTPError } from '../../../../../../errors/custom-error'; -import project_queries from '../../../../../../queries/project'; -import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../__mocks__/db'; +import { HTTPError } from '../../../../../../errors/http-error'; +import { AttachmentService } from '../../../../../../services/attachment-service'; +import { getMockDBConnection } from '../../../../../../__mocks__/db'; import * as update_project_metadata from './update'; chai.use(sinonChai); @@ -16,264 +15,100 @@ describe('updates metadata for a project report', () => { sinon.restore(); }); - it('should throw a 400 error when no projectId is provided', async () => { + it('should throw a 400 error when the response is null', async () => { const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '', - attachmentId: '1' - }; - mockReq.body = { - attachment_type: 'Report', - revision_count: 1, - attachment_meta: { - title: 'My report', - year_published: 2000, - description: 'report abstract', - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ] - } - }; - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - try { - const requestHandler = update_project_metadata.updateProjectAttachmentMetadata(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw a 400 error when no attachmentId is provided', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - attachmentId: '' - }; - mockReq.body = { - attachment_type: 'Report', - revision_count: 1, - attachment_meta: { - title: 'My report', - year_published: 2000, - description: 'report abstract', - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ] + const sampleReq = { + keycloak_token: {}, + body: { + attachment_type: 'Report', + revision_count: 1, + attachment_meta: { + title: 'My report', + year_published: 2000, + description: 'report abstract', + authors: [ + { + first_name: 'John', + last_name: 'Smith' + } + ] + } + }, + params: { + projectId: '1', + attachmentId: '1' } - }; + } as any; - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + const expectedError = new Error('cannot process request'); + sinon.stub(AttachmentService.prototype, 'updateProjectReportAttachmentMetadata').rejects(expectedError); try { - const requestHandler = update_project_metadata.updateProjectAttachmentMetadata(); + const result = update_project_metadata.updateProjectAttachmentMetadata(); - await requestHandler(mockReq, mockRes, mockNext); + await result(sampleReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should throw a 400 error when attachment_type is invalid', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - attachmentId: '1' - }; - mockReq.body = { - attachment_type: 'notAReport', - revision_count: 1, - attachment_meta: { - title: 'My report', - year_published: 2000, - description: 'report abstract', - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ] - } - }; - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const requestHandler = update_project_metadata.updateProjectAttachmentMetadata(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Invalid body param `attachment_type`'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); it('should update a project report metadata, on success', async () => { const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - attachmentId: '1' - }; - mockReq.body = { - attachment_type: 'Report', - revision_count: 1, - attachment_meta: { - title: 'My report', - year_published: 2000, - description: 'report abstract', - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ] - } - }; - - const mockQuery = sinon.stub(); - - mockQuery.onCall(0).resolves({ - rowCount: 1, - rows: [{ id: 1 }] - }); - mockQuery.onCall(1).resolves({ - rowCount: 1, - rows: [{ id: 1 }] - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery - }); - - const requestHandler = update_project_metadata.updateProjectAttachmentMetadata(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.statusValue).to.equal(200); - }); - - it('should throw a 400 error when updateProjectReportAttachmentMetadataSQL returns null', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - attachmentId: '1' - }; - mockReq.body = { - attachment_type: 'Report', - revision_count: 1, - attachment_meta: { - title: 'My report', - year_published: 2000, - description: 'report abstract', - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ] + const sampleReq = { + keycloak_token: {}, + body: { + attachment_type: 'Report', + revision_count: 1, + attachment_meta: { + title: 'My report', + year_published: 2000, + description: 'report abstract', + authors: [ + { + first_name: 'John', + last_name: 'Smith' + } + ] + } + }, + params: { + projectId: '1', + attachmentId: '1' } - }; - - const mockQuery = sinon.stub(); - - mockQuery.onCall(0).resolves({ - rowCount: 1, - rows: [{ id: 1 }] - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery - }); - - sinon.stub(project_queries, 'updateProjectReportAttachmentMetadataSQL').returns(null); - - const requestHandler = update_project_metadata.updateProjectAttachmentMetadata(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to build SQL update attachment report statement'); - expect((actualError as HTTPError).status).to.equal(400); - } - }); - - it('should throw a 400 error when the response is null', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - attachmentId: '1' - }; - mockReq.body = { - attachment_type: 'Report', - revision_count: 1, - attachment_meta: { - title: 'My report', - year_published: 2000, - description: 'report abstract', - authors: [ - { - first_name: 'John', - last_name: 'Smith' + } as any; + + const updateProjectReportAttachmentMetadataStub = sinon + .stub(AttachmentService.prototype, 'updateProjectReportAttachmentMetadata') + .resolves(); + const deleteProjectReportAttachmentAuthorsStub = sinon + .stub(AttachmentService.prototype, 'deleteProjectReportAttachmentAuthors') + .resolves(); + const insertProjectReportAttachmentAuthorStub = sinon + .stub(AttachmentService.prototype, 'insertProjectReportAttachmentAuthor') + .resolves(); + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + send: (response: any) => { + actualResult = response; } - ] + }; } }; - const mockQuery = sinon.stub(); - - mockQuery.onCall(0).resolves({ - rowCount: null - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery - }); - - sinon.stub(project_queries, 'updateProjectReportAttachmentMetadataSQL').returns(SQL`something`); - const requestHandler = update_project_metadata.updateProjectAttachmentMetadata(); + await requestHandler(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to update attachment report record'); - expect((actualError as HTTPError).status).to.equal(400); - } + expect(actualResult).to.equal(undefined); + expect(updateProjectReportAttachmentMetadataStub).to.be.calledOnce; + expect(deleteProjectReportAttachmentAuthorsStub).to.be.calledOnce; + expect(insertProjectReportAttachmentAuthorStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/update.ts b/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/update.ts index b0362c7ad4..59519b8505 100644 --- a/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/update.ts +++ b/api/src/paths/project/{projectId}/attachments/{attachmentId}/metadata/update.ts @@ -2,13 +2,14 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { ATTACHMENT_TYPE } from '../../../../../../constants/attachments'; import { PROJECT_ROLE } from '../../../../../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../../../../../database/db'; -import { HTTP400 } from '../../../../../../errors/custom-error'; -import { PutReportAttachmentMetadata } from '../../../../../../models/project-survey-attachments'; -import { queries } from '../../../../../../queries/queries'; +import { getDBConnection } from '../../../../../../database/db'; +import { + IReportAttachmentAuthor, + PutReportAttachmentMetadata +} from '../../../../../../models/project-survey-attachments'; import { authorizeRequestHandler } from '../../../../../../request-handlers/security/authorization'; +import { AttachmentService } from '../../../../../../services/attachment-service'; import { getLogger } from '../../../../../../utils/logger'; -import { deleteProjectReportAttachmentAuthors, insertProjectReportAttachmentAuthor } from '../../report/upload'; const defaultLog = getLogger('/api/project/{projectId}/attachments/{attachmentId}/metadata/update'); @@ -40,7 +41,8 @@ PUT.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -48,7 +50,8 @@ PUT.apiDoc = { in: 'path', name: 'attachmentId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -133,18 +136,6 @@ export function updateProjectAttachmentMetadata(): RequestHandler { req_body: req.body }); - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - - if (!Object.values(ATTACHMENT_TYPE).includes(req.body?.attachment_type)) { - throw new HTTP400('Invalid body param `attachment_type`'); - } - const connection = getDBConnection(req['keycloak_token']); try { @@ -156,23 +147,24 @@ export function updateProjectAttachmentMetadata(): RequestHandler { revision_count: req.body.revision_count }); + const attachmentService = new AttachmentService(connection); + // Update the metadata fields of the attachment record - await updateProjectReportAttachmentMetadata( + await attachmentService.updateProjectReportAttachmentMetadata( Number(req.params.projectId), Number(req.params.attachmentId), - metadata, - connection + metadata ); // Delete any existing attachment author records - await deleteProjectReportAttachmentAuthors(Number(req.params.attachmentId), connection); + await attachmentService.deleteProjectReportAttachmentAuthors(Number(req.params.attachmentId)); const promises = []; // Insert any new attachment author records promises.push( - metadata.authors.map((author) => - insertProjectReportAttachmentAuthor(Number(req.params.attachmentId), author, connection) + metadata.authors.map((author: IReportAttachmentAuthor) => + attachmentService.insertProjectReportAttachmentAuthor(Number(req.params.attachmentId), author) ) ); @@ -191,22 +183,3 @@ export function updateProjectAttachmentMetadata(): RequestHandler { } }; } - -const updateProjectReportAttachmentMetadata = async ( - projectId: number, - attachmentId: number, - metadata: PutReportAttachmentMetadata, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.project.updateProjectReportAttachmentMetadataSQL(projectId, attachmentId, metadata); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL update attachment report statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to update attachment report record'); - } -}; diff --git a/api/src/paths/project/{projectId}/delete.test.ts b/api/src/paths/project/{projectId}/delete.test.ts index 69e3571468..5a052b9ac7 100644 --- a/api/src/paths/project/{projectId}/delete.test.ts +++ b/api/src/paths/project/{projectId}/delete.test.ts @@ -1,18 +1,13 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; -import { QueryResult } from 'pg'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import { SYSTEM_ROLE } from '../../../constants/roles'; import * as db from '../../../database/db'; -import { HTTPError } from '../../../errors/custom-error'; -import project_queries from '../../../queries/project'; -import survey_queries from '../../../queries/survey'; -import * as file_utils from '../../../utils/file-utils'; +import { HTTPError } from '../../../errors/http-error'; +import { ProjectService } from '../../../services/project-service'; import { getMockDBConnection } from '../../../__mocks__/db'; import * as delete_project from './delete'; -import * as survey_delete from './survey/{surveyId}/delete'; chai.use(sinonChai); @@ -21,228 +16,17 @@ describe('deleteProject', () => { sinon.restore(); }); - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1 - }, - system_user: { role_names: [SYSTEM_ROLE.SYSTEM_ADMIN] } - } as any; - - let actualResult = { - id: null - }; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - it('should throw an error when projectId is missing', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - try { - const result = delete_project.deleteProject(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param: `projectId`'); - } - }); - - it('should throw a 400 error when no sql statement returned for getProjectSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(project_queries, 'getProjectSQL').returns(null); - - try { - const result = delete_project.deleteProject(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - } - }); - - it('should throw a 400 error when fails to get the project cause no rows', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: async () => { - return { - rows: [null] - } as QueryResult; - } - }); - - sinon.stub(project_queries, 'getProjectSQL').returns(SQL`some`); - - try { - const result = delete_project.deleteProject(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to get the project'); - } - }); - - it('should throw a 400 error when fails to get the project cause no id', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: async () => { - return { - rows: [ - { - id: null - } - ] - } as QueryResult; - } - }); - - sinon.stub(project_queries, 'getProjectSQL').returns(SQL`some`); - - try { - const result = delete_project.deleteProject(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to get the project'); - } - }); - - it('should throw a 400 error when user has insufficient role to delete published project', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + const sampleReq = { + keycloak_token: {}, + params: { + projectId: null }, - query: async () => { - return { - rowCount: 1, - rows: [ - { - id: 1, - publish_date: 'some date' - } - ] - } as QueryResult; - } - }); - - try { - const result = delete_project.deleteProject(); - - await result( - { ...sampleReq, system_user: { role_names: [SYSTEM_ROLE.PROJECT_CREATOR] } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal( - 'Cannot delete a published project if you are not a system administrator.' - ); - } - }); - - it('should throw a 400 error when failed to get result for project attachments', async () => { - const mockQuery = sinon.stub(); - - // mock project query - mockQuery.onCall(0).resolves({ - rowCount: 1, - rows: [ - { - id: 1 - } - ] - }); - - // mock attachments query - mockQuery.onCall(1).resolves({ rows: null }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(project_queries, 'getProjectAttachmentsSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'getSurveyIdsSQL').returns(SQL`something`); - - try { - const result = delete_project.deleteProject(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to get project attachments'); - } - }); - - it('should throw a 400 error when failed to get result for survey ids', async () => { - const mockQuery = sinon.stub(); - - // mock project query - mockQuery.onCall(0).resolves({ - rowCount: 1, - rows: [ - { - id: 1 - } - ] - }); - - // mock attachments query - mockQuery.onCall(1).resolves({ rows: [] }); - - // mock survey query - mockQuery.onCall(2).resolves({ rows: null }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(project_queries, 'getProjectAttachmentsSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'getSurveyIdsSQL').returns(SQL`something`); + system_user: { role_names: [SYSTEM_ROLE.SYSTEM_ADMIN] } + } as any; try { const result = delete_project.deleteProject(); @@ -251,41 +35,24 @@ describe('deleteProject', () => { expect.fail(); } catch (actualError) { expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to get survey ids associated to project'); + expect((actualError as HTTPError).message).to.equal('Missing required path param: `projectId`'); } }); - it('should throw a 400 error when failed to build deleteProjectSQL statement', async () => { - const mockQuery = sinon.stub(); - - // mock project query - mockQuery.onCall(0).resolves({ - rowCount: 1, - rows: [ - { - id: 1 - } - ] - }); - - // mock attachments query - mockQuery.onCall(1).resolves({ rows: [{ key: 'key' }] }); + it('should throw an error if failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - // mock survey query - mockQuery.onCall(2).resolves({ rows: [{ id: 1 }] }); + const expectedError = new Error('cannot process request'); + sinon.stub(ProjectService.prototype, 'deleteProject').rejects(expectedError); - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + const sampleReq = { + keycloak_token: {}, + params: { + projectId: 1 }, - query: mockQuery - }); - - sinon.stub(project_queries, 'getProjectAttachmentsSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'getSurveyIdsSQL').returns(SQL`something`); - sinon.stub(survey_delete, 'getSurveyAttachmentS3Keys').resolves(['key1', 'key2']); - sinon.stub(project_queries, 'deleteProjectSQL').returns(null); + system_user: { role_names: [SYSTEM_ROLE.SYSTEM_ADMIN] } + } as any; try { const result = delete_project.deleteProject(); @@ -293,94 +60,42 @@ describe('deleteProject', () => { await result(sampleReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL delete statement'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should return null when no delete result', async () => { - const mockQuery = sinon.stub(); - - // mock project query - mockQuery.onCall(0).resolves({ - rowCount: 1, - rows: [ - { - id: 1 - } - ] - }); - - // mock attachments query - mockQuery.onCall(1).resolves({ rows: [{ key: 'key' }] }); - - // mock survey query - mockQuery.onCall(2).resolves({ rows: [{ id: 1 }] }); + it('should succeed with valid Id', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - // mock delete project query - mockQuery.onCall(3).resolves(); + const deleteProjectStub = sinon.stub(ProjectService.prototype, 'deleteProject').resolves(true); - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + const sampleReq = { + keycloak_token: {}, + params: { + projectId: 1 }, - query: mockQuery - }); + system_user: { role_names: [SYSTEM_ROLE.SYSTEM_ADMIN] } + } as any; - sinon.stub(project_queries, 'getProjectAttachmentsSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'getSurveyIdsSQL').returns(SQL`something`); - sinon.stub(survey_delete, 'getSurveyAttachmentS3Keys').resolves(['key1', 'key2']); - sinon.stub(project_queries, 'deleteProjectSQL').returns(SQL`some`); - sinon.stub(file_utils, 'deleteFileFromS3').resolves(null); + const expectedResponse = true; - const result = delete_project.deleteProject(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.equal(null); - }); - - it('should return true on successful delete', async () => { - const mockQuery = sinon.stub(); - - // mock project query - mockQuery.onCall(0).resolves({ - rowCount: 1, - rows: [ - { - id: 1 - } - ] - }); - - // mock attachments query - mockQuery.onCall(1).resolves({ rows: [{ key: 'key' }] }); - - // mock survey query - mockQuery.onCall(2).resolves({ rows: [{ id: 1 }] }); - - // mock delete project query - mockQuery.onCall(3).resolves(); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(project_queries, 'getProjectAttachmentsSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'getSurveyIdsSQL').returns(SQL`something`); - sinon.stub(survey_delete, 'getSurveyAttachmentS3Keys').resolves(['key1', 'key2']); - sinon.stub(project_queries, 'deleteProjectSQL').returns(SQL`some`); - sinon.stub(file_utils, 'deleteFileFromS3').resolves({}); + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; const result = delete_project.deleteProject(); - await result(sampleReq, sampleRes as any, (null as unknown) as any); + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); - expect(actualResult).to.equal(true); + expect(actualResult).to.eql(expectedResponse); + expect(deleteProjectStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/delete.ts b/api/src/paths/project/{projectId}/delete.ts index ccef35a2f8..8e3c448196 100644 --- a/api/src/paths/project/{projectId}/delete.ts +++ b/api/src/paths/project/{projectId}/delete.ts @@ -2,7 +2,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../constants/roles'; import { getDBConnection } from '../../../database/db'; -import { HTTP400 } from '../../../errors/custom-error'; +import { HTTP400 } from '../../../errors/http-error'; import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; import { ProjectService } from '../../../services/project-service'; import { getLogger } from '../../../utils/logger'; @@ -73,14 +73,13 @@ export function deleteProject(): RequestHandler { const connection = getDBConnection(req['keycloak_token']); const projectId = Number(req.params.projectId); - const userRoles = req['system_user']['role_names']; try { await connection.open(); const projectService = new ProjectService(connection); - const resp = await projectService.deleteProject(projectId, userRoles); + const resp = await projectService.deleteProject(projectId); await connection.commit(); diff --git a/api/src/paths/project/{projectId}/funding-sources/add.test.ts b/api/src/paths/project/{projectId}/funding-sources/add.test.ts deleted file mode 100644 index b804a31679..0000000000 --- a/api/src/paths/project/{projectId}/funding-sources/add.test.ts +++ /dev/null @@ -1,179 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import * as db from '../../../../database/db'; -import { HTTPError } from '../../../../errors/custom-error'; -import project_queries from '../../../../queries/project'; -import { getMockDBConnection } from '../../../../__mocks__/db'; -import * as addFunding from './add'; - -chai.use(sinonChai); - -describe('add a funding source', () => { - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - body: { - id: 0, - agency_id: 'agencyId', - investment_action_category: 1, - agency_project_id: 1, - funding_amount: 1, - start_date: '2021-01-01', - end_date: '2021-01-01' - }, - params: { - projectId: 1 - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 400 error when no projectId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = addFunding.addFundingSource(); - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw a 400 error when no request body present', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - try { - const result = addFunding.addFundingSource(); - - await result({ ...sampleReq, body: null }, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing funding source data'); - } - }); - - it('should throw a 400 error when addFundingSource fails, because result has no rows', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: null }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(project_queries, 'postProjectFundingSourceSQL').returns(SQL`some query`); - - try { - const result = addFunding.addFundingSource(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to insert project funding source data'); - } - }); - - it('should throw a 400 error when no sql statement returned for addFundingSourceSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(project_queries, 'postProjectFundingSourceSQL').returns(null); - - try { - const result = addFunding.addFundingSource(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build addFundingSourceSQLStatement'); - } - }); - - it('should throw a 400 error when the AddFundingSource fails because result has no id', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [{ id: null }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(project_queries, 'postProjectFundingSourceSQL').returns(SQL`some query`); - - try { - const result = addFunding.addFundingSource(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to insert project funding source data'); - } - }); - - it('should return the new funding source id on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [{ id: 23 }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(project_queries, 'postProjectFundingSourceSQL').returns(SQL`something`); - - const result = addFunding.addFundingSource(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.eql({ id: 23 }); - }); -}); diff --git a/api/src/paths/project/{projectId}/funding-sources/add.ts b/api/src/paths/project/{projectId}/funding-sources/add.ts deleted file mode 100644 index b161c173f4..0000000000 --- a/api/src/paths/project/{projectId}/funding-sources/add.ts +++ /dev/null @@ -1,83 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE } from '../../../../constants/roles'; -import { getDBConnection } from '../../../../database/db'; -import { HTTP400 } from '../../../../errors/custom-error'; -import { PostFundingSource } from '../../../../models/project-create'; -import { queries } from '../../../../queries/queries'; -import { authorizeRequestHandler } from '../../../../request-handlers/security/authorization'; -import { getLogger } from '../../../../utils/logger'; -import { addFundingSourceApiDocObject } from '../../../../utils/shared-api-docs'; - -const defaultLog = getLogger('/api/projects/{projectId}/funding-sources/add'); - -export const POST: Operation = [ - authorizeRequestHandler((req) => { - return { - and: [ - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR], - projectId: Number(req.params.projectId), - discriminator: 'ProjectRole' - } - ] - }; - }), - addFundingSource() -]; - -POST.apiDoc = addFundingSourceApiDocObject('Add a funding source of a project.', 'new project funding source id'); - -export function addFundingSource(): RequestHandler { - return async (req, res) => { - defaultLog.debug({ - label: 'Add project funding source', - message: 'params and body', - 'req.params': req.params, - 'req.body': req.body - }); - - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - const connection = getDBConnection(req['keycloak_token']); - - const sanitizedPostFundingSource = req.body && new PostFundingSource(req.body); - - if (!sanitizedPostFundingSource) { - throw new HTTP400('Missing funding source data'); - } - - try { - await connection.open(); - - const addFundingSourceSQLStatement = queries.project.postProjectFundingSourceSQL( - sanitizedPostFundingSource, - Number(req.params.projectId) - ); - - if (!addFundingSourceSQLStatement) { - throw new HTTP400('Failed to build addFundingSourceSQLStatement'); - } - - const response = await connection.query(addFundingSourceSQLStatement.text, addFundingSourceSQLStatement.values); - - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new HTTP400('Failed to insert project funding source data'); - } - - await connection.commit(); - - return res.status(200).json({ id: result.id }); - } catch (error) { - defaultLog.error({ label: 'addFundingSource', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/project/{projectId}/funding-sources/{pfsId}/delete.test.ts b/api/src/paths/project/{projectId}/funding-sources/{pfsId}/delete.test.ts deleted file mode 100644 index 27a5d92a72..0000000000 --- a/api/src/paths/project/{projectId}/funding-sources/{pfsId}/delete.test.ts +++ /dev/null @@ -1,197 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; -import project_queries from '../../../../../queries/project'; -import survey_queries from '../../../../../queries/survey'; -import { getMockDBConnection } from '../../../../../__mocks__/db'; -import * as deleteFundingSource from './delete'; - -chai.use(sinonChai); - -describe('delete a funding source', () => { - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - body: {}, - params: { - projectId: 1, - pfsId: 1 - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 400 error when no projectId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = deleteFundingSource.deleteFundingSource(); - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw a 400 error when no pfsId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = deleteFundingSource.deleteFundingSource(); - await result( - { ...sampleReq, params: { ...sampleReq.params, pfsId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `pfsId`'); - } - }); - - it('should throw a 400 error when no sql statement returned for deleteSurveyFundingSourceByProjectFundingSourceIdSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(survey_queries, 'deleteSurveyFundingSourceByProjectFundingSourceIdSQL').returns(null); - sinon.stub(project_queries, 'deleteProjectFundingSourceSQL').returns(SQL`some`); - - try { - const result = deleteFundingSource.deleteFundingSource(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL delete statement'); - } - }); - - it('should throw a 400 error when no sql statement returned for deleteProjectFundingSourceSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(survey_queries, 'deleteSurveyFundingSourceByProjectFundingSourceIdSQL').returns(SQL`some`); - sinon.stub(project_queries, 'deleteProjectFundingSourceSQL').returns(null); - - try { - const result = deleteFundingSource.deleteFundingSource(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL delete statement'); - } - }); - - it('should return the row count of the removed funding source on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rowCount: 1 }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'deleteSurveyFundingSourceByProjectFundingSourceIdSQL').returns(SQL`some`); - sinon.stub(project_queries, 'deleteProjectFundingSourceSQL').returns(SQL`something`); - - const result = deleteFundingSource.deleteFundingSource(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.eql(1); - }); - - it('throws a 400 error when delete survey fundingSource fails, because the response has no rows', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [], rowCount: 0 }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'deleteSurveyFundingSourceByProjectFundingSourceIdSQL').returns(SQL`some`); - sinon.stub(project_queries, 'deleteProjectFundingSourceSQL').returns(SQL`some query`); - - try { - const result = deleteFundingSource.deleteFundingSource(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to delete project funding source'); - } - }); - - it('throws a 400 error when delete project fundingSource fails, because the response has no rows', async () => { - const mockQuery = sinon.stub(); - - mockQuery.onFirstCall().resolves({ rows: [], rowCount: 1 }).onSecondCall().resolves({ rows: [], rowCount: 0 }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'deleteSurveyFundingSourceByProjectFundingSourceIdSQL').returns(SQL`some`); - sinon.stub(project_queries, 'deleteProjectFundingSourceSQL').returns(SQL`some query`); - - try { - const result = deleteFundingSource.deleteFundingSource(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to delete project funding source'); - } - }); -}); diff --git a/api/src/paths/project/{projectId}/funding-sources/{pfsId}/delete.ts b/api/src/paths/project/{projectId}/funding-sources/{pfsId}/delete.ts deleted file mode 100644 index f5494918dd..0000000000 --- a/api/src/paths/project/{projectId}/funding-sources/{pfsId}/delete.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE } from '../../../../../constants/roles'; -import { getDBConnection } from '../../../../../database/db'; -import { HTTP400 } from '../../../../../errors/custom-error'; -import { queries } from '../../../../../queries/queries'; -import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; -import { getLogger } from '../../../../../utils/logger'; -import { deleteFundingSourceApiDocObject } from '../../../../../utils/shared-api-docs'; - -const defaultLog = getLogger('/api/projects/{projectId}/funding-sources/{pfsId}/delete'); - -export const DELETE: Operation = [ - authorizeRequestHandler((req) => { - return { - and: [ - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR], - projectId: Number(req.query.projectId), - discriminator: 'ProjectRole' - } - ] - }; - }), - deleteFundingSource() -]; - -DELETE.apiDoc = deleteFundingSourceApiDocObject( - 'Delete a funding source of a project.', - 'Row count of successfully deleted funding sources' -); - -export function deleteFundingSource(): RequestHandler { - return async (req, res) => { - defaultLog.debug({ label: 'Delete project funding source', message: 'params', req_params: req.params }); - - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.pfsId) { - throw new HTTP400('Missing required path param `pfsId`'); - } - - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - const surveyFundingSourceDeleteStatement = queries.survey.deleteSurveyFundingSourceByProjectFundingSourceIdSQL( - Number(req.params.pfsId) - ); - - const deleteProjectFundingSourceSQLStatement = queries.project.deleteProjectFundingSourceSQL( - Number(req.params.projectId), - Number(req.params.pfsId) - ); - - if (!deleteProjectFundingSourceSQLStatement || !surveyFundingSourceDeleteStatement) { - throw new HTTP400('Failed to build SQL delete statement'); - } - - await connection.query(surveyFundingSourceDeleteStatement.text, surveyFundingSourceDeleteStatement.values); - - const projectFundingSourceDeleteResponse = await connection.query( - deleteProjectFundingSourceSQLStatement.text, - deleteProjectFundingSourceSQLStatement.values - ); - - if (!projectFundingSourceDeleteResponse.rowCount) { - throw new HTTP400('Failed to delete project funding source'); - } - - await connection.commit(); - - return res.status(200).json(projectFundingSourceDeleteResponse && projectFundingSourceDeleteResponse.rowCount); - } catch (error) { - defaultLog.error({ label: 'deleteFundingSource', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/project/{projectId}/participants/create.test.ts b/api/src/paths/project/{projectId}/participants/create.test.ts index 350d864821..adfda96294 100644 --- a/api/src/paths/project/{projectId}/participants/create.test.ts +++ b/api/src/paths/project/{projectId}/participants/create.test.ts @@ -4,7 +4,7 @@ import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import { SYSTEM_IDENTITY_SOURCE } from '../../../../constants/database'; import * as db from '../../../../database/db'; -import { HTTPError } from '../../../../errors/custom-error'; +import { HTTPError } from '../../../../errors/http-error'; import { UserService } from '../../../../services/user-service'; import { getMockDBConnection } from '../../../../__mocks__/db'; import * as create_project_participants from './create'; diff --git a/api/src/paths/project/{projectId}/participants/create.ts b/api/src/paths/project/{projectId}/participants/create.ts index b06dd1d135..310059c074 100644 --- a/api/src/paths/project/{projectId}/participants/create.ts +++ b/api/src/paths/project/{projectId}/participants/create.ts @@ -3,7 +3,7 @@ import { Operation } from 'express-openapi'; import { SYSTEM_IDENTITY_SOURCE } from '../../../../constants/database'; import { PROJECT_ROLE } from '../../../../constants/roles'; import { getDBConnection, IDBConnection } from '../../../../database/db'; -import { HTTP400 } from '../../../../errors/custom-error'; +import { HTTP400 } from '../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../request-handlers/security/authorization'; import { ProjectService } from '../../../../services/project-service'; import { UserService } from '../../../../services/user-service'; @@ -39,7 +39,8 @@ POST.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -63,7 +64,11 @@ POST.apiDoc = { }, identitySource: { type: 'string', - enum: [SYSTEM_IDENTITY_SOURCE.IDIR, SYSTEM_IDENTITY_SOURCE.BCEID] + enum: [ + SYSTEM_IDENTITY_SOURCE.IDIR, + SYSTEM_IDENTITY_SOURCE.BCEID_BASIC, + SYSTEM_IDENTITY_SOURCE.BCEID_BUSINESS + ] }, roleId: { description: 'The id of the project role to assign to the participant.', @@ -99,9 +104,13 @@ POST.apiDoc = { } }; +type Participant = { userIdentifier: string; identitySource: string; roleId: number }; + export function createProjectParticipants(): RequestHandler { return async (req, res) => { - if (!req.params.projectId) { + const projectId = Number(req.params.projectId); + + if (!projectId) { throw new HTTP400('Missing required param `projectId`'); } @@ -112,17 +121,13 @@ export function createProjectParticipants(): RequestHandler { const connection = getDBConnection(req['keycloak_token']); try { - const projectId = Number(req.params.projectId); - - const participants: { userIdentifier: string; identitySource: string; roleId: number }[] = req.body.participants; + const participants: Participant[] = req.body.participants; await connection.open(); - const promises: Promise[] = []; - - participants.forEach((participant) => - promises.push(ensureSystemUserAndProjectParticipantUser(projectId, participant, connection)) - ); + const promises: Promise[] = participants.map((participant) => { + return ensureSystemUserAndProjectParticipantUser(projectId, { ...participant, userGuid: null }, connection); + }); await Promise.all(promises); @@ -140,13 +145,17 @@ export function createProjectParticipants(): RequestHandler { export const ensureSystemUserAndProjectParticipantUser = async ( projectId: number, - participant: { userIdentifier: string; identitySource: string; roleId: number }, + participant: Participant & { userGuid: string | null }, connection: IDBConnection ) => { const userService = new UserService(connection); - // Add a system user, unless they already have one - const systemUserObject = await userService.ensureSystemUser(participant.userIdentifier, participant.identitySource); + // Create or activate the system user + const systemUserObject = await userService.ensureSystemUser( + participant.userGuid, + participant.userIdentifier, + participant.identitySource + ); const projectService = new ProjectService(connection); diff --git a/api/src/paths/project/{projectId}/participants/get.test.ts b/api/src/paths/project/{projectId}/participants/get.test.ts index bfd1fdfdaa..ef1eb0ccd0 100644 --- a/api/src/paths/project/{projectId}/participants/get.test.ts +++ b/api/src/paths/project/{projectId}/participants/get.test.ts @@ -3,7 +3,7 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../../database/db'; -import { HTTPError } from '../../../../errors/custom-error'; +import { HTTPError } from '../../../../errors/http-error'; import { ProjectService } from '../../../../services/project-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../__mocks__/db'; import * as get_project_participants from './get'; diff --git a/api/src/paths/project/{projectId}/participants/get.ts b/api/src/paths/project/{projectId}/participants/get.ts index 17045c7e9a..c26d28d8b2 100644 --- a/api/src/paths/project/{projectId}/participants/get.ts +++ b/api/src/paths/project/{projectId}/participants/get.ts @@ -2,7 +2,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../constants/roles'; import { getDBConnection } from '../../../../database/db'; -import { HTTP400 } from '../../../../errors/custom-error'; +import { HTTP400 } from '../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../request-handlers/security/authorization'; import { ProjectService } from '../../../../services/project-service'; import { getLogger } from '../../../../utils/logger'; @@ -37,7 +37,8 @@ GET.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -70,6 +71,11 @@ GET.apiDoc = { project_role_name: { type: 'string' }, + user_guid: { + type: 'string', + description: 'The GUID for the user.', + nullable: true + }, user_identifier: { type: 'string' }, diff --git a/api/src/paths/project/{projectId}/participants/{projectParticipationId}/delete.test.ts b/api/src/paths/project/{projectId}/participants/{projectParticipationId}/delete.test.ts index 585b609870..d5fe64f212 100644 --- a/api/src/paths/project/{projectId}/participants/{projectParticipationId}/delete.test.ts +++ b/api/src/paths/project/{projectId}/participants/{projectParticipationId}/delete.test.ts @@ -2,14 +2,13 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; -import { queries } from '../../../../../queries/queries'; +import { HTTPError } from '../../../../../errors/http-error'; import { ProjectService } from '../../../../../services/project-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../__mocks__/db'; import * as doAllProjectsHaveAProjectLead from '../../../../user/{userId}/delete'; import * as delete_project_participant from './delete'; + chai.use(sinonChai); describe('Delete a project participant.', () => { @@ -17,84 +16,16 @@ describe('Delete a project participant.', () => { sinon.restore(); }); - it('should throw a 400 error when no projectId is provided', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - const dbConnectionObj = getMockDBConnection(); - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - mockReq.params = { projectId: '', projectParticipationId: '2' }; - - try { - const requestHandler = delete_project_participant.deleteProjectParticipant(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw a 400 error when no projectParticipationId is provided', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - const dbConnectionObj = getMockDBConnection(); - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - mockReq.params = { projectId: '1', projectParticipationId: '' }; - - try { - const requestHandler = delete_project_participant.deleteProjectParticipant(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectParticipationId`'); - } - }); - - it('should throw a 400 error when deleteProjectParticipationSQL query fails', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - const dbConnectionObj = getMockDBConnection(); - - mockReq.params = { projectId: '1', projectParticipationId: '2' }; - - sinon.stub(queries.projectParticipation, 'deleteProjectParticipationSQL').returns(null); - sinon.stub(ProjectService.prototype, 'getProjectParticipants').resolves([{ id: 1 }]); - sinon.stub(doAllProjectsHaveAProjectLead, 'doAllProjectsHaveAProjectLead').returns(true); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - try { - const requestHandler = delete_project_participant.deleteProjectParticipant(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL delete statement'); - } - }); - - it('should throw a 400 error when connection query fails', async () => { + it('should throw a 500 error when deleteProjectParticipationRecord fails', async () => { const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); const dbConnectionObj = getMockDBConnection(); mockReq.params = { projectId: '1', projectParticipationId: '2' }; - sinon.stub(queries.projectParticipation, 'deleteProjectParticipationSQL').returns(SQL`some query`); + sinon.stub(ProjectService.prototype, 'deleteProjectParticipationRecord').resolves(); sinon.stub(ProjectService.prototype, 'getProjectParticipants').resolves([{ id: 1 }]); sinon.stub(doAllProjectsHaveAProjectLead, 'doAllProjectsHaveAProjectLead').returns(true); - const mockQuery = sinon.stub(); - - mockQuery.resolves(null); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -109,7 +40,7 @@ describe('Delete a project participant.', () => { expect.fail(); } catch (actualError) { expect((actualError as HTTPError).status).to.equal(500); - expect((actualError as HTTPError).message).to.equal('Failed to delete project team member'); + expect((actualError as HTTPError).message).to.equal('Failed to delete project participant'); } }); @@ -119,7 +50,7 @@ describe('Delete a project participant.', () => { mockReq.params = { projectId: '1', projectParticipationId: '2' }; - sinon.stub(queries.projectParticipation, 'deleteProjectParticipationSQL').returns(SQL`some query`); + sinon.stub(ProjectService.prototype, 'deleteProjectParticipationRecord').resolves({ system_user_id: 1 }); const getProjectParticipant = sinon.stub(ProjectService.prototype, 'getProjectParticipants'); const doAllProjectsHaveLead = sinon.stub(doAllProjectsHaveAProjectLead, 'doAllProjectsHaveAProjectLead'); @@ -128,19 +59,11 @@ describe('Delete a project participant.', () => { getProjectParticipant.onCall(1).resolves([{ id: 2 }]); doAllProjectsHaveLead.onCall(1).returns(false); - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rows: [{ system_user_id: 1 }], - rowCount: 1 - }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); try { @@ -162,7 +85,7 @@ describe('Delete a project participant.', () => { mockReq.params = { projectId: '1', projectParticipationId: '2' }; - sinon.stub(queries.projectParticipation, 'deleteProjectParticipationSQL').returns(SQL`some query`); + sinon.stub(ProjectService.prototype, 'deleteProjectParticipationRecord').resolves({ system_user_id: 1 }); const getProjectParticipant = sinon.stub(ProjectService.prototype, 'getProjectParticipants'); const doAllProjectsHaveLead = sinon.stub(doAllProjectsHaveAProjectLead, 'doAllProjectsHaveAProjectLead'); @@ -171,19 +94,11 @@ describe('Delete a project participant.', () => { getProjectParticipant.onCall(1).resolves([{ id: 2 }]); doAllProjectsHaveLead.onCall(1).returns(true); - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rows: [{ system_user_id: 1 }], - rowCount: 1 - }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); const requestHandler = delete_project_participant.deleteProjectParticipant(); diff --git a/api/src/paths/project/{projectId}/participants/{projectParticipationId}/delete.ts b/api/src/paths/project/{projectId}/participants/{projectParticipationId}/delete.ts index 9342241ba8..cbd339229f 100644 --- a/api/src/paths/project/{projectId}/participants/{projectParticipationId}/delete.ts +++ b/api/src/paths/project/{projectId}/participants/{projectParticipationId}/delete.ts @@ -1,9 +1,8 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../../../../database/db'; -import { HTTP400, HTTP500 } from '../../../../../errors/custom-error'; -import { queries } from '../../../../../queries/queries'; +import { getDBConnection } from '../../../../../database/db'; +import { HTTP400, HTTP500 } from '../../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; import { ProjectService } from '../../../../../services/project-service'; import { getLogger } from '../../../../../utils/logger'; @@ -39,7 +38,8 @@ DELETE.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -47,7 +47,8 @@ DELETE.apiDoc = { in: 'path', name: 'projectParticipationId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -76,15 +77,8 @@ DELETE.apiDoc = { export function deleteProjectParticipant(): RequestHandler { return async (req, res) => { - defaultLog.debug({ label: 'deleteProjectParticipant', message: 'params', req_params: req.params }); - - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.projectParticipationId) { - throw new HTTP400('Missing required path param `projectParticipationId`'); - } + const projectId = Number(req.params.projectId); + const projectParticipationId = Number(req.params.projectParticipationId); const connection = getDBConnection(req['keycloak_token']); @@ -94,10 +88,10 @@ export function deleteProjectParticipant(): RequestHandler { const projectService = new ProjectService(connection); // Check project lead roles before deleting user - const projectParticipantsResponse1 = await projectService.getProjectParticipants(Number(req.params.projectId)); + const projectParticipantsResponse1 = await projectService.getProjectParticipants(projectId); const projectHasLeadResponse1 = doAllProjectsHaveAProjectLead(projectParticipantsResponse1); - const result = await deleteProjectParticipationRecord(Number(req.params.projectParticipationId), connection); + const result = await projectService.deleteProjectParticipationRecord(projectParticipationId); if (!result || !result.system_user_id) { // The delete result is missing necesary data, fail the request @@ -107,7 +101,7 @@ export function deleteProjectParticipant(): RequestHandler { // If Project Lead roles are invalide skip check to prevent removal of only Project Lead of project // (Project is already missing Project Lead and is in a bad state) if (projectHasLeadResponse1) { - const projectParticipantsResponse2 = await projectService.getProjectParticipants(Number(req.params.projectId)); + const projectParticipantsResponse2 = await projectService.getProjectParticipants(projectId); const projectHasLeadResponse2 = doAllProjectsHaveAProjectLead(projectParticipantsResponse2); if (!projectHasLeadResponse2) { @@ -127,22 +121,3 @@ export function deleteProjectParticipant(): RequestHandler { } }; } - -export const deleteProjectParticipationRecord = async ( - projectParticipationId: number, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.projectParticipation.deleteProjectParticipationSQL(projectParticipationId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL delete statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP500('Failed to delete project team member'); - } - - return response.rows[0]; -}; diff --git a/api/src/paths/project/{projectId}/participants/{projectParticipationId}/update.test.ts b/api/src/paths/project/{projectId}/participants/{projectParticipationId}/update.test.ts index 524715b1b9..50d34b86d7 100644 --- a/api/src/paths/project/{projectId}/participants/{projectParticipationId}/update.test.ts +++ b/api/src/paths/project/{projectId}/participants/{projectParticipationId}/update.test.ts @@ -2,116 +2,27 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; -import { queries } from '../../../../../queries/queries'; +import { HTTPError } from '../../../../../errors/http-error'; import { ProjectService } from '../../../../../services/project-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../__mocks__/db'; import * as doAllProjectsHaveAProjectLead from '../../../../user/{userId}/delete'; import * as update_project_participant from './update'; chai.use(sinonChai); -describe('Delete a project participant.', () => { +describe('update a project participant.', () => { afterEach(() => { sinon.restore(); }); - it('should throw a 400 error when no projectId is provided', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - const dbConnectionObj = getMockDBConnection(); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - mockReq.params = { projectId: '', projectParticipationId: '2' }; - - try { - const requestHandler = update_project_participant.updateProjectParticipantRole(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw a 400 error when no projectParticipationId is provided', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - const dbConnectionObj = getMockDBConnection(); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - mockReq.params = { projectId: '1', projectParticipationId: '' }; - - try { - const requestHandler = update_project_participant.updateProjectParticipantRole(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectParticipationId`'); - } - }); - - it('should throw a 400 error when no roleId is provided', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - const dbConnectionObj = getMockDBConnection(); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - mockReq.params = { projectId: '1', projectParticipationId: '2' }; - mockReq.body = { roleId: '' }; - - try { - const requestHandler = update_project_participant.updateProjectParticipantRole(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required body param `roleId`'); - } - }); - - it('should throw a 400 error when deleteProjectParticipationSQL query fails', async () => { - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - const dbConnectionObj = getMockDBConnection(); - - mockReq.params = { projectId: '1', projectParticipationId: '2' }; - mockReq.body = { roleId: '1' }; - - sinon.stub(queries.projectParticipation, 'deleteProjectParticipationSQL').returns(null); - sinon.stub(ProjectService.prototype, 'getProjectParticipants').resolves([{ id: 1 }]); - sinon.stub(doAllProjectsHaveAProjectLead, 'doAllProjectsHaveAProjectLead').returns(true); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - try { - const requestHandler = update_project_participant.updateProjectParticipantRole(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL delete statement'); - } - }); - - it('should throw a 400 error when connection query fails', async () => { + it('should throw a 400 error when delete fails', async () => { const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); const dbConnectionObj = getMockDBConnection(); mockReq.params = { projectId: '1', projectParticipationId: '2' }; mockReq.body = { roleId: '1' }; - sinon.stub(queries.projectParticipation, 'deleteProjectParticipationSQL').returns(SQL`some query`); + sinon.stub(ProjectService.prototype, 'deleteProjectParticipationRecord').resolves(); sinon.stub(ProjectService.prototype, 'getProjectParticipants').resolves([{ id: 1 }]); sinon.stub(doAllProjectsHaveAProjectLead, 'doAllProjectsHaveAProjectLead').returns(true); @@ -129,7 +40,7 @@ describe('Delete a project participant.', () => { expect.fail(); } catch (actualError) { expect((actualError as HTTPError).status).to.equal(500); - expect((actualError as HTTPError).message).to.equal('Failed to delete project team member'); + expect((actualError as HTTPError).message).to.equal('Failed to update project participant role'); } }); @@ -140,7 +51,8 @@ describe('Delete a project participant.', () => { mockReq.params = { projectId: '1', projectParticipationId: '2' }; mockReq.body = { roleId: '1' }; - sinon.stub(queries.projectParticipation, 'deleteProjectParticipationSQL').returns(SQL`some query`); + sinon.stub(ProjectService.prototype, 'deleteProjectParticipationRecord').resolves({ system_user_id: 1 }); + sinon.stub(ProjectService.prototype, 'addProjectParticipant').resolves(); const getProjectParticipant = sinon.stub(ProjectService.prototype, 'getProjectParticipants'); const doAllProjectsHaveLead = sinon.stub(doAllProjectsHaveAProjectLead, 'doAllProjectsHaveAProjectLead'); @@ -149,19 +61,11 @@ describe('Delete a project participant.', () => { getProjectParticipant.onCall(1).resolves([{ id: 2 }]); doAllProjectsHaveLead.onCall(1).returns(false); - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rows: [{ system_user_id: 1 }], - rowCount: 1 - }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); try { @@ -184,7 +88,8 @@ describe('Delete a project participant.', () => { mockReq.params = { projectId: '1', projectParticipationId: '2' }; mockReq.body = { roleId: '1' }; - sinon.stub(queries.projectParticipation, 'deleteProjectParticipationSQL').returns(SQL`some query`); + sinon.stub(ProjectService.prototype, 'deleteProjectParticipationRecord').resolves({ system_user_id: 1 }); + sinon.stub(ProjectService.prototype, 'addProjectParticipant').resolves(); const getProjectParticipant = sinon.stub(ProjectService.prototype, 'getProjectParticipants'); const doAllProjectsHaveLead = sinon.stub(doAllProjectsHaveAProjectLead, 'doAllProjectsHaveAProjectLead'); @@ -193,19 +98,11 @@ describe('Delete a project participant.', () => { getProjectParticipant.onCall(1).resolves([{ id: 2 }]); doAllProjectsHaveLead.onCall(1).returns(true); - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rows: [{ system_user_id: 1 }], - rowCount: 1 - }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); const requestHandler = update_project_participant.updateProjectParticipantRole(); diff --git a/api/src/paths/project/{projectId}/participants/{projectParticipationId}/update.ts b/api/src/paths/project/{projectId}/participants/{projectParticipationId}/update.ts index 115249a4df..734b4a7777 100644 --- a/api/src/paths/project/{projectId}/participants/{projectParticipationId}/update.ts +++ b/api/src/paths/project/{projectId}/participants/{projectParticipationId}/update.ts @@ -2,12 +2,11 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../constants/roles'; import { getDBConnection } from '../../../../../database/db'; -import { HTTP400, HTTP500 } from '../../../../../errors/custom-error'; +import { HTTP400, HTTP500 } from '../../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; import { ProjectService } from '../../../../../services/project-service'; import { getLogger } from '../../../../../utils/logger'; import { doAllProjectsHaveAProjectLead } from '../../../../user/{userId}/delete'; -import { deleteProjectParticipationRecord } from './delete'; const defaultLog = getLogger('/api/project/{projectId}/participants/{projectParticipationId}/update'); @@ -39,7 +38,8 @@ PUT.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -47,7 +47,8 @@ PUT.apiDoc = { in: 'path', name: 'projectParticipationId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -60,7 +61,8 @@ PUT.apiDoc = { required: ['roleId'], properties: { roleId: { - type: 'number' + type: 'integer', + minimum: 1 } } } @@ -91,19 +93,9 @@ PUT.apiDoc = { export function updateProjectParticipantRole(): RequestHandler { return async (req, res) => { - defaultLog.debug({ label: 'updateProjectParticipantRole', message: 'params', req_params: req.params }); - - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.projectParticipationId) { - throw new HTTP400('Missing required path param `projectParticipationId`'); - } - - if (!req.body.roleId) { - throw new HTTP400('Missing required body param `roleId`'); - } + const projectId = Number(req.params.projectId); + const projectParticipationId = Number(req.params.projectParticipationId); + const roleId = Number(req.body.roleId); const connection = getDBConnection(req['keycloak_token']); @@ -117,7 +109,7 @@ export function updateProjectParticipantRole(): RequestHandler { const projectHasLeadResponse1 = doAllProjectsHaveAProjectLead(projectParticipantsResponse1); // Delete the user's old participation record, returning the old record - const result = await deleteProjectParticipationRecord(Number(req.params.projectParticipationId), connection); + const result = await projectService.deleteProjectParticipationRecord(projectParticipationId); if (!result || !result.system_user_id) { // The delete result is missing necessary data, fail the request @@ -125,9 +117,9 @@ export function updateProjectParticipantRole(): RequestHandler { } await projectService.addProjectParticipant( - Number(req.params.projectId), + projectId, Number(result.system_user_id), // get the user's system id from the old participation record - Number(req.body.roleId) + roleId ); // If Project Lead roles are invalid skip check to prevent removal of only Project Lead of project diff --git a/api/src/paths/project/{projectId}/publish.test.ts b/api/src/paths/project/{projectId}/publish.test.ts deleted file mode 100644 index 8cbdd3dcb7..0000000000 --- a/api/src/paths/project/{projectId}/publish.test.ts +++ /dev/null @@ -1,141 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import { QueryResult } from 'pg'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import * as db from '../../../database/db'; -import { HTTPError } from '../../../errors/custom-error'; -import { ProjectService } from '../../../services/project-service'; -import { getMockDBConnection } from '../../../__mocks__/db'; -import * as publish from './publish'; - -chai.use(sinonChai); - -const dbConnectionObj = getMockDBConnection(); - -const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1 - }, - body: { - publish: true - } -} as any; - -let actualResult = { - id: null -}; - -const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } -}; - -describe('project/{projectId}/publish', () => { - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 400 error when missing request param projectId', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - try { - const result = publish.publishProject(); - - await result( - { ...sampleReq, body: { ...sampleReq.body }, params: { projectId: undefined } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path parameter: projectId'); - } - }); - - it('should throw a 400 error when missing request body', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - try { - const result = publish.publishProject(); - - await result( - { ...sampleReq, body: (null as unknown) as any }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing request body'); - } - }); - - it('should throw a 400 error when missing publish flag in request body', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - try { - const result = publish.publishProject(); - - await result( - { ...sampleReq, body: { ...sampleReq.body, publish: undefined } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing publish flag in request body'); - } - }); - - it('should return the project id on success', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: async () => { - return { - rowCount: 1, - rows: [ - { - id: 1, - create_date: '2020/04/04' - } - ] - } as QueryResult; - } - }); - - sinon.stub(ProjectService.prototype, 'updatePublishStatus').resolves(1); - - const result = publish.publishProject(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult.id).to.equal(1); - }); -}); diff --git a/api/src/paths/project/{projectId}/publish.ts b/api/src/paths/project/{projectId}/publish.ts deleted file mode 100644 index 1ae11be4e1..0000000000 --- a/api/src/paths/project/{projectId}/publish.ts +++ /dev/null @@ -1,136 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE } from '../../../constants/roles'; -import { getDBConnection } from '../../../database/db'; -import { HTTP400 } from '../../../errors/custom-error'; -import { projectIdResponseObject } from '../../../openapi/schemas/project'; -import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; -import { ProjectService } from '../../../services/project-service'; -import { getLogger } from '../../../utils/logger'; - -const defaultLog = getLogger('paths/project/{projectId}/publish'); - -export const PUT: Operation = [ - authorizeRequestHandler((req) => { - return { - and: [ - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD], - projectId: Number(req.params.projectId), - discriminator: 'ProjectRole' - } - ] - }; - }), - publishProject() -]; - -PUT.apiDoc = { - description: 'Publish or unpublish a project.', - tags: ['project'], - security: [ - { - Bearer: [] - } - ], - parameters: [ - { - in: 'path', - name: 'projectId', - schema: { - type: 'number' - }, - required: true - } - ], - requestBody: { - description: 'Publish or unpublish put request object.', - content: { - 'application/json': { - schema: { - title: 'Publish request object', - type: 'object', - required: ['publish'], - properties: { - publish: { - title: 'publish?', - type: 'boolean' - } - } - } - } - } - }, - responses: { - 200: { - description: 'Project publish request completed successfully.', - content: { - 'application/json': { - schema: { - // TODO is there any return value? or is it just an HTTP status with no content? - ...(projectIdResponseObject as object) - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/401' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -/** - * Update a project. - * - * @returns {RequestHandler} - */ -export function publishProject(): RequestHandler { - return async (req, res) => { - const connection = getDBConnection(req['keycloak_token']); - - try { - const projectId = Number(req.params.projectId); - - if (!projectId) { - throw new HTTP400('Missing required path parameter: projectId'); - } - - if (!req.body) { - throw new HTTP400('Missing request body'); - } - - if (req.body.publish === undefined) { - throw new HTTP400('Missing publish flag in request body'); - } - - const publish: boolean = req.body.publish; - - await connection.open(); - - const projectService = new ProjectService(connection); - - const result = await projectService.updatePublishStatus(projectId, publish); - - await connection.commit(); - return res.status(200).json({ id: result }); - } catch (error) { - defaultLog.error({ label: 'publishProject', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/project/{projectId}/survey/create.test.ts b/api/src/paths/project/{projectId}/survey/create.test.ts index a9bc40e934..d21ca4f184 100644 --- a/api/src/paths/project/{projectId}/survey/create.test.ts +++ b/api/src/paths/project/{projectId}/survey/create.test.ts @@ -3,7 +3,8 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../../database/db'; -import { HTTPError } from '../../../../errors/custom-error'; +import { HTTPError } from '../../../../errors/http-error'; +import { PlatformService } from '../../../../services/platform-service'; import { SurveyService } from '../../../../services/survey-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../__mocks__/db'; import { createSurvey } from './create'; @@ -23,6 +24,8 @@ describe('survey/create', () => { sinon.stub(SurveyService.prototype, 'createSurvey').resolves(2); + sinon.stub(PlatformService.prototype, 'submitDwCAMetadataPackage').resolves(); + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); mockReq.params = { projectId: '1' }; diff --git a/api/src/paths/project/{projectId}/survey/create.ts b/api/src/paths/project/{projectId}/survey/create.ts index 750c952ddf..67fccee3f8 100644 --- a/api/src/paths/project/{projectId}/survey/create.ts +++ b/api/src/paths/project/{projectId}/survey/create.ts @@ -5,6 +5,7 @@ import { getDBConnection } from '../../../../database/db'; import { PostSurveyObject } from '../../../../models/survey-create'; import { geoJsonFeature } from '../../../../openapi/schemas/geoJson'; import { authorizeRequestHandler } from '../../../../request-handlers/security/authorization'; +import { PlatformService } from '../../../../services/platform-service'; import { SurveyService } from '../../../../services/survey-service'; import { getLogger } from '../../../../utils/logger'; @@ -106,11 +107,20 @@ POST.apiDoc = { permit: { type: 'object', properties: { - permit_number: { - type: 'string' - }, - permit_type: { - type: 'string' + permits: { + type: 'array', + items: { + type: 'object', + required: ['permit_number', 'permit_type'], + properties: { + permit_number: { + type: 'string' + }, + permit_type: { + type: 'string' + } + } + } } } }, @@ -168,10 +178,6 @@ POST.apiDoc = { }, ecological_season_id: { type: 'number' - }, - surveyed_all_areas: { - type: 'string', - enum: ['true', 'false'] } } }, @@ -188,23 +194,6 @@ POST.apiDoc = { } } } - }, - agreements: { - type: 'object', - properties: { - foippa_requirements_accepted: { - type: 'boolean', - enum: [true], - description: - 'Data meets or exceeds the Freedom of Information and Protection of Privacy Act (FOIPPA) Requirements' - }, - sedis_procedures_accepted: { - type: 'boolean', - enum: [true], - description: - 'Data is in accordance with the Species and Ecosystems Data and Information Security (SEDIS) Procedures' - } - } } } } @@ -262,6 +251,14 @@ export function createSurvey(): RequestHandler { const surveyId = await surveyService.createSurvey(projectId, sanitizedPostSurveyData); + try { + const platformService = new PlatformService(connection); + await platformService.submitDwCAMetadataPackage(projectId); + } catch (error) { + // Don't fail the rest of the endpoint if submitting metadata fails + defaultLog.error({ label: 'createSurvey->submitDwCAMetadataPackage', message: 'error', error }); + } + await connection.commit(); return res.status(200).json({ id: surveyId }); diff --git a/api/src/paths/project/{projectId}/survey/funding-sources/list.test.ts b/api/src/paths/project/{projectId}/survey/funding-sources/list.test.ts index 67d5bd4121..811ac7314f 100644 --- a/api/src/paths/project/{projectId}/survey/funding-sources/list.test.ts +++ b/api/src/paths/project/{projectId}/survey/funding-sources/list.test.ts @@ -3,7 +3,7 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; +import { HTTPError } from '../../../../../errors/http-error'; import { ProjectService } from '../../../../../services/project-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../__mocks__/db'; import { getSurveyFundingSources } from './list'; diff --git a/api/src/paths/project/{projectId}/survey/funding-sources/list.ts b/api/src/paths/project/{projectId}/survey/funding-sources/list.ts index 25573fc1c4..cfd018cd3d 100644 --- a/api/src/paths/project/{projectId}/survey/funding-sources/list.ts +++ b/api/src/paths/project/{projectId}/survey/funding-sources/list.ts @@ -2,7 +2,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../constants/roles'; import { getDBConnection } from '../../../../../database/db'; -import { HTTP400 } from '../../../../../errors/custom-error'; +import { HTTP400 } from '../../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; import { ProjectService } from '../../../../../services/project-service'; import { getLogger } from '../../../../../utils/logger'; diff --git a/api/src/paths/project/{projectId}/survey/permits/list.test.ts b/api/src/paths/project/{projectId}/survey/permits/list.test.ts deleted file mode 100644 index 702552b27a..0000000000 --- a/api/src/paths/project/{projectId}/survey/permits/list.test.ts +++ /dev/null @@ -1,147 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; -import survey_queries from '../../../../../queries/survey'; -import { getMockDBConnection } from '../../../../../__mocks__/db'; -import * as list from './list'; - -chai.use(sinonChai); - -describe('getSurveyPermits', () => { - afterEach(() => { - sinon.restore(); - }); - - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1 - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - it('should throw a 400 error when no project id path param', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - try { - const result = list.getSurveyPermits(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw a 400 error when no sql statement returned for assignable survey permits', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(survey_queries, 'getAllAssignablePermitsForASurveySQL').returns(null); - - try { - const result = list.getSurveyPermits(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - } - }); - - it('should return the survey permits on success', async () => { - const surveyPermits = [ - { - number: '123', - type: 'scientific' - }, - { - number: '12345', - type: 'wildlife' - } - ]; - - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: surveyPermits }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'getAllAssignablePermitsForASurveySQL').returns(SQL`some query`); - - const result = list.getSurveyPermits(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.eql([ - { - permit_number: '123', - permit_type: 'scientific' - }, - { - permit_number: '12345', - permit_type: 'wildlife' - } - ]); - }); - - it('should return an empty array when survey permits response has no rows', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: null }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'getAllAssignablePermitsForASurveySQL').returns(SQL`some query`); - - const result = list.getSurveyPermits(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.be.eql([]); - }); -}); diff --git a/api/src/paths/project/{projectId}/survey/permits/list.ts b/api/src/paths/project/{projectId}/survey/permits/list.ts deleted file mode 100644 index 7ba5ebd9c8..0000000000 --- a/api/src/paths/project/{projectId}/survey/permits/list.ts +++ /dev/null @@ -1,121 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE } from '../../../../../constants/roles'; -import { getDBConnection } from '../../../../../database/db'; -import { HTTP400 } from '../../../../../errors/custom-error'; -import { GetPermitData } from '../../../../../models/project-view'; -import { queries } from '../../../../../queries/queries'; -import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; -import { getLogger } from '../../../../../utils/logger'; - -const defaultLog = getLogger('/api/project/{projectId}/survey/permits/list'); - -export const GET: Operation = [ - authorizeRequestHandler((req) => { - return { - and: [ - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR, PROJECT_ROLE.PROJECT_VIEWER], - projectId: Number(req.params.projectId), - discriminator: 'ProjectRole' - } - ] - }; - }), - getSurveyPermits() -]; - -GET.apiDoc = { - description: 'Fetches a list of permits for a survey based on a project.', - tags: ['permits'], - security: [ - { - Bearer: [] - } - ], - parameters: [ - { - in: 'path', - name: 'projectId', - schema: { - type: 'number' - }, - required: true - } - ], - responses: { - 200: { - description: 'Permits get response array.', - content: { - 'application/json': { - schema: { - type: 'array', - items: { - title: 'Survey permit Get Response Object', - type: 'object', - properties: { - permit_number: { - type: 'string' - }, - permit_type: { - type: 'string' - } - } - }, - description: 'Permits applicable for the survey' - } - } - } - }, - 401: { - $ref: '#/components/responses/401' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function getSurveyPermits(): RequestHandler { - return async (req, res) => { - defaultLog.debug({ label: 'Get survey permits list', message: 'params', req_params: req.params }); - - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - const connection = getDBConnection(req['keycloak_token']); - - try { - const getSurveyPermitsSQLStatement = queries.survey.getAllAssignablePermitsForASurveySQL( - Number(req.params.projectId) - ); - - if (!getSurveyPermitsSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - await connection.open(); - - const response = await connection.query(getSurveyPermitsSQLStatement.text, getSurveyPermitsSQLStatement.values); - - await connection.commit(); - - const result = (response && response.rows) || null; - - const getSurveyPermitsData = new GetPermitData(result); - - if (!getSurveyPermitsData) { - return res.status(200).json(null); - } - - return res.status(200).json(getSurveyPermitsData.permits); - } catch (error) { - defaultLog.error({ label: 'getSurveyPermits', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/list.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/list.test.ts index 705e32883c..683cb6ec7b 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/list.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/list.test.ts @@ -2,236 +2,85 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../database/db'; -import { HTTPError } from '../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../queries/survey'; +import { HTTPError } from '../../../../../../errors/http-error'; +import { AttachmentService } from '../../../../../../services/attachment-service'; import { getMockDBConnection } from '../../../../../../__mocks__/db'; -import * as listAttachments from './list'; +import * as list from './list'; chai.use(sinonChai); -describe('lists the survey attachments', () => { - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - body: {}, - params: { - projectId: 1, - surveyId: 1 - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - +describe('getSurveyAttachments', () => { afterEach(() => { sinon.restore(); }); - it('should throw a 400 error when no surveyId is provided', async () => { + it('should throw an error when a failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - try { - const result = listAttachments.getSurveyAttachments(); - await result( - { ...sampleReq, params: { ...sampleReq.params, surveyId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `surveyId`'); - } - }); - - it('should throw a 400 error when no sql statement returned for getSurveyAttachmentsSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + const expectedError = new Error('cannot process request'); + const getSurveyAttachmentsStub = sinon + .stub(AttachmentService.prototype, 'getSurveyAttachments') + .rejects(expectedError); + + const sampleReq = { + keycloak_token: {}, + body: {}, + params: { + projectId: 1, + surveyId: 2 } - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentsSQL').returns(null); + } as any; try { - const result = listAttachments.getSurveyAttachments(); + const result = list.getSurveyAttachments(); await result(sampleReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); + expect(getSurveyAttachmentsStub).to.be.calledOnce; + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should return a list of survey attachments where the lastModified is the create_date', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ - rows: [ - { - id: 13, - file_name: 'name1', - create_date: '2020-01-01', - update_date: '', - file_size: 50, - file_type: 'type', - security_token: 'sometoken' - } - ] - }) - .onSecondCall() - .resolves({ - rows: [ - { - id: 14, - file_name: 'name2', - create_date: '2020-01-01', - update_date: '', - file_size: 50, - file_type: 'type', - security_token: 'sometoken' - } - ] - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentsSQL').returns(SQL`something`); - - const result = listAttachments.getSurveyAttachments(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); + it('should get Survey Attachments and Reports', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - expect(actualResult).to.be.an('object'); - expect(actualResult).to.have.property('attachmentsList'); + const sampleReq = { + keycloak_token: {}, + body: {}, + params: { + projectId: 1, + surveyId: 2 + } + } as any; - expect(actualResult.attachmentsList).to.be.an('array'); - expect(actualResult.attachmentsList).to.have.length(2); + const getSurveyAttachmentsStub = sinon.stub(AttachmentService.prototype, 'getSurveyAttachments').resolves([]); - expect(actualResult.attachmentsList[0].fileName).to.equal('name1'); - expect(actualResult.attachmentsList[0].fileType).to.equal('type'); - expect(actualResult.attachmentsList[0].id).to.equal(13); - expect(actualResult.attachmentsList[0].lastModified).to.match(new RegExp('2020-01-01T.*')); - expect(actualResult.attachmentsList[0].size).to.equal(50); - expect(actualResult.attachmentsList[0].securityToken).to.equal('sometoken'); + const getSurveyReportAttachmentsStub = sinon + .stub(AttachmentService.prototype, 'getSurveyReportAttachments') + .resolves([]); - expect(actualResult.attachmentsList[1].fileName).to.equal('name2'); - expect(actualResult.attachmentsList[1].fileType).to.equal('type'); - expect(actualResult.attachmentsList[1].id).to.equal(14); - expect(actualResult.attachmentsList[1].lastModified).to.match(new RegExp('2020-01-01T.*')); - expect(actualResult.attachmentsList[1].size).to.equal(50); - expect(actualResult.attachmentsList[1].securityToken).to.equal('sometoken'); - }); + const expectedResult = { attachmentsList: [], reportAttachmentsList: [] }; - it('should return a list of survey attachments where the lastModified is the update_date', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ - rows: [ - { - id: 13, - file_name: 'name1', - create_date: '2020-01-01', - update_date: '2020-04-04', - file_size: 50, - file_type: 'type', - security_token: 'sometoken' - } - ] - }) - .onSecondCall() - .resolves({ - rows: [ - { - id: 14, - file_name: 'name2', - create_date: '2020-01-01', - update_date: '2020-04-04', - file_size: 50, - file_type: 'type', - security_token: 'sometoken' + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; } - ] - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentsSQL').returns(SQL`something`); - - const result = listAttachments.getSurveyAttachments(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.be.an('object'); - expect(actualResult).to.have.property('attachmentsList'); - - expect(actualResult.attachmentsList).to.be.an('array'); - expect(actualResult.attachmentsList).to.have.length(2); - - expect(actualResult.attachmentsList[0].fileName).to.equal('name1'); - expect(actualResult.attachmentsList[0].fileType).to.equal('type'); - expect(actualResult.attachmentsList[0].id).to.equal(13); - expect(actualResult.attachmentsList[0].lastModified).to.match(new RegExp('2020-04-04T.*')); - expect(actualResult.attachmentsList[0].size).to.equal(50); - expect(actualResult.attachmentsList[0].securityToken).to.equal('sometoken'); - - expect(actualResult.attachmentsList[1].fileName).to.equal('name2'); - expect(actualResult.attachmentsList[1].fileType).to.equal('type'); - expect(actualResult.attachmentsList[1].id).to.equal(14); - expect(actualResult.attachmentsList[1].lastModified).to.match(new RegExp('2020-04-04T.*')); - expect(actualResult.attachmentsList[1].size).to.equal(50); - expect(actualResult.attachmentsList[1].securityToken).to.equal('sometoken'); - }); - - it('should return null if the survey has no attachments, on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: undefined }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentsSQL').returns(SQL`something`); - - const result = listAttachments.getSurveyAttachments(); + }; + } + }; - await result(sampleReq, sampleRes as any, (null as unknown) as any); + const result = list.getSurveyAttachments(); - expect(actualResult).to.be.null; + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(expectedResult); + expect(getSurveyAttachmentsStub).to.be.calledOnce; + expect(getSurveyReportAttachmentsStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/list.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/list.ts index a6e66b8d46..22c5bd367b 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/list.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/list.ts @@ -2,10 +2,9 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../database/db'; -import { HTTP400 } from '../../../../../../errors/custom-error'; import { GetAttachmentsData } from '../../../../../../models/project-survey-attachments'; -import { queries } from '../../../../../../queries/queries'; import { authorizeRequestHandler } from '../../../../../../request-handlers/security/authorization'; +import { AttachmentService } from '../../../../../../services/attachment-service'; import { getLogger } from '../../../../../../utils/logger'; const defaultLog = getLogger('/api/project/{projectId}/survey/{surveyId}/attachments/list'); @@ -38,7 +37,8 @@ GET.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -46,7 +46,8 @@ GET.apiDoc = { in: 'path', name: 'surveyId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -103,43 +104,20 @@ export function getSurveyAttachments(): RequestHandler { return async (req, res) => { defaultLog.debug({ label: 'Get attachments list', message: 'params', req_params: req.params }); - if (!req.params.surveyId) { - throw new HTTP400('Missing required path param `surveyId`'); - } - const connection = getDBConnection(req['keycloak_token']); + const surveyId = Number(req.params.surveyId); try { - const getSurveyAttachmentsSQLStatement = queries.survey.getSurveyAttachmentsSQL(Number(req.params.surveyId)); - const getSurveyReportAttachmentsSQLStatement = queries.survey.getSurveyReportAttachmentsSQL( - Number(req.params.surveyId) - ); - - if (!getSurveyAttachmentsSQLStatement || !getSurveyReportAttachmentsSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - await connection.open(); - const attachmentsData = await connection.query( - getSurveyAttachmentsSQLStatement.text, - getSurveyAttachmentsSQLStatement.values - ); + const attachmentService = new AttachmentService(connection); - const reportAttachmentsData = await connection.query( - getSurveyReportAttachmentsSQLStatement.text, - getSurveyReportAttachmentsSQLStatement.values - ); + const attachmentsData = await attachmentService.getSurveyAttachments(surveyId); + const reportAttachmentsData = await attachmentService.getSurveyReportAttachments(surveyId); await connection.commit(); - const getAttachmentsData = - (attachmentsData && - reportAttachmentsData && - attachmentsData.rows && - reportAttachmentsData.rows && - new GetAttachmentsData([...attachmentsData.rows, ...reportAttachmentsData.rows])) || - null; + const getAttachmentsData = new GetAttachmentsData(attachmentsData, reportAttachmentsData); return res.status(200).json(getAttachmentsData); } catch (error) { diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/report/upload.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/report/upload.test.ts index 8408142c76..d4a22bf220 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/report/upload.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/report/upload.test.ts @@ -3,7 +3,8 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../errors/custom-error'; +import { HTTPError } from '../../../../../../../errors/http-error'; +import { AttachmentService } from '../../../../../../../services/attachment-service'; import * as file_utils from '../../../../../../../utils/file-utils'; import { getMockDBConnection } from '../../../../../../../__mocks__/db'; import * as upload from './upload'; @@ -15,177 +16,157 @@ describe('uploadMedia', () => { sinon.restore(); }); - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1, - surveyId: 1 - }, - files: [ - { - fieldname: 'media', - originalname: 'test.txt', - encoding: '7bit', - mimetype: 'text/plain', - size: 340 - } - ], - body: { - attachmentType: 'Report' - }, - auth_payload: { - preferred_username: 'user', - email: 'email@example.com' - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - it('should throw an error when projectId is missing', async () => { + it('should throw an error when files are missing', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - try { - const result = upload.uploadMedia(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing projectId'); - } - }); - - it('should throw an error when surveyId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + const mockReq = { + keycloak_token: {}, + params: { + projectId: 1, + attachmentId: 2 + }, + files: [], + body: { + attachmentType: 'Other' + } + } as any; try { const result = upload.uploadMedia(); - await result( - { ...sampleReq, params: { ...sampleReq.params, surveyId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); + await result(mockReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing surveyId'); + expect((actualError as HTTPError).message).to.equal('Missing upload data'); } }); - it('should throw an error when files are missing', async () => { + it('should throw an error when file format incorrect', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - try { - const result = upload.uploadMedia(); - - await result({ ...sampleReq, files: [] }, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing upload data'); - } - }); - - it('should throw a 400 error when file format incorrect', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + const mockReq = { + keycloak_token: {}, + params: { + projectId: 1, + attachmentId: 2 + }, + files: [ + { + fieldname: 'media', + originalname: 'test.txt', + encoding: '7bit', + mimetype: 'text/plain', + size: 340 + } + ], + body: { + attachmentType: 'Other' } - }); + } as any; - sinon.stub(file_utils, 'scanFileForVirus').resolves(true); + sinon.stub(file_utils, 'scanFileForVirus').resolves(false); try { const result = upload.uploadMedia(); - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); + await result(mockReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to insert survey attachment data'); + expect((actualError as HTTPError).message).to.equal('Malicious content detected, upload cancelled'); } }); - it('should throw a 400 error when file contains malicious content', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + it('should throw an error if failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const mockReq = { + keycloak_token: {}, + params: { + projectId: 1, + attachmentId: 2 + }, + files: [ + { + fieldname: 'media', + originalname: 'test.txt', + encoding: '7bit', + mimetype: 'text/plain', + size: 340 + } + ], + body: { + attachmentType: 'Other' } - }); + } as any; - sinon.stub(file_utils, 'uploadFileToS3').resolves({ Key: '1/1/test.txt' } as any); - sinon.stub(upload, 'upsertSurveyReportAttachment').resolves({ id: 1, revision_count: 0, key: '1/1/test.txt' }); - sinon.stub(file_utils, 'scanFileForVirus').resolves(false); + sinon.stub(file_utils, 'scanFileForVirus').resolves(true); + + const expectedError = new Error('cannot process request'); + sinon.stub(AttachmentService.prototype, 'upsertSurveyReportAttachment').rejects(expectedError); try { const result = upload.uploadMedia(); - await result(sampleReq, sampleRes as any, (null as unknown) as any); + await result(mockReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Malicious content detected, upload cancelled'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should return id and revision_count on success (with username and email)', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); + it('should succeed with valid params', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(file_utils, 'uploadFileToS3').resolves({ Key: '1/1/test.txt' } as any); - sinon.stub(upload, 'upsertSurveyReportAttachment').resolves({ id: 1, revision_count: 0, key: '1/1/test.txt' }); sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - - const result = upload.uploadMedia(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.eql({ attachmentId: 1, revision_count: 0 }); - }); - - it('should return id and revision_count on success (without username and email)', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + sinon.stub(file_utils, 'uploadFileToS3').resolves(); + + const mockReq = { + keycloak_token: {}, + params: { + projectId: 1, + attachmentId: 2 + }, + files: [ + { + fieldname: 'media', + originalname: 'test.txt', + encoding: '7bit', + mimetype: 'text/plain', + size: 340 + } + ], + body: { + attachmentType: 'Other' } - }); + } as any; + + const expectedResponse = { attachmentId: 1, revision_count: 1 }; + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; - sinon.stub(file_utils, 'uploadFileToS3').resolves({ Key: '1/1/test.txt' } as any); - sinon.stub(upload, 'upsertSurveyReportAttachment').resolves({ id: 1, revision_count: 0, key: '1/1/test.txt' }); - sinon.stub(file_utils, 'scanFileForVirus').resolves(true); + const upsertSurveyReportAttachmentStub = sinon + .stub(AttachmentService.prototype, 'upsertSurveyReportAttachment') + .resolves({ id: 1, revision_count: 1, key: 'string' }); const result = upload.uploadMedia(); - await result( - { ...sampleReq, auth_payload: { ...sampleReq.auth_payload, preferred_username: null, email: null } }, - sampleRes as any, - (null as unknown) as any - ); - - expect(actualResult).to.eql({ attachmentId: 1, revision_count: 0 }); + await result(mockReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(expectedResponse); + expect(upsertSurveyReportAttachmentStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/report/upload.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/report/upload.ts index 2e69e31106..062182504f 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/report/upload.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/report/upload.ts @@ -1,16 +1,11 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../errors/custom-error'; -import { - IReportAttachmentAuthor, - PostReportAttachmentMetadata, - PutReportAttachmentMetadata -} from '../../../../../../../models/project-survey-attachments'; -import { queries } from '../../../../../../../queries/queries'; +import { getDBConnection } from '../../../../../../../database/db'; +import { HTTP400 } from '../../../../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; -import { generateS3FileKey, scanFileForVirus, uploadFileToS3 } from '../../../../../../../utils/file-utils'; +import { AttachmentService } from '../../../../../../../services/attachment-service'; +import { scanFileForVirus, uploadFileToS3 } from '../../../../../../../utils/file-utils'; import { getLogger } from '../../../../../../../utils/logger'; const defaultLog = getLogger('/api/project/{projectId}/survey/{surveyId}/attachments/report/upload'); @@ -41,11 +36,19 @@ POST.apiDoc = { { in: 'path', name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, required: true }, { in: 'path', name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, required: true } ], @@ -148,23 +151,11 @@ export function uploadMedia(): RequestHandler { return async (req, res) => { const rawMediaArray: Express.Multer.File[] = req.files as Express.Multer.File[]; - if (!req.params.projectId) { - throw new HTTP400('Missing projectId'); - } - - if (!req.params.surveyId) { - throw new HTTP400('Missing surveyId'); - } - if (!rawMediaArray || !rawMediaArray.length) { // no media objects included, skipping media upload step throw new HTTP400('Missing upload data'); } - if (!req.body) { - throw new HTTP400('Missing request body'); - } - const rawMediaFile: Express.Multer.File = rawMediaArray[0]; defaultLog.debug({ @@ -185,12 +176,13 @@ export function uploadMedia(): RequestHandler { throw new HTTP400('Malicious content detected, upload cancelled'); } - const upsertResult = await upsertSurveyReportAttachment( + const attachmentService = new AttachmentService(connection); + + const upsertResult = await attachmentService.upsertSurveyReportAttachment( rawMediaFile, Number(req.params.projectId), Number(req.params.surveyId), - req.body.attachmentMeta, - connection + req.body.attachmentMeta ); const metadata = { @@ -215,143 +207,3 @@ export function uploadMedia(): RequestHandler { } }; } - -export const upsertSurveyReportAttachment = async ( - file: Express.Multer.File, - projectId: number, - surveyId: number, - attachmentMeta: any, - connection: IDBConnection -): Promise<{ id: number; revision_count: number; key: string }> => { - const getSqlStatement = queries.survey.getSurveyReportAttachmentByFileNameSQL(surveyId, file.originalname); - - if (!getSqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const key = generateS3FileKey({ - projectId: projectId, - surveyId: surveyId, - fileName: file.originalname, - folder: 'reports' - }); - - const getResponse = await connection.query(getSqlStatement.text, getSqlStatement.values); - - let metadata; - let attachmentResult: { id: number; revision_count: number }; - - if (getResponse && getResponse.rowCount > 0) { - // Existing attachment with matching name found, update it - metadata = new PutReportAttachmentMetadata(attachmentMeta); - attachmentResult = await updateSurveyReportAttachment(file, surveyId, metadata, connection); - } else { - // No matching attachment found, insert new attachment - metadata = new PostReportAttachmentMetadata(attachmentMeta); - attachmentResult = await insertSurveyReportAttachment( - file, - surveyId, - new PostReportAttachmentMetadata(attachmentMeta), - key, - connection - ); - } - - // Delete any existing attachment author records - await deleteSurveyReportAttachmentAuthors(attachmentResult.id, connection); - - const promises = []; - - // Insert any new attachment author records - promises.push( - metadata.authors.map((author) => insertSurveyReportAttachmentAuthor(attachmentResult.id, author, connection)) - ); - - await Promise.all(promises); - - return { ...attachmentResult, key }; -}; - -export const insertSurveyReportAttachment = async ( - file: Express.Multer.File, - surveyId: number, - attachmentMeta: PostReportAttachmentMetadata, - key: string, - connection: IDBConnection -): Promise<{ id: number; revision_count: number }> => { - const sqlStatement = queries.survey.postSurveyReportAttachmentSQL( - file.originalname, - file.size, - surveyId, - key, - attachmentMeta - ); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to insert survey attachment data'); - } - - return response.rows[0]; -}; - -export const updateSurveyReportAttachment = async ( - file: Express.Multer.File, - surveyId: number, - attachmentMeta: PutReportAttachmentMetadata, - connection: IDBConnection -): Promise<{ id: number; revision_count: number }> => { - const sqlStatement = queries.survey.putSurveyReportAttachmentSQL(surveyId, file.originalname, attachmentMeta); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL update statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to update survey attachment data'); - } - - return response.rows[0]; -}; - -export const deleteSurveyReportAttachmentAuthors = async ( - attachmentId: number, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.survey.deleteSurveyReportAttachmentAuthorsSQL(attachmentId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL delete attachment report authors statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response) { - throw new HTTP400('Failed to delete attachment report authors records'); - } -}; - -export const insertSurveyReportAttachmentAuthor = async ( - attachmentId: number, - author: IReportAttachmentAuthor, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.survey.insertSurveyReportAttachmentAuthorSQL(attachmentId, author); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert attachment report author statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to insert attachment report author record'); - } -}; diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/upload.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/upload.test.ts index 8e2db16132..3abcb15a04 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/upload.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/upload.test.ts @@ -2,10 +2,9 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../database/db'; -import { HTTPError } from '../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../queries/survey'; +import { HTTPError } from '../../../../../../errors/http-error'; +import { AttachmentService } from '../../../../../../services/attachment-service'; import * as file_utils from '../../../../../../utils/file-utils'; import { getMockDBConnection } from '../../../../../../__mocks__/db'; import * as upload from './upload'; @@ -19,11 +18,11 @@ describe('uploadMedia', () => { const dbConnectionObj = getMockDBConnection(); - const sampleReq = { + const mockReq = { keycloak_token: {}, params: { projectId: 1, - surveyId: 1 + attachmentId: 2 }, files: [ { @@ -34,70 +33,16 @@ describe('uploadMedia', () => { size: 340 } ], - body: { - attachmentType: 'Other' - }, - auth_payload: { - preferred_username: 'user', - email: 'email@example.com' - } + body: {} } as any; - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - it('should throw an error when projectId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = upload.uploadMedia(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing projectId'); - } - }); - - it('should throw an error when surveyId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = upload.uploadMedia(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, surveyId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing surveyId'); - } - }); - it('should throw an error when files are missing', async () => { sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); try { const result = upload.uploadMedia(); - await result({ ...sampleReq, files: [] }, (null as unknown) as any, (null as unknown) as any); + await result({ ...mockReq, files: [] }, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { expect((actualError as HTTPError).status).to.equal(400); @@ -105,7 +50,7 @@ describe('uploadMedia', () => { } }); - it('should throw a 400 error when file format incorrect', async () => { + it('should throw an error when file format incorrect', async () => { sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -113,20 +58,20 @@ describe('uploadMedia', () => { } }); - sinon.stub(file_utils, 'scanFileForVirus').resolves(true); + sinon.stub(file_utils, 'scanFileForVirus').resolves(false); try { const result = upload.uploadMedia(); - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); + await result(mockReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to insert survey attachment data'); + expect((actualError as HTTPError).message).to.equal('Malicious content detected, upload cancelled'); } }); - it('should throw a 400 error when file contains malicious content', async () => { + it('should throw an error if failure occurs', async () => { sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -134,22 +79,22 @@ describe('uploadMedia', () => { } }); - sinon.stub(file_utils, 'uploadFileToS3').resolves({ Key: '1/1/test.txt' } as any); - sinon.stub(upload, 'upsertSurveyAttachment').resolves({ id: 1, revision_count: 0, key: '1/1/test.txt' }); - sinon.stub(file_utils, 'scanFileForVirus').resolves(false); + sinon.stub(file_utils, 'scanFileForVirus').resolves(true); + + const expectedError = new Error('cannot process request'); + sinon.stub(AttachmentService.prototype, 'upsertSurveyAttachment').rejects(expectedError); try { const result = upload.uploadMedia(); - await result(sampleReq, sampleRes as any, (null as unknown) as any); + await result(mockReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Malicious content detected, upload cancelled'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should return id and revision_count on success (with username and email)', async () => { + it('should succeed with valid params', async () => { sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -157,227 +102,30 @@ describe('uploadMedia', () => { } }); - sinon.stub(file_utils, 'uploadFileToS3').resolves({ Key: '1/1/test.txt' } as any); - sinon.stub(upload, 'upsertSurveyAttachment').resolves({ id: 1, revision_count: 0, key: '1/1/test.txt' }); sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - - const result = upload.uploadMedia(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.eql({ attachmentId: 1, revision_count: 0 }); - }); - - it('should return id and revision_count on success (without username and email)', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + sinon.stub(file_utils, 'uploadFileToS3').resolves(); + + const expectedResponse = { attachmentId: 1, revision_count: 1 }; + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; } - }); + }; - sinon.stub(file_utils, 'uploadFileToS3').resolves({ Key: '1/1/test.txt' } as any); - sinon.stub(upload, 'upsertSurveyAttachment').resolves({ id: 1, revision_count: 0, key: '1/1/test.txt' }); - sinon.stub(file_utils, 'scanFileForVirus').resolves(true); + const upsertSurveyAttachmentStub = sinon + .stub(AttachmentService.prototype, 'upsertSurveyAttachment') + .resolves({ id: 1, revision_count: 1, key: 'string' }); const result = upload.uploadMedia(); - await result( - { ...sampleReq, auth_payload: { ...sampleReq.auth_payload, preferred_username: null, email: null } }, - sampleRes as any, - (null as unknown) as any - ); - - expect(actualResult).to.eql({ attachmentId: 1, revision_count: 0 }); - }); -}); - -describe('upsertSurveyAttachment', () => { - afterEach(() => { - sinon.restore(); - }); - - const dbConnectionObj = getMockDBConnection({ - systemUserId: () => { - return 20; - } - }); - - const file = { - fieldname: 'media', - originalname: 'test.txt', - encoding: '7bit', - mimetype: 'text/plain', - size: 340 - } as any; - - const projectId = 1; - const surveyId = 2; - const attachmentType = 'Image'; - - it('should throw an error when failed to generate SQL get statement', async () => { - sinon.stub(survey_queries, 'getSurveyAttachmentByFileNameSQL').returns(null); - - try { - await upload.upsertSurveyAttachment(file, projectId, surveyId, attachmentType, dbConnectionObj); - - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - } - }); - - it('should throw an error when failed to generate SQL put statement', async () => { - const mockQuery = sinon.stub(); - - mockQuery.onFirstCall().resolves({ - rowCount: 1 - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentByFileNameSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'putSurveyAttachmentSQL').returns(null); - - try { - await upload.upsertSurveyAttachment(file, projectId, surveyId, attachmentType, { - ...dbConnectionObj, - query: mockQuery - }); - - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL update statement'); - } - }); - - it('should throw an error when failed to update survey attachment data', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ - rowCount: 1 - }) - .onSecondCall() - .resolves({ - rowCount: null - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentByFileNameSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'putSurveyAttachmentSQL').returns(SQL`something`); - - try { - await upload.upsertSurveyAttachment(file, projectId, surveyId, attachmentType, { - ...dbConnectionObj, - query: mockQuery - }); - - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to update survey attachment data'); - } - }); - - it('should return the id, revision_count of records updated on success (update)', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ - rowCount: 1 - }) - .onSecondCall() - .resolves({ - rowCount: 1, - rows: [{ id: 1, revision_count: 0 }] - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentByFileNameSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'putSurveyAttachmentSQL').returns(SQL`something`); - - const result = await upload.upsertSurveyAttachment(file, projectId, surveyId, attachmentType, { - ...dbConnectionObj, - query: mockQuery - }); - - expect(result).to.eql({ id: 1, revision_count: 0, key: 'projects/1/surveys/2/test.txt' }); - }); - - it('should throw an error when failed to generate SQL insert statement', async () => { - const mockQuery = sinon.stub(); - - mockQuery.onFirstCall().resolves({ - rowCount: null - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentByFileNameSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'postSurveyAttachmentSQL').returns(null); - - try { - await upload.upsertSurveyAttachment(file, projectId, surveyId, attachmentType, { - ...dbConnectionObj, - query: mockQuery - }); - - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL insert statement'); - } - }); - - it('should throw an error when insert result has no rows', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ - rowCount: null - }) - .onSecondCall() - .resolves({ - rows: [] - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentByFileNameSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'postSurveyAttachmentSQL').returns(SQL`something`); - - try { - await upload.upsertSurveyAttachment(file, projectId, surveyId, attachmentType, { - ...dbConnectionObj, - query: mockQuery - }); - - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to insert survey attachment data'); - } - }); - - it('should return the id and revision_count of record inserted on success (insert)', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ - rowCount: null - }) - .onSecondCall() - .resolves({ - rows: [{ id: 12, revision_count: 0, key: 'projects/1/surveys/2/test.txt' }] - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentByFileNameSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'postSurveyAttachmentSQL').returns(SQL`something`); - - const result = await upload.upsertSurveyAttachment(file, projectId, surveyId, attachmentType, { - ...dbConnectionObj, - query: mockQuery - }); - - expect(result).to.eql({ id: 12, revision_count: 0, key: 'projects/1/surveys/2/test.txt' }); + await result(mockReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(expectedResponse); + expect(upsertSurveyAttachmentStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/upload.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/upload.ts index 2da518b052..04dd38b7df 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/upload.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/upload.ts @@ -2,11 +2,11 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { ATTACHMENT_TYPE } from '../../../../../../constants/attachments'; import { PROJECT_ROLE } from '../../../../../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../../../../../database/db'; -import { HTTP400 } from '../../../../../../errors/custom-error'; -import { queries } from '../../../../../../queries/queries'; +import { getDBConnection } from '../../../../../../database/db'; +import { HTTP400 } from '../../../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../../../request-handlers/security/authorization'; -import { generateS3FileKey, scanFileForVirus, uploadFileToS3 } from '../../../../../../utils/file-utils'; +import { AttachmentService } from '../../../../../../services/attachment-service'; +import { scanFileForVirus, uploadFileToS3 } from '../../../../../../utils/file-utils'; import { getLogger } from '../../../../../../utils/logger'; const defaultLog = getLogger('/api/project/{projectId}/survey/{surveyId}/attachments/upload'); @@ -102,14 +102,6 @@ export function uploadMedia(): RequestHandler { return async (req, res) => { const rawMediaArray: Express.Multer.File[] = req.files as Express.Multer.File[]; - if (!req.params.projectId) { - throw new HTTP400('Missing projectId'); - } - - if (!req.params.surveyId) { - throw new HTTP400('Missing surveyId'); - } - if (!rawMediaArray || !rawMediaArray.length) { // no media objects included, skipping media upload step throw new HTTP400('Missing upload data'); @@ -135,12 +127,13 @@ export function uploadMedia(): RequestHandler { throw new HTTP400('Malicious content detected, upload cancelled'); } - const upsertResult = await upsertSurveyAttachment( + const attachmentService = new AttachmentService(connection); + + const upsertResult = await attachmentService.upsertSurveyAttachment( rawMediaFile, Number(req.params.projectId), Number(req.params.surveyId), - ATTACHMENT_TYPE.OTHER, - connection + ATTACHMENT_TYPE.OTHER ); const metadata = { @@ -165,89 +158,3 @@ export function uploadMedia(): RequestHandler { } }; } - -export const upsertSurveyAttachment = async ( - file: Express.Multer.File, - projectId: number, - surveyId: number, - attachmentType: string, - connection: IDBConnection -): Promise<{ id: number; revision_count: number; key: string }> => { - const getSqlStatement = queries.survey.getSurveyAttachmentByFileNameSQL(surveyId, file.originalname); - - if (!getSqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const key = generateS3FileKey({ projectId: projectId, surveyId: surveyId, fileName: file.originalname }); - - const getResponse = await connection.query(getSqlStatement.text, getSqlStatement.values); - - let attachmentResult: { id: number; revision_count: number }; - - if (getResponse && getResponse.rowCount > 0) { - // Existing attachment with matching name found, update it - attachmentResult = await updateSurveyAttachment(file, surveyId, attachmentType, connection); - } else { - // No matching attachment found, insert new attachment - attachmentResult = await insertSurveyAttachment(file, projectId, surveyId, attachmentType, connection); - } - - return { ...attachmentResult, key }; -}; - -export const insertSurveyAttachment = async ( - file: Express.Multer.File, - projectId: number, - surveyId: number, - attachmentType: string, - connection: IDBConnection -): Promise<{ id: number; revision_count: number }> => { - const key = generateS3FileKey({ - projectId: projectId, - surveyId: surveyId, - fileName: file.originalname, - folder: 'reports' - }); - - const sqlStatement = queries.survey.postSurveyAttachmentSQL( - file.originalname, - file.size, - attachmentType, - surveyId, - key - ); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to insert survey attachment data'); - } - - return response.rows[0]; -}; - -export const updateSurveyAttachment = async ( - file: Express.Multer.File, - surveyId: number, - attachmentType: string, - connection: IDBConnection -): Promise<{ id: number; revision_count: number }> => { - const sqlStatement = queries.survey.putSurveyAttachmentSQL(surveyId, file.originalname, attachmentType); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL update statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to update survey attachment data'); - } - - return response.rows[0]; -}; diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/delete.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/delete.test.ts index 29b81d14d3..4db3616acd 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/delete.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/delete.test.ts @@ -1,16 +1,14 @@ -import { DeleteObjectOutput } from 'aws-sdk/clients/s3'; +import { S3 } from 'aws-sdk'; import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../errors/custom-error'; -import security_queries from '../../../../../../../queries/security'; -import survey_queries from '../../../../../../../queries/survey'; +import { HTTPError } from '../../../../../../../errors/http-error'; +import { AttachmentService } from '../../../../../../../services/attachment-service'; import * as file_utils from '../../../../../../../utils/file-utils'; import { getMockDBConnection } from '../../../../../../../__mocks__/db'; -import * as delete_attachment from './delete'; +import * as deleteAttachment from './delete'; chai.use(sinonChai); @@ -19,91 +17,8 @@ describe('deleteAttachment', () => { sinon.restore(); }); - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1, - surveyId: 3, - attachmentId: 2 - }, - body: { - attachmentType: 'Image', - securityToken: 'token' - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - }, - send: () => { - //do nothing - } - }; - } - }; - - it('should throw an error when surveyId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = delete_attachment.deleteAttachment(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, surveyId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `surveyId`'); - } - }); - - it('should throw an error when attachmentId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = delete_attachment.deleteAttachment(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, attachmentId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should throw an error when attachmentType is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = delete_attachment.deleteAttachment(); - - await result( - { ...sampleReq, body: { ...sampleReq.body, attachmentType: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required body param `attachmentType`'); - } - }); - - it('should throw a 400 error when no sql statement returned for unsecureAttachmentRecordSQL', async () => { + it('should throw an error when a failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -111,159 +26,121 @@ describe('deleteAttachment', () => { } }); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(null); - - try { - const result = delete_attachment.deleteAttachment(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL unsecure record statement'); - } - }); - - it('should throw a 400 error when fails to unsecure attachment record', async () => { - const mockQuery = sinon.stub(); - - mockQuery.onFirstCall().resolves({ rowCount: null }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - - try { - const result = delete_attachment.deleteAttachment(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to unsecure record'); - } - }); - - it('should throw a 400 error when no sql statement returned for deleteSurveyAttachmentSQL', async () => { - const mockQuery = sinon.stub(); - - mockQuery.onFirstCall().resolves({ rowCount: 1 }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'deleteSurveyAttachmentSQL').returns(null); + const expectedError = new Error('cannot process request'); + const deleteSurveyReportAttachmentAuthorsStub = sinon + .stub(AttachmentService.prototype, 'deleteSurveyReportAttachmentAuthors') + .rejects(expectedError); + + const sampleReq = { + keycloak_token: {}, + body: { attachmentType: 'Report' }, + params: { + projectId: 1, + attachmentId: 2 + } + } as any; try { - const result = delete_attachment.deleteAttachment(); + const result = deleteAttachment.deleteAttachment(); await result(sampleReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL delete project attachment statement'); + expect(deleteSurveyReportAttachmentAuthorsStub).to.be.calledOnce; + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should return null when deleting file from S3 fails', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ rowCount: 1 }) - .onSecondCall() - .resolves({ rowCount: 1, rows: [{ key: 's3Key' }] }); - + it('should delete Survey `Report` Attachment', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery - }); - - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'deleteSurveyAttachmentSQL').returns(SQL`some query`); - sinon.stub(file_utils, 'deleteFileFromS3').resolves(null); - - const result = delete_attachment.deleteAttachment(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.equal(null); - }); - - it('should return null response on success when type is not Report', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ rowCount: 1 }) - .onSecondCall() - .resolves({ rows: [{ key: 's3Key' }], rowCount: 1 }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery + } }); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'deleteSurveyAttachmentSQL').returns(SQL`some query`); - sinon.stub(file_utils, 'deleteFileFromS3').resolves('non null response' as DeleteObjectOutput); - - const result = delete_attachment.deleteAttachment(); + const sampleReq = { + keycloak_token: {}, + body: { attachmentType: 'Report' }, + params: { + projectId: 1, + attachmentId: 2 + } + } as any; + + const deleteSurveyReportAttachmentAuthorsStub = sinon + .stub(AttachmentService.prototype, 'deleteSurveyReportAttachmentAuthors') + .resolves(); + + const deleteSurveyReportAttachmentStub = sinon + .stub(AttachmentService.prototype, 'deleteSurveyReportAttachment') + .resolves({ key: 'string' }); + + const fileUtilsStub = sinon + .stub(file_utils, 'deleteFileFromS3') + .resolves((true as unknown) as S3.DeleteObjectOutput); + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + send: (response: any) => { + actualResult = response; + } + }; + } + }; - await result(sampleReq, sampleRes as any, (null as unknown) as any); + const result = deleteAttachment.deleteAttachment(); - expect(actualResult).to.equal(null); + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(undefined); + expect(deleteSurveyReportAttachmentAuthorsStub).to.be.calledOnce; + expect(deleteSurveyReportAttachmentStub).to.be.calledOnce; + expect(fileUtilsStub).to.be.calledOnce; }); - it('should return null response on success when type is Report', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ rowCount: 1 }) - .onSecondCall() - .resolves({ rowCount: 1 }) - .onThirdCall() - .resolves({ rows: [{ key: 's3Key' }], rowCount: 1 }); - + it('should delete Survey Attachment', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'deleteSurveyReportAttachmentSQL').returns(SQL`some query`); - sinon.stub(file_utils, 'deleteFileFromS3').resolves('non null response' as DeleteObjectOutput); - - const result = delete_attachment.deleteAttachment(); + const sampleReq = { + keycloak_token: {}, + body: { attachmentType: 'Attachment' }, + params: { + projectId: 1, + attachmentId: 2 + } + } as any; + + const deleteSurveyAttachmentStub = sinon + .stub(AttachmentService.prototype, 'deleteSurveyAttachment') + .resolves({ key: 'string' }); + + const fileUtilsStub = sinon.stub(file_utils, 'deleteFileFromS3').resolves(); + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; - await result( - { ...sampleReq, body: { ...sampleReq.body, attachmentType: 'Report' } }, - sampleRes as any, - (null as unknown) as any - ); + const result = deleteAttachment.deleteAttachment(); - expect(actualResult).to.equal(null); + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(null); + expect(deleteSurveyAttachmentStub).to.be.calledOnce; + expect(fileUtilsStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/delete.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/delete.ts index 64e427ef90..3347cf729b 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/delete.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/delete.ts @@ -2,14 +2,12 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { ATTACHMENT_TYPE } from '../../../../../../../constants/attachments'; import { PROJECT_ROLE } from '../../../../../../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../errors/custom-error'; -import { queries } from '../../../../../../../queries/queries'; +import { getDBConnection } from '../../../../../../../database/db'; import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; +import { AttachmentService } from '../../../../../../../services/attachment-service'; import { deleteFileFromS3 } from '../../../../../../../utils/file-utils'; import { getLogger } from '../../../../../../../utils/logger'; import { attachmentApiDocObject } from '../../../../../../../utils/shared-api-docs'; -import { deleteSurveyReportAttachmentAuthors } from '../report/upload'; const defaultLog = getLogger('/api/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/delete'); @@ -35,7 +33,8 @@ POST.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -43,7 +42,8 @@ POST.apiDoc = { in: 'path', name: 'surveyId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -51,7 +51,8 @@ POST.apiDoc = { in: 'path', name: 'attachmentId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -61,7 +62,13 @@ POST.apiDoc = { content: { 'application/json': { schema: { - type: 'object' + type: 'object', + required: ['attachmentType'], + properties: { + attachmentType: { + type: 'string' + } + } } } } @@ -92,35 +99,20 @@ export function deleteAttachment(): RequestHandler { return async (req, res) => { defaultLog.debug({ label: 'Delete attachment', message: 'params', req_params: req.params }); - if (!req.params.surveyId) { - throw new HTTP400('Missing required path param `surveyId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - - if (!req.body || !req.body.attachmentType) { - throw new HTTP400('Missing required body param `attachmentType`'); - } - const connection = getDBConnection(req['keycloak_token']); try { await connection.open(); - // If the attachment record is currently secured, need to unsecure it prior to deleting it - if (req.body.securityToken) { - await unsecureSurveyAttachmentRecord(req.body.securityToken, req.body.attachmentType, connection); - } + const attachmentService = new AttachmentService(connection); let deleteResult: { key: string }; if (req.body.attachmentType === ATTACHMENT_TYPE.REPORT) { - await deleteSurveyReportAttachmentAuthors(Number(req.params.attachmentId), connection); + await attachmentService.deleteSurveyReportAttachmentAuthors(Number(req.params.attachmentId)); - deleteResult = await deleteSurveyReportAttachment(Number(req.params.attachmentId), connection); + deleteResult = await attachmentService.deleteSurveyReportAttachment(Number(req.params.attachmentId)); } else { - deleteResult = await deleteSurveyAttachment(Number(req.params.attachmentId), connection); + deleteResult = await attachmentService.deleteSurveyAttachment(Number(req.params.attachmentId)); } await connection.commit(); @@ -141,65 +133,3 @@ export function deleteAttachment(): RequestHandler { } }; } - -const unsecureSurveyAttachmentRecord = async ( - securityToken: any, - attachmentType: string, - connection: IDBConnection -): Promise => { - const unsecureRecordSQLStatement = - attachmentType === 'Report' - ? queries.security.unsecureAttachmentRecordSQL('survey_report_attachment', securityToken) - : queries.security.unsecureAttachmentRecordSQL('survey_attachment', securityToken); - - if (!unsecureRecordSQLStatement) { - throw new HTTP400('Failed to build SQL unsecure record statement'); - } - - const unsecureRecordSQLResponse = await connection.query( - unsecureRecordSQLStatement.text, - unsecureRecordSQLStatement.values - ); - - if (!unsecureRecordSQLResponse || !unsecureRecordSQLResponse.rowCount) { - throw new HTTP400('Failed to unsecure record'); - } -}; - -export const deleteSurveyAttachment = async ( - attachmentId: number, - connection: IDBConnection -): Promise<{ key: string }> => { - const sqlStatement = queries.survey.deleteSurveyAttachmentSQL(attachmentId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL delete project attachment statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to delete survey attachment record'); - } - - return response.rows[0]; -}; - -export const deleteSurveyReportAttachment = async ( - attachmentId: number, - connection: IDBConnection -): Promise<{ key: string }> => { - const sqlStatement = queries.survey.deleteSurveyReportAttachmentSQL(attachmentId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL delete project report attachment statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to delete survey attachment report record'); - } - - return response.rows[0]; -}; diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/getSignedUrl.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/getSignedUrl.test.ts index e6b1524bca..cd574d23b3 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/getSignedUrl.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/getSignedUrl.test.ts @@ -2,11 +2,9 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import { ATTACHMENT_TYPE } from '../../../../../../../constants/attachments'; import * as db from '../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../../queries/survey'; +import { HTTPError } from '../../../../../../../errors/http-error'; +import { AttachmentService } from '../../../../../../../services/attachment-service'; import * as file_utils from '../../../../../../../utils/file-utils'; import { getMockDBConnection } from '../../../../../../../__mocks__/db'; import * as get_signed_url from './getSignedUrl'; @@ -18,101 +16,25 @@ describe('getSurveyAttachmentSignedURL', () => { sinon.restore(); }); - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1, - surveyId: 1, - attachmentId: 2 - }, - query: { - attachmentType: 'Other' - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - it('should throw an error when surveyId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = get_signed_url.getSurveyAttachmentSignedURL(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, surveyId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `surveyId`'); - } - }); - - it('should throw an error when attachmentId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = get_signed_url.getSurveyAttachmentSignedURL(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, attachmentId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should return null when getting signed url from S3 fails', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [{ key: 's3Key' }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentS3KeySQL').returns(SQL`some query`); - sinon.stub(file_utils, 'getS3SignedURL').resolves(null); - - const result = get_signed_url.getSurveyAttachmentSignedURL(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.equal(null); - }); - - describe('non report attachments', () => { - it('should throw a 400 error when no sql statement returned', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + describe('report attachments', () => { + it('should throw an error when a failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const expectedError = new Error('cannot process request'); + sinon.stub(AttachmentService.prototype, 'getSurveyReportAttachmentS3Key').rejects(expectedError); + + const sampleReq = { + keycloak_token: {}, + body: { attachments: [], security_ids: [] }, + params: { + projectId: 1, + attachmentId: 1 + }, + query: { + attachmentType: 'Report' } - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentS3KeySQL').returns(null); + } as any; try { const result = get_signed_url.getSurveyAttachmentSignedURL(); @@ -120,96 +42,96 @@ describe('getSurveyAttachmentSignedURL', () => { await result(sampleReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build attachment S3 key SQLstatement'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); it('should return the signed url response on success', async () => { - const mockQuery = sinon.stub(); + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const getSurveyReportAttachmentS3KeyStub = sinon + .stub(AttachmentService.prototype, 'getSurveyReportAttachmentS3Key') + .resolves('key'); + + const sampleReq = { + keycloak_token: {}, + body: { attachments: [], security_ids: [] }, + params: { + projectId: 1, + attachmentId: 1 + }, + query: { + attachmentType: 'Report' + } + } as any; - mockQuery.resolves({ rows: [{ key: 's3Key' }] }); + const getS3SignedURLStub = sinon.stub(file_utils, 'getS3SignedURL').resolves('myurlsigned.com'); - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); + let actualResult: any = null; - sinon.stub(survey_queries, 'getSurveyAttachmentS3KeySQL').returns(SQL`some query`); - sinon.stub(file_utils, 'getS3SignedURL').resolves('myurlsigned.com'); + const sampleRes = { + status: () => { + return { + json: (result: any) => { + actualResult = result; + } + }; + } + }; const result = get_signed_url.getSurveyAttachmentSignedURL(); await result(sampleReq, sampleRes as any, (null as unknown) as any); expect(actualResult).to.eql('myurlsigned.com'); + expect(getSurveyReportAttachmentS3KeyStub).to.be.calledOnce; + expect(getS3SignedURLStub).to.be.calledOnce; }); }); - describe('report attachments', () => { - it('should throw a 400 error when no sql statement returned', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + describe('non report attachments', () => { + it('should return the signed url response on success', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const getSurveyAttachmentS3KeyStub = sinon + .stub(AttachmentService.prototype, 'getSurveyAttachmentS3Key') + .resolves('key'); + + const sampleReq = { + keycloak_token: {}, + body: { attachments: [], security_ids: [] }, + params: { + projectId: 1, + attachmentId: 1 + }, + query: { + attachmentType: 'Other' } - }); + } as any; - sinon.stub(survey_queries, 'getSurveyReportAttachmentS3KeySQL').returns(null); + const getS3SignedURLStub = sinon.stub(file_utils, 'getS3SignedURL').resolves(); - try { - const result = get_signed_url.getSurveyAttachmentSignedURL(); + let actualResult: any = null; - await result( - { - ...sampleReq, - query: { - attachmentType: ATTACHMENT_TYPE.REPORT + const sampleRes = { + status: () => { + return { + json: (result: any) => { + actualResult = result; } - }, - sampleRes as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build report attachment S3 key SQLstatement'); - } - }); - - it('should return the signed url response on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [{ key: 's3Key' }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'getSurveyReportAttachmentS3KeySQL').returns(SQL`some query`); - sinon.stub(file_utils, 'getS3SignedURL').resolves('myurlsigned.com'); + }; + } + }; const result = get_signed_url.getSurveyAttachmentSignedURL(); - await result( - { - ...sampleReq, - query: { - attachmentType: ATTACHMENT_TYPE.REPORT - } - }, - sampleRes as any, - (null as unknown) as any - ); + await result(sampleReq, sampleRes as any, (null as unknown) as any); - expect(actualResult).to.eql('myurlsigned.com'); + expect(actualResult).to.eql(null); + expect(getSurveyAttachmentS3KeyStub).to.be.calledOnce; + expect(getS3SignedURLStub).to.be.calledOnce; }); }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/getSignedUrl.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/getSignedUrl.ts index 3554067202..9b52eaa01c 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/getSignedUrl.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/getSignedUrl.ts @@ -1,11 +1,10 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { ATTACHMENT_TYPE } from '../../../../../../../constants/attachments'; -import { PROJECT_ROLE, SYSTEM_ROLE } from '../../../../../../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../errors/custom-error'; -import { queries } from '../../../../../../../queries/queries'; +import { PROJECT_ROLE } from '../../../../../../../constants/roles'; +import { getDBConnection } from '../../../../../../../database/db'; import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; +import { AttachmentService } from '../../../../../../../services/attachment-service'; import { getS3SignedURL } from '../../../../../../../utils/file-utils'; import { getLogger } from '../../../../../../../utils/logger'; @@ -31,7 +30,7 @@ GET.apiDoc = { tags: ['attachment'], security: [ { - Bearer: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR] + Bearer: [] } ], parameters: [ @@ -39,7 +38,8 @@ GET.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -47,7 +47,8 @@ GET.apiDoc = { in: 'path', name: 'surveyId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -55,7 +56,8 @@ GET.apiDoc = { in: 'path', name: 'attachmentId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -108,35 +110,23 @@ export function getSurveyAttachmentSignedURL(): RequestHandler { req_body: req.body }); - if (!req.params.surveyId) { - throw new HTTP400('Missing required path param `surveyId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - - if (!req.query.attachmentType) { - throw new HTTP400('Missing required query param `attachmentType`'); - } - const connection = getDBConnection(req['keycloak_token']); try { await connection.open(); let s3Key; + const attachmentService = new AttachmentService(connection); + if (req.query.attachmentType === ATTACHMENT_TYPE.REPORT) { - s3Key = await getSurveyReportAttachmentS3Key( + s3Key = await attachmentService.getSurveyReportAttachmentS3Key( Number(req.params.surveyId), - Number(req.params.attachmentId), - connection + Number(req.params.attachmentId) ); } else { - s3Key = await getSurveyAttachmentS3Key( + s3Key = await attachmentService.getSurveyAttachmentS3Key( Number(req.params.surveyId), - Number(req.params.attachmentId), - connection + Number(req.params.attachmentId) ); } await connection.commit(); @@ -157,43 +147,3 @@ export function getSurveyAttachmentSignedURL(): RequestHandler { } }; } - -export const getSurveyAttachmentS3Key = async ( - surveyId: number, - attachmentId: number, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.survey.getSurveyAttachmentS3KeySQL(surveyId, attachmentId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build attachment S3 key SQLstatement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to get attachment S3 key'); - } - - return response.rows[0].key; -}; - -export const getSurveyReportAttachmentS3Key = async ( - surveyId: number, - attachmentId: number, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.survey.getSurveyReportAttachmentS3KeySQL(surveyId, attachmentId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build report attachment S3 key SQLstatement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to get attachment S3 key'); - } - - return response.rows[0].key; -}; diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeSecure.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeSecure.test.ts deleted file mode 100644 index 3e1d7e5e28..0000000000 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeSecure.test.ts +++ /dev/null @@ -1,190 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import * as db from '../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../errors/custom-error'; -import security_queries from '../../../../../../../queries/security'; -import { getMockDBConnection } from '../../../../../../../__mocks__/db'; -import * as makeSecure from './makeSecure'; - -chai.use(sinonChai); - -describe('makeSurveyAttachmentSecure', () => { - afterEach(() => { - sinon.restore(); - }); - - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1, - attachmentId: 2, - surveyId: 3 - }, - body: { - attachmentType: 'Image' - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - it('should throw an error when projectId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeSecure.makeSurveyAttachmentSecure(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw an error when attachmentId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeSecure.makeSurveyAttachmentSecure(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, attachmentId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should throw an error when surveyId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeSecure.makeSurveyAttachmentSecure(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, surveyId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `surveyId`'); - } - }); - - it('should throw an error when attachmentType is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeSecure.makeSurveyAttachmentSecure(); - - await result( - { ...sampleReq, body: { ...sampleReq.body, attachmentType: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required body param `attachmentType`'); - } - }); - - it('should throw an error when fails to build secureAttachmentRecordSQL statement', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(security_queries, 'secureAttachmentRecordSQL').returns(null); - - try { - const result = makeSecure.makeSurveyAttachmentSecure(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL secure record statement'); - } - }); - - it('should throw an error when fails to secure record', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: null - }); - - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, query: mockQuery }); - sinon.stub(security_queries, 'secureAttachmentRecordSQL').returns(SQL`something`); - - try { - const result = makeSecure.makeSurveyAttachmentSecure(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to secure record'); - } - }); - - it('should work on success when type is not Report', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: 1 - }); - - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, query: mockQuery }); - sinon.stub(security_queries, 'secureAttachmentRecordSQL').returns(SQL`something`); - - const result = makeSecure.makeSurveyAttachmentSecure(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.equal(1); - }); - - it('should work on success when type is Report', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: 1 - }); - - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, query: mockQuery }); - sinon.stub(security_queries, 'secureAttachmentRecordSQL').returns(SQL`something`); - - const result = makeSecure.makeSurveyAttachmentSecure(); - - await result( - { ...sampleReq, body: { ...sampleReq.body, attachmentType: 'Report' } }, - sampleRes as any, - (null as unknown) as any - ); - - expect(actualResult).to.equal(1); - }); -}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeSecure.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeSecure.ts deleted file mode 100644 index 04a15137a8..0000000000 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeSecure.ts +++ /dev/null @@ -1,158 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE } from '../../../../../../../constants/roles'; -import { getDBConnection } from '../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../errors/custom-error'; -import { queries } from '../../../../../../../queries/queries'; -import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; -import { getLogger } from '../../../../../../../utils/logger'; - -const defaultLog = getLogger('/api/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeSecure'); - -export const PUT: Operation = [ - authorizeRequestHandler((req) => { - return { - and: [ - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR], - projectId: Number(req.params.projectId), - discriminator: 'ProjectRole' - } - ] - }; - }), - makeSurveyAttachmentSecure() -]; - -PUT.apiDoc = { - description: 'Make security status of a survey attachment secure.', - tags: ['attachment', 'security_status'], - security: [ - { - Bearer: [] - } - ], - parameters: [ - { - in: 'path', - name: 'projectId', - schema: { - type: 'number' - }, - required: true - }, - { - in: 'path', - name: 'surveyId', - schema: { - type: 'number' - }, - required: true - }, - { - in: 'path', - name: 'attachmentId', - schema: { - type: 'number' - }, - required: true - } - ], - requestBody: { - description: 'Current attachment type for survey attachment.', - content: { - 'application/json': { - schema: { - type: 'object' - } - } - } - }, - responses: { - 200: { - description: 'Survey attachment make secure security status response.', - content: { - 'application/json': { - schema: { - title: 'Row count of record for which security status has been made secure', - type: 'number' - } - } - } - }, - 401: { - $ref: '#/components/responses/401' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function makeSurveyAttachmentSecure(): RequestHandler { - return async (req, res) => { - defaultLog.debug({ - label: 'Make security status of a survey attachment secure', - message: 'params', - req_params: req.params - }); - - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.surveyId) { - throw new HTTP400('Missing required path param `surveyId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - - if (!req.body || !req.body.attachmentType) { - throw new HTTP400('Missing required body param `attachmentType`'); - } - - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - const secureRecordSQLStatement = - req.body.attachmentType === 'Report' - ? queries.security.secureAttachmentRecordSQL( - Number(req.params.attachmentId), - 'survey_report_attachment', - Number(req.params.projectId) - ) - : queries.security.secureAttachmentRecordSQL( - Number(req.params.attachmentId), - 'survey_attachment', - Number(req.params.projectId) - ); - - if (!secureRecordSQLStatement) { - throw new HTTP400('Failed to build SQL secure record statement'); - } - - const secureRecordSQLResponse = await connection.query( - secureRecordSQLStatement.text, - secureRecordSQLStatement.values - ); - - if (!secureRecordSQLResponse || !secureRecordSQLResponse.rowCount) { - throw new HTTP400('Failed to secure record'); - } - - await connection.commit(); - - return res.status(200).json(1); - } catch (error) { - defaultLog.error({ label: 'makeSurveyAttachmentSecure', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeUnsecure.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeUnsecure.test.ts deleted file mode 100644 index 3c952ca7b4..0000000000 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeUnsecure.test.ts +++ /dev/null @@ -1,223 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import * as db from '../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../errors/custom-error'; -import security_queries from '../../../../../../../queries/security'; -import { getMockDBConnection } from '../../../../../../../__mocks__/db'; -import * as makeUnsecure from './makeUnsecure'; - -chai.use(sinonChai); - -describe('makeSurveyAttachmentUnsecure', () => { - afterEach(() => { - sinon.restore(); - }); - - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1, - surveyId: 3, - attachmentId: 2 - }, - body: { - securityToken: 'sometoken', - attachmentType: 'Image' - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - it('should throw an error when projectId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeUnsecure.makeSurveyAttachmentUnsecure(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw an error when surveyId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeUnsecure.makeSurveyAttachmentUnsecure(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, surveyId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw an error when attachmentId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeUnsecure.makeSurveyAttachmentUnsecure(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, attachmentId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should throw an error when request body is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeUnsecure.makeSurveyAttachmentUnsecure(); - - await result({ ...sampleReq, body: null }, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required request body'); - } - }); - - it('should throw an error when attachmentType is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeUnsecure.makeSurveyAttachmentUnsecure(); - - await result( - { ...sampleReq, body: { attachmentType: null, securityToken: 'sometoken' } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required request body'); - } - }); - - it('should throw an error when securityToken is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = makeUnsecure.makeSurveyAttachmentUnsecure(); - - await result( - { ...sampleReq, body: { attachmentType: 'Image', securityToken: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required request body'); - } - }); - - it('should throw an error when fails to build unsecureRecordSQL statement', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(null); - - try { - const result = makeUnsecure.makeSurveyAttachmentUnsecure(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL unsecure record statement'); - } - }); - - it('should throw an error when fails to unsecure record', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: null - }); - - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, query: mockQuery }); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - - try { - const result = makeUnsecure.makeSurveyAttachmentUnsecure(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to unsecure record'); - } - }); - - it('should work on success when type is not Report', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: 1 - }); - - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, query: mockQuery }); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - - const result = makeUnsecure.makeSurveyAttachmentUnsecure(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.equal(1); - }); - - it('should work on success when type is Report', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: 1 - }); - - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, query: mockQuery }); - sinon.stub(security_queries, 'unsecureAttachmentRecordSQL').returns(SQL`something`); - - const result = makeUnsecure.makeSurveyAttachmentUnsecure(); - - await result( - { ...sampleReq, body: { ...sampleReq.body, attachmentType: 'Report' } }, - sampleRes as any, - (null as unknown) as any - ); - - expect(actualResult).to.equal(1); - }); -}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeUnsecure.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeUnsecure.ts deleted file mode 100644 index 2b63545e3f..0000000000 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeUnsecure.ts +++ /dev/null @@ -1,150 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE } from '../../../../../../../constants/roles'; -import { getDBConnection } from '../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../errors/custom-error'; -import { queries } from '../../../../../../../queries/queries'; -import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; -import { getLogger } from '../../../../../../../utils/logger'; - -const defaultLog = getLogger('/api/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/makeUnsecure'); - -export const PUT: Operation = [ - authorizeRequestHandler((req) => { - return { - and: [ - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR], - projectId: Number(req.params.projectId), - discriminator: 'ProjectRole' - } - ] - }; - }), - makeSurveyAttachmentUnsecure() -]; - -PUT.apiDoc = { - description: 'Make security status of a survey attachment unsecure.', - tags: ['attachment', 'security_status'], - security: [ - { - Bearer: [] - } - ], - parameters: [ - { - in: 'path', - name: 'projectId', - schema: { - type: 'number' - }, - required: true - }, - { - in: 'path', - name: 'surveyId', - schema: { - type: 'number' - }, - required: true - }, - { - in: 'path', - name: 'attachmentId', - schema: { - type: 'number' - }, - required: true - } - ], - requestBody: { - description: 'Current security token value and attachment type for survey attachment.', - content: { - 'application/json': { - schema: { - type: 'object' - } - } - } - }, - responses: { - 200: { - description: 'Survey attachment make unsecure security status response.', - content: { - 'application/json': { - schema: { - title: 'Row count of record for which security status has been made unsecure', - type: 'number' - } - } - } - }, - 401: { - $ref: '#/components/responses/401' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function makeSurveyAttachmentUnsecure(): RequestHandler { - return async (req, res) => { - defaultLog.debug({ - label: 'Make security status of a survey attachment unsecure', - message: 'params', - req_params: req.params - }); - - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.surveyId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - - if (!req.body || !req.body.attachmentType || !req.body.securityToken) { - throw new HTTP400('Missing required request body'); - } - - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - const unsecureRecordSQLStatement = - req.body.attachmentType === 'Report' - ? queries.security.unsecureAttachmentRecordSQL('survey_report_attachment', req.body.securityToken) - : queries.security.unsecureAttachmentRecordSQL('survey_attachment', req.body.securityToken); - - if (!unsecureRecordSQLStatement) { - throw new HTTP400('Failed to build SQL unsecure record statement'); - } - - const unsecureRecordSQLResponse = await connection.query( - unsecureRecordSQLStatement.text, - unsecureRecordSQLStatement.values - ); - - if (!unsecureRecordSQLResponse || !unsecureRecordSQLResponse.rowCount) { - throw new HTTP400('Failed to unsecure record'); - } - - await connection.commit(); - - return res.status(200).json(1); - } catch (error) { - defaultLog.error({ label: 'makeSurveyAttachmentUnsecure', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/get.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/get.test.ts index 9786ad9b91..2d0823c39d 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/get.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/get.test.ts @@ -2,178 +2,91 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../../../queries/survey'; +import { HTTPError } from '../../../../../../../../errors/http-error'; +import { + IProjectReportAttachment, + IReportAttachmentAuthor +} from '../../../../../../../../repositories/attachment-repository'; +import { AttachmentService } from '../../../../../../../../services/attachment-service'; import { getMockDBConnection } from '../../../../../../../../__mocks__/db'; -import * as get_survey_metadata from './get'; +import * as get from './get'; chai.use(sinonChai); -describe('gets metadata for a survey report', () => { - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - body: {}, - params: { - projectId: 1, - surveyId: 1, - attachmentId: 1 - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - +describe('getSurveyReportDetails', () => { afterEach(() => { sinon.restore(); }); - it('should throw a 400 error when no projectId is provided', async () => { + it('should throw an error if failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - try { - const result = get_survey_metadata.getSurveyReportMetaData(); - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw a 400 error when no surveyId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = get_survey_metadata.getSurveyReportMetaData(); - await result( - { ...sampleReq, params: { ...sampleReq.params, surveyId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `surveyId`'); - } - }); - - it('should throw a 400 error when no attachmentId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = get_survey_metadata.getSurveyReportMetaData(); - await result( - { ...sampleReq, params: { ...sampleReq.params, attachmentId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should throw a 400 error when no sql statement returned for getProjectReportAttachmentSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(survey_queries, 'getSurveyReportAttachmentSQL').returns(null); - - try { - const result = get_survey_metadata.getSurveyReportMetaData(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build metadata SQLStatement'); - } - }); - - it('should throw a 400 error when no sql statement returned for getSurveyReportAuthorsSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); + const mockReq = { + keycloak_token: {}, + params: { + projectId: 1, + attachmentId: 2 + }, + body: {} + } as any; - sinon.stub(survey_queries, 'getSurveyReportAuthorsSQL').returns(null); + const expectedError = new Error('cannot process request'); + sinon.stub(AttachmentService.prototype, 'getSurveyReportAttachmentById').rejects(expectedError); try { - const result = get_survey_metadata.getSurveyReportMetaData(); + const result = get.getSurveyReportDetails(); - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); + await result(mockReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build metadata SQLStatement'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should return a project report metadata, on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.onCall(0).resolves({ - rowCount: 1, - rows: [ - { - attachment_id: 1, - title: 'My report', - update_date: '2020-10-10', - description: 'some description', - year_published: 2020, - revision_count: '1' - } - ] - }); - mockQuery.onCall(1).resolves({ rowCount: 1, rows: [{ first_name: 'John', last_name: 'Smith' }] }); + it('should succeed with valid params', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + const mockReq = { + keycloak_token: {}, + params: { + projectId: 1, + attachmentId: 2 }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'getSurveyReportAttachmentSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'getSurveyReportAuthorsSQL').returns(SQL`something`); - - const result = get_survey_metadata.getSurveyReportMetaData(); + body: {} + } as any; + + const getSurveyReportAttachmentByIdStub = sinon + .stub(AttachmentService.prototype, 'getSurveyReportAttachmentById') + .resolves(({ report: 1 } as unknown) as IProjectReportAttachment); + + const getSurveyAttachmentAuthorsStub = sinon + .stub(AttachmentService.prototype, 'getSurveyAttachmentAuthors') + .resolves([({ author: 2 } as unknown) as IReportAttachmentAuthor]); + + const expectedResponse = { + metadata: { report: 1 }, + authors: [{ author: 2 }] + }; + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; - await result(sampleReq, sampleRes as any, (null as unknown) as any); + const result = get.getSurveyReportDetails(); + await result(mockReq, (sampleRes as unknown) as any, (null as unknown) as any); - expect(actualResult).to.be.eql({ - attachment_id: 1, - title: 'My report', - last_modified: '2020-10-10', - description: 'some description', - year_published: 2020, - revision_count: '1', - authors: [{ first_name: 'John', last_name: 'Smith' }] - }); + expect(actualResult).to.eql(expectedResponse); + expect(getSurveyReportAttachmentByIdStub).to.be.calledOnce; + expect(getSurveyAttachmentAuthorsStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/get.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/get.ts index cec171ecf7..01420a0b70 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/get.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/get.ts @@ -1,11 +1,9 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; -import { PROJECT_ROLE, SYSTEM_ROLE } from '../../../../../../../../constants/roles'; +import { PROJECT_ROLE } from '../../../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../../errors/custom-error'; -import { GetReportAttachmentMetadata } from '../../../../../../../../models/project-survey-attachments'; -import { queries } from '../../../../../../../../queries/queries'; import { authorizeRequestHandler } from '../../../../../../../../request-handlers/security/authorization'; +import { AttachmentService } from '../../../../../../../../services/attachment-service'; import { getLogger } from '../../../../../../../../utils/logger'; const defaultLog = getLogger('/api/project/{projectId}/attachments/{attachmentId}/getSignedUrl'); @@ -22,15 +20,15 @@ export const GET: Operation = [ ] }; }), - getSurveyReportMetaData() + getSurveyReportDetails() ]; GET.apiDoc = { - description: 'Retrieves the report metadata of a project attachment if filetype is Report.', + description: 'Retrieves the report metadata of a survey attachment if filetype is Report.', tags: ['attachment'], security: [ { - Bearer: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR] + Bearer: [] } ], parameters: [ @@ -38,7 +36,8 @@ GET.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -46,7 +45,8 @@ GET.apiDoc = { in: 'path', name: 'surveyId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -54,7 +54,8 @@ GET.apiDoc = { in: 'path', name: 'attachmentId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -65,36 +66,43 @@ GET.apiDoc = { content: { 'application/json': { schema: { + title: 'metadata get response object', type: 'object', - required: [ - 'attachment_id', - 'title', - 'last_modified', - 'description', - 'year_published', - 'revision_count', - 'authors' - ], + required: ['metadata', 'authors'], properties: { - attachment_id: { - type: 'number' - }, - title: { - type: 'string' - }, - last_modified: { - type: 'string' - }, - description: { - type: 'string' - }, - year_published: { - type: 'number' - }, - revision_count: { - type: 'number' + metadata: { + description: 'Report metadata general information object', + type: 'object', + required: ['id', 'title', 'last_modified', 'description', 'year_published', 'revision_count'], + properties: { + id: { + description: 'Report metadata attachment id', + type: 'number' + }, + title: { + description: 'Report metadata attachment title ', + type: 'string' + }, + last_modified: { + description: 'Report metadata last modified', + type: 'string' + }, + description: { + description: 'Report metadata description', + type: 'string' + }, + year_published: { + description: 'Report metadata year published', + type: 'number' + }, + revision_count: { + description: 'Report metadata revision count', + type: 'number' + } + } }, authors: { + description: 'Report metadata author object', type: 'array', items: { type: 'object', @@ -114,7 +122,6 @@ GET.apiDoc = { } } }, - 400: { $ref: '#/components/responses/400' }, @@ -133,7 +140,7 @@ GET.apiDoc = { } }; -export function getSurveyReportMetaData(): RequestHandler { +export function getSurveyReportDetails(): RequestHandler { return async (req, res) => { defaultLog.debug({ label: 'getSurveyReportMetaData', @@ -142,56 +149,29 @@ export function getSurveyReportMetaData(): RequestHandler { req_query: req.query }); - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - if (!req.params.surveyId) { - throw new HTTP400('Missing required path param `surveyId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - const connection = getDBConnection(req['keycloak_token']); try { - const getProjectReportAttachmentSQLStatement = queries.survey.getSurveyReportAttachmentSQL( - Number(req.params.projectId), - Number(req.params.attachmentId) - ); - - const getProjectReportAuthorsSQLStatement = queries.survey.getSurveyReportAuthorsSQL( - Number(req.params.attachmentId) - ); - - if (!getProjectReportAttachmentSQLStatement || !getProjectReportAuthorsSQLStatement) { - throw new HTTP400('Failed to build metadata SQLStatement'); - } - await connection.open(); + const attachmentService = new AttachmentService(connection); - const reportMetaData = await connection.query( - getProjectReportAttachmentSQLStatement.text, - getProjectReportAttachmentSQLStatement.values + const surveyReportAttachment = await attachmentService.getSurveyReportAttachmentById( + Number(req.params.surveyId), + Number(req.params.attachmentId) ); - const reportAuthorsData = await connection.query( - getProjectReportAuthorsSQLStatement.text, - getProjectReportAuthorsSQLStatement.values - ); + const surveyReportAuthors = await attachmentService.getSurveyAttachmentAuthors(Number(req.params.attachmentId)); await connection.commit(); - const getReportMetaData = reportMetaData && reportMetaData.rows[0]; - - const getReportAuthorsData = reportAuthorsData && reportAuthorsData.rows; - - const reportMetaObj = new GetReportAttachmentMetadata(getReportMetaData, getReportAuthorsData); + const reportDetails = { + metadata: surveyReportAttachment, + authors: surveyReportAuthors + }; - return res.status(200).json(reportMetaObj); + return res.status(200).json(reportDetails); } catch (error) { - defaultLog.error({ label: 'getReportMetadata', message: 'error', error }); + defaultLog.error({ label: 'getSurveyDetails', message: 'error', error }); await connection.rollback(); throw error; } finally { diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/update.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/update.test.ts index c8859abdf0..131b9bfdf4 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/update.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/update.test.ts @@ -2,11 +2,10 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../../../queries/survey'; -import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../../__mocks__/db'; +import { HTTPError } from '../../../../../../../../errors/http-error'; +import { AttachmentService } from '../../../../../../../../services/attachment-service'; +import { getMockDBConnection } from '../../../../../../../../__mocks__/db'; import * as update_survey_metadata from './update'; chai.use(sinonChai); @@ -16,309 +15,100 @@ describe('updates metadata for a survey report', () => { sinon.restore(); }); - it('should throw a 400 error when no projectId is provided', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '', - surveyId: '1', - attachmentId: '1' - }; - mockReq.body = { - attachment_type: 'Report', - revision_count: 1, - attachment_meta: { - title: 'My report', - year_published: 2000, - description: 'report abstract', - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ] - } - }; - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const requestHandler = update_survey_metadata.updateSurveyReportMetadata(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw a 400 error when no surveyId is provided', async () => { + it('should throw a 400 error when the response is null', async () => { const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '', - attachmentId: '1' - }; - mockReq.body = { - attachment_type: 'Report', - revision_count: 1, - attachment_meta: { - title: 'My report', - year_published: 2000, - description: 'report abstract', - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ] - } - }; - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - try { - const requestHandler = update_survey_metadata.updateSurveyReportMetadata(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `surveyId`'); - } - }); - - it('should throw a 400 error when no attachmentId is provided', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '1', - attachmentId: '' - }; - mockReq.body = { - attachment_type: 'Report', - revision_count: 1, - attachment_meta: { - title: 'My report', - year_published: 2000, - description: 'report abstract', - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ] + const sampleReq = { + keycloak_token: {}, + body: { + attachment_type: 'Report', + revision_count: 1, + attachment_meta: { + title: 'My report', + year_published: 2000, + description: 'report abstract', + authors: [ + { + first_name: 'John', + last_name: 'Smith' + } + ] + } + }, + params: { + projectId: '1', + attachmentId: '1' } - }; + } as any; - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + const expectedError = new Error('cannot process request'); + sinon.stub(AttachmentService.prototype, 'updateSurveyReportAttachmentMetadata').rejects(expectedError); try { - const requestHandler = update_survey_metadata.updateSurveyReportMetadata(); + const result = update_survey_metadata.updateSurveyReportMetadata(); - await requestHandler(mockReq, mockRes, mockNext); + await result(sampleReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should throw a 400 error when attachment_type is invalid', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '1', - attachmentId: '1' - }; - mockReq.body = { - attachment_type: 'notAReport', - revision_count: 1, - attachment_meta: { - title: 'My report', - year_published: 2000, - description: 'report abstract', - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ] - } - }; - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const requestHandler = update_survey_metadata.updateSurveyReportMetadata(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Invalid body param `attachment_type`'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); it('should update a survey report metadata, on success', async () => { const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '1', - attachmentId: '1' - }; - mockReq.body = { - attachment_type: 'Report', - revision_count: 1, - attachment_meta: { - title: 'My report', - year_published: 2000, - description: 'report abstract', - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ] + const sampleReq = { + keycloak_token: {}, + body: { + attachment_type: 'Report', + revision_count: 1, + attachment_meta: { + title: 'My report', + year_published: 2000, + description: 'report abstract', + authors: [ + { + first_name: 'John', + last_name: 'Smith' + } + ] + } + }, + params: { + projectId: '1', + attachmentId: '1' } - }; - - const mockQuery = sinon.stub(); - - mockQuery.onCall(0).resolves({ - rowCount: 1, - rows: [{ id: 1 }] - }); - mockQuery.onCall(1).resolves({ - rowCount: 1, - rows: [{ id: 1 }] - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery - }); - - const requestHandler = update_survey_metadata.updateSurveyReportMetadata(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.statusValue).to.equal(200); - }); - - it('should throw a 400 error when updateSurveyReportAttachmentMetadataSQL returns null', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '1', - attachmentId: '1' - }; - mockReq.body = { - attachment_type: 'Report', - revision_count: 1, - attachment_meta: { - title: 'My report', - year_published: 2000, - description: 'report abstract', - authors: [ - { - first_name: 'John', - last_name: 'Smith' + } as any; + + const updateSurveyReportAttachmentMetadataStub = sinon + .stub(AttachmentService.prototype, 'updateSurveyReportAttachmentMetadata') + .resolves(); + const deleteSurveyReportAttachmentAuthorsStub = sinon + .stub(AttachmentService.prototype, 'deleteSurveyReportAttachmentAuthors') + .resolves(); + const insertSurveyReportAttachmentAuthorStub = sinon + .stub(AttachmentService.prototype, 'insertSurveyReportAttachmentAuthor') + .resolves(); + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + send: (response: any) => { + actualResult = response; } - ] + }; } }; - const mockQuery = sinon.stub(); - - mockQuery.onCall(0).resolves({ - rowCount: 1, - rows: [{ id: 1 }] - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery - }); - - sinon.stub(survey_queries, 'updateSurveyReportAttachmentMetadataSQL').returns(null); - const requestHandler = update_survey_metadata.updateSurveyReportMetadata(); + await requestHandler(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to build SQL update attachment report statement'); - expect((actualError as HTTPError).status).to.equal(400); - } - }); - - it('should throw a 400 error when the response is null', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '1', - attachmentId: '1' - }; - mockReq.body = { - attachment_type: 'Report', - revision_count: 1, - attachment_meta: { - title: 'My report', - year_published: 2000, - description: 'report abstract', - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ] - } - }; - - const mockQuery = sinon.stub(); - - mockQuery.onCall(0).resolves({ - rowCount: null - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery - }); - - sinon.stub(survey_queries, 'updateSurveyReportAttachmentMetadataSQL').returns(SQL`something`); - - const requestHandler = update_survey_metadata.updateSurveyReportMetadata(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to update attachment report record'); - expect((actualError as HTTPError).status).to.equal(400); - } + expect(actualResult).to.equal(undefined); + expect(updateSurveyReportAttachmentMetadataStub).to.be.calledOnce; + expect(deleteSurveyReportAttachmentAuthorsStub).to.be.calledOnce; + expect(insertSurveyReportAttachmentAuthorStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/update.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/update.ts index cfa304c74f..dda6e228e4 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/update.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/attachments/{attachmentId}/metadata/update.ts @@ -1,16 +1,14 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { ATTACHMENT_TYPE } from '../../../../../../../../constants/attachments'; -import { PROJECT_ROLE, SYSTEM_ROLE } from '../../../../../../../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../../errors/custom-error'; +import { PROJECT_ROLE } from '../../../../../../../../constants/roles'; +import { getDBConnection } from '../../../../../../../../database/db'; import { PutReportAttachmentMetadata } from '../../../../../../../../models/project-survey-attachments'; -import { queries } from '../../../../../../../../queries/queries'; import { authorizeRequestHandler } from '../../../../../../../../request-handlers/security/authorization'; +import { AttachmentService } from '../../../../../../../../services/attachment-service'; import { getLogger } from '../../../../../../../../utils/logger'; -import { deleteSurveyReportAttachmentAuthors, insertSurveyReportAttachmentAuthor } from '../../report/upload'; -const defaultLog = getLogger('/api/project/{projectId}/attachments/{attachmentId}/metadata/update'); +const defaultLog = getLogger('`/api/project/{projectId}/survey/{surveyId}/attachments/${attachmentId}/metadata/update'); export const PUT: Operation = [ authorizeRequestHandler((req) => { @@ -32,7 +30,7 @@ PUT.apiDoc = { tags: ['attachment'], security: [ { - Bearer: [SYSTEM_ROLE.SYSTEM_ADMIN, SYSTEM_ROLE.PROJECT_CREATOR] + Bearer: [] } ], parameters: [ @@ -40,7 +38,8 @@ PUT.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -48,7 +47,8 @@ PUT.apiDoc = { in: 'path', name: 'surveyId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -56,7 +56,8 @@ PUT.apiDoc = { in: 'path', name: 'attachmentId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -141,22 +142,6 @@ export function updateSurveyReportMetadata(): RequestHandler { req_body: req.body }); - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.surveyId) { - throw new HTTP400('Missing required path param `surveyId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - - if (!Object.values(ATTACHMENT_TYPE).includes(req.body?.attachment_type)) { - throw new HTTP400('Invalid body param `attachment_type`'); - } - const connection = getDBConnection(req['keycloak_token']); try { @@ -168,23 +153,24 @@ export function updateSurveyReportMetadata(): RequestHandler { revision_count: req.body.revision_count }); + const attachmentService = new AttachmentService(connection); + // Update the metadata fields of the attachment record - await updateSurveyReportAttachmentMetadata( + await attachmentService.updateSurveyReportAttachmentMetadata( Number(req.params.surveyId), Number(req.params.attachmentId), - metadata, - connection + metadata ); // Delete any existing attachment author records - await deleteSurveyReportAttachmentAuthors(Number(req.params.attachmentId), connection); + await attachmentService.deleteSurveyReportAttachmentAuthors(Number(req.params.attachmentId)); const promises = []; // Insert any new attachment author records promises.push( metadata.authors.map((author) => - insertSurveyReportAttachmentAuthor(Number(req.params.attachmentId), author, connection) + attachmentService.insertSurveyReportAttachmentAuthor(Number(req.params.attachmentId), author) ) ); @@ -203,22 +189,3 @@ export function updateSurveyReportMetadata(): RequestHandler { } }; } - -const updateSurveyReportAttachmentMetadata = async ( - surveyId: number, - attachmentId: number, - metadata: PutReportAttachmentMetadata, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.survey.updateSurveyReportAttachmentMetadataSQL(surveyId, attachmentId, metadata); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL update attachment report statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to update attachment report record'); - } -}; diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/delete.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/delete.test.ts index ef6b94d9c8..0efb96d7b7 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/delete.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/delete.test.ts @@ -1,15 +1,17 @@ -import { DeleteObjectOutput } from 'aws-sdk/clients/s3'; +import { S3 } from 'aws-sdk'; import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; -import survey_queries from '../../../../../queries/survey'; +import { HTTPError } from '../../../../../errors/http-error'; +import { IProjectAttachment } from '../../../../../repositories/attachment-repository'; +import { AttachmentService } from '../../../../../services/attachment-service'; +import { PlatformService } from '../../../../../services/platform-service'; +import { SurveyService } from '../../../../../services/survey-service'; import * as file_utils from '../../../../../utils/file-utils'; import { getMockDBConnection } from '../../../../../__mocks__/db'; -import * as delete_survey from './delete'; +import * as del from './delete'; chai.use(sinonChai); @@ -18,138 +20,118 @@ describe('deleteSurvey', () => { sinon.restore(); }); - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1, - surveyId: 1 - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - it('should throw an error when surveyId is missing', async () => { + it('should throw an error when a failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - try { - const result = delete_survey.deleteSurvey(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, surveyId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `surveyId`'); - } - }); + const expectedError = new Error('cannot process request'); + sinon.stub(AttachmentService.prototype, 'getSurveyAttachments').rejects(expectedError); - it('should throw a 400 error when no sql statement returned', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; + const sampleReq = { + keycloak_token: {}, + body: {}, + params: { + projectId: 1, + surveyId: 2 } - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentsSQL').returns(null); + } as any; try { - const result = delete_survey.deleteSurvey(); + const result = del.deleteSurvey(); await result(sampleReq, (null as unknown) as any, (null as unknown) as any); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); + expect((actualError as HTTPError).message).to.equal(expectedError.message); } }); - it('should throw a 400 error when failed to get result for survey attachments', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: null }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); + it('should delete Survey if no S3 files deleted return', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(survey_queries, 'getSurveyAttachmentsSQL').returns(SQL`something`); + const sampleReq = { + keycloak_token: {}, + body: {}, + params: { + projectId: 1, + surveyId: 2 + } + } as any; + + const getSurveyAttachmentsStub = sinon + .stub(AttachmentService.prototype, 'getSurveyAttachments') + .resolves([({ key: 'key' } as unknown) as IProjectAttachment]); + + const deleteSurveyStub = sinon.stub(SurveyService.prototype, 'deleteSurvey').resolves(); + + const fileUtilsStub = sinon + .stub(file_utils, 'deleteFileFromS3') + .resolves((false as unknown) as S3.DeleteObjectOutput); + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; - try { - const result = delete_survey.deleteSurvey(); + const result = del.deleteSurvey(); - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to get survey attachments'); - } + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(null); + expect(getSurveyAttachmentsStub).to.be.calledOnce; + expect(deleteSurveyStub).to.be.calledOnce; + expect(fileUtilsStub).to.be.calledOnce; }); - it('should return null when deleting file from S3 fails', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [{ key: 's3Key' }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'getSurveyAttachmentsSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'deleteSurveySQL').returns(SQL`something`); - sinon.stub(file_utils, 'deleteFileFromS3').resolves(null); - - const result = delete_survey.deleteSurvey(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); + it('should delete Survey in db and s3', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - expect(actualResult).to.equal(null); - }); + const sampleReq = { + keycloak_token: {}, + body: {}, + params: { + projectId: 1, + surveyId: 2 + } + } as any; - it('should return true boolean response on success', async () => { - const mockQuery = sinon.stub(); + const getSurveyAttachmentsStub = sinon + .stub(AttachmentService.prototype, 'getSurveyAttachments') + .resolves([({ key: 'key' } as unknown) as IProjectAttachment]); - mockQuery.resolves({ rows: [{ key: 's3Key' }] }); + const deleteSurveyStub = sinon.stub(SurveyService.prototype, 'deleteSurvey').resolves(); - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); + const fileUtilsStub = sinon + .stub(file_utils, 'deleteFileFromS3') + .resolves((true as unknown) as S3.DeleteObjectOutput); - sinon.stub(survey_queries, 'getSurveyAttachmentsSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'deleteSurveySQL').returns(SQL`something`); - sinon.stub(file_utils, 'deleteFileFromS3').resolves('non null response' as DeleteObjectOutput); + const submitDwCAMetadataPackageStub = sinon.stub(PlatformService.prototype, 'submitDwCAMetadataPackage').resolves(); - const result = delete_survey.deleteSurvey(); + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; - await result(sampleReq, sampleRes as any, (null as unknown) as any); + const result = del.deleteSurvey(); - expect(actualResult).to.equal(true); + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(true); + expect(getSurveyAttachmentsStub).to.be.calledOnce; + expect(deleteSurveyStub).to.be.calledOnce; + expect(fileUtilsStub).to.be.calledOnce; + expect(submitDwCAMetadataPackageStub).to.be.calledOnce; }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/delete.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/delete.ts index 634bd34b34..292206d03c 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/delete.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/delete.ts @@ -1,10 +1,11 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../../../../database/db'; -import { HTTP400 } from '../../../../../errors/custom-error'; -import { queries } from '../../../../../queries/queries'; +import { getDBConnection } from '../../../../../database/db'; import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; +import { AttachmentService } from '../../../../../services/attachment-service'; +import { PlatformService } from '../../../../../services/platform-service'; +import { SurveyService } from '../../../../../services/survey-service'; import { deleteFileFromS3 } from '../../../../../utils/file-utils'; import { getLogger } from '../../../../../utils/logger'; @@ -76,11 +77,8 @@ DELETE.apiDoc = { export function deleteSurvey(): RequestHandler { return async (req, res) => { - defaultLog.debug({ label: 'Delete survey', message: 'params', req_params: req.params }); - - if (!req.params.surveyId) { - throw new HTTP400('Missing required path param `surveyId`'); - } + const projectId = Number(req.params.projectId); + const surveyId = Number(req.params.surveyId); const connection = getDBConnection(req['keycloak_token']); @@ -91,15 +89,17 @@ export function deleteSurvey(): RequestHandler { * Get the attachment S3 keys for all attachments associated to this survey * Used to delete them from S3 separately later */ - const surveyAttachmentS3Keys: string[] = await getSurveyAttachmentS3Keys(Number(req.params.surveyId), connection); + const attachmentService = new AttachmentService(connection); + + const surveyAttachments = await attachmentService.getSurveyAttachments(surveyId); + const surveyAttachmentS3Keys: string[] = surveyAttachments.map((attachment) => attachment.key); /** * PART 2 * Delete the survey and all associated records/resources from our DB */ - const deleteSurveySQLStatement = queries.survey.deleteSurveySQL(Number(req.params.surveyId)); - - await connection.query(deleteSurveySQLStatement.text, deleteSurveySQLStatement.values); + const surveyService = new SurveyService(connection); + await surveyService.deleteSurvey(surveyId); /** * PART 3 @@ -111,6 +111,14 @@ export function deleteSurvey(): RequestHandler { return res.status(200).json(null); } + try { + const platformService = new PlatformService(connection); + await platformService.submitDwCAMetadataPackage(projectId); + } catch (error) { + // Don't fail the rest of the endpoint if submitting metadata fails + defaultLog.error({ label: 'deleteSurvey->submitDwCAMetadataPackage', message: 'error', error }); + } + await connection.commit(); return res.status(200).json(true); @@ -123,22 +131,3 @@ export function deleteSurvey(): RequestHandler { } }; } - -export const getSurveyAttachmentS3Keys = async (surveyId: number, connection: IDBConnection) => { - const getSurveyAttachmentSQLStatement = queries.survey.getSurveyAttachmentsSQL(surveyId); - - if (!getSurveyAttachmentSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const getResult = await connection.query( - getSurveyAttachmentSQLStatement.text, - getSurveyAttachmentSQLStatement.values - ); - - if (!getResult || !getResult.rows) { - throw new HTTP400('Failed to get survey attachments'); - } - - return getResult.rows.map((attachment: any) => attachment.key); -}; diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/get.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/get.test.ts index 8e967cdf57..3e8e3d7d89 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/get.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/get.test.ts @@ -1,11 +1,20 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; +import OpenAPIRequestValidator, { OpenAPIRequestValidatorArgs } from 'openapi-request-validator'; +import OpenAPIResponseValidator, { OpenAPIResponseValidatorArgs } from 'openapi-response-validator'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; +import { + MESSAGE_CLASS_NAME, + SUBMISSION_MESSAGE_TYPE, + SUBMISSION_STATUS_TYPE +} from '../../../../../../../constants/status'; import * as db from '../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../../queries/survey'; +import { + IGetLatestSurveyOccurrenceSubmission, + SurveyRepository +} from '../../../../../../../repositories/survey-repository'; +import { SurveyService } from '../../../../../../../services/survey-service'; import { getMockDBConnection } from '../../../../../../../__mocks__/db'; import * as observationSubmission from './get'; @@ -39,69 +48,791 @@ describe('getObservationSubmission', () => { sinon.restore(); }); - it('should throw a 400 error when no surveyId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = observationSubmission.getOccurrenceSubmission(); - await result( - { ...sampleReq, params: { ...sampleReq.params, surveyId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `surveyId`'); - } - }); + describe('openApiScheme', () => { + const requestSchema = (observationSubmission.GET.apiDoc as unknown) as OpenAPIRequestValidatorArgs; + const responseSchema = (observationSubmission.GET.apiDoc as unknown) as OpenAPIResponseValidatorArgs; + + describe('request validation', () => { + const requestValidator = new OpenAPIRequestValidator(requestSchema); + + describe('should throw an error when', () => { + describe('projectId', () => { + it('is missing', () => { + const request = { + headers: { 'content-type': 'application/json' }, + params: { + surveyId: 5 + } + }; + + const response = requestValidator.validateRequest(request); + expect(response.status).to.equal(400); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('projectId'); + expect(response.errors[0].message).to.equal("must have required property 'projectId'"); + }); + + it('is null', () => { + const request = { + headers: { 'content-type': 'application/json' }, + params: { + projectId: null, + surveyId: 5 + } + }; + + const response = requestValidator.validateRequest(request); + expect(response.status).to.equal(400); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('projectId'); + expect(response.errors[0].message).to.equal('must be number'); + }); + + it('is not a number', () => { + const request = { + headers: { 'content-type': 'application/json' }, + params: { + projectId: '12', + surveyId: 5 + } + }; + + const response = requestValidator.validateRequest(request); + expect(response.status).to.equal(400); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('projectId'); + expect(response.errors[0].message).to.equal('must be number'); + }); + + it('is less than 1', () => { + const request = { + headers: { 'content-type': 'application/json' }, + params: { + projectId: 0, + surveyId: 5 + } + }; + + const response = requestValidator.validateRequest(request); + expect(response.status).to.equal(400); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('projectId'); + expect(response.errors[0].message).to.equal('must be >= 1'); + }); + }); + + describe('surveyId', () => { + it('is missing', () => { + const request = { + headers: { 'content-type': 'application/json' }, + params: { + projectId: 2 + } + }; + + const response = requestValidator.validateRequest(request); + expect(response.status).to.equal(400); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('surveyId'); + expect(response.errors[0].message).to.equal("must have required property 'surveyId'"); + }); + + it('is null', () => { + const request = { + headers: { 'content-type': 'application/json' }, + params: { + projectId: 2, + surveyId: null + } + }; + + const response = requestValidator.validateRequest(request); + expect(response.status).to.equal(400); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('surveyId'); + expect(response.errors[0].message).to.equal('must be number'); + }); + + it('is not a number', () => { + const request = { + headers: { 'content-type': 'application/json' }, + params: { + projectId: 2, + surveyId: '15' + } + }; + + const response = requestValidator.validateRequest(request); + expect(response.status).to.equal(400); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('surveyId'); + expect(response.errors[0].message).to.equal('must be number'); + }); + + it('is less than 1', () => { + const request = { + headers: { 'content-type': 'application/json' }, + params: { + projectId: 2, + surveyId: 0 + } + }; + + const response = requestValidator.validateRequest(request); + + expect(response.status).to.equal(400); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('surveyId'); + expect(response.errors[0].message).to.equal('must be >= 1'); + }); + }); + }); - it('should throw a 400 error when no sql statement returned for getLatestSurveyOccurrenceSubmission', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); + describe('should succeed when', () => { + it('is provided with valid params', () => { + const request = { + headers: { 'content-type': 'application/json' }, + params: { + projectId: 2, + surveyId: 5 + } + }; - sinon.stub(survey_queries, 'getLatestSurveyOccurrenceSubmissionSQL').returns(null); + const response = requestValidator.validateRequest(request); - try { - const result = observationSubmission.getOccurrenceSubmission(); + expect(response).to.equal(undefined); + }); + }); + }); - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal( - 'Failed to build SQL getLatestSurveyOccurrenceSubmissionSQL statement' - ); - } - }); + describe('response validation', () => { + const responseValidator = new OpenAPIResponseValidator(responseSchema); + + describe('should throw an error when', () => { + it('returns a non-object response', () => { + const apiResponse = 'test-response'; + const response = responseValidator.validateResponse(200, apiResponse); + + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('response'); + expect(response.errors[0].message).to.equal('must be object,null'); + }); + + describe('id', () => { + it('is missing', () => { + const apiResponse = { + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('response'); + expect(response.errors[0].message).to.equal("must have required property 'id'"); + }); + + it('is null', () => { + const apiResponse = { + id: null, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors[0].path).to.equal('id'); + expect(response.errors[0].message).to.equal('must be number'); + }); + + it('is not a number', () => { + const apiResponse = { + id: '12', + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors[0].path).to.equal('id'); + expect(response.errors[0].message).to.equal('must be number'); + }); + + it('is less than 1', () => { + const apiResponse = { + id: 0, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors[0].path).to.equal('id'); + expect(response.errors[0].message).to.equal('must be >= 1'); + }); + }); + + describe('inputFileName', () => { + it('is missing', () => { + const apiResponse = { + id: 1, + status: 'validation-status', + isValidating: false, + messageTypes: [] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('response'); + expect(response.errors[0].message).to.equal("must have required property 'inputFileName'"); + }); + + it('is null', () => { + const apiResponse = { + id: 1, + inputFileName: null, + status: 'validation-status', + isValidating: false, + messageTypes: [] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('inputFileName'); + expect(response.errors[0].message).to.equal('must be string'); + }); + + it('is not a string', () => { + const apiResponse = { + id: 1, + inputFileName: { filename: 'filename' }, + status: 'validation-status', + isValidating: false, + messageTypes: [] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('inputFileName'); + expect(response.errors[0].message).to.equal('must be string'); + }); + }); + + describe('status', () => { + it('is missing', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + isValidating: false, + messageTypes: [] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('response'); + expect(response.errors[0].message).to.equal("must have required property 'status'"); + }); + + it('is not a string', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: { status: 'status' }, + isValidating: false, + messageTypes: [] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('status'); + expect(response.errors[0].message).to.equal('must be string,null'); + }); + }); + + describe('isValidating', () => { + it('is missing', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + messageTypes: [] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('response'); + expect(response.errors[0].message).to.equal("must have required property 'isValidating'"); + }); + + it('is not a bool', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: 'true', + messageTypes: [] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('isValidating'); + expect(response.errors[0].message).to.equal('must be boolean'); + }); + }); + + describe('messageTypes', () => { + it('is missing', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + isValidating: false, + status: 'validation-status' + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('response'); + expect(response.errors[0].message).to.equal("must have required property 'messageTypes'"); + }); + + it('is not an array', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: 'message-types' + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes'); + expect(response.errors[0].message).to.equal('must be array'); + }); + + describe('messageType', () => { + it('is not an object', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: ['message-type'] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0'); + expect(response.errors[0].message).to.equal('must be object'); + }); + + describe('severityLabel', () => { + it('is missing', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + messageTypeLabel: 'type-label', + messageStatus: 'message-status', + messages: [] + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0'); + expect(response.errors[0].message).to.equal("must have required property 'severityLabel'"); + }); + + it('is not a string', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + severityLabel: { label: 'label ' }, + messageTypeLabel: 'type-label', + messageStatus: 'message-status', + messages: [] + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0/severityLabel'); + expect(response.errors[0].message).to.equal('must be string'); + }); + }); + + describe('messageStatus', () => { + it('is missing', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + severityLabel: 'severity-label', + messageStatus: 'message-status', + messages: [] + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0'); + expect(response.errors[0].message).to.equal("must have required property 'messageTypeLabel'"); + }); + + it('is not a string', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + severityLabel: 'severity-label', + messageTypeLabel: { label: 'label ' }, + messageStatus: 'message-status', + messages: [] + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0/messageTypeLabel'); + expect(response.errors[0].message).to.equal('must be string'); + }); + }); + + describe('messageStatus', () => { + it('is missing', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + severityLabel: 'severity-label', + messageTypeLabel: 'type-label', + messages: [] + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0'); + expect(response.errors[0].message).to.equal("must have required property 'messageStatus'"); + }); + + it('is not a string', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + severityLabel: 'severity-label', + messageTypeLabel: 'type-label', + messageStatus: { status: 'status' }, + messages: [] + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0/messageStatus'); + expect(response.errors[0].message).to.equal('must be string'); + }); + }); + + describe('messages', () => { + it('is missing', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + severityLabel: 'severity-label', + messageTypeLabel: 'type-label', + messageStatus: 'message-status' + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0'); + expect(response.errors[0].message).to.equal("must have required property 'messages'"); + }); + + it('is not an array', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + severityLabel: 'severity-label', + messageTypeLabel: 'type-label', + messageStatus: 'message-status', + messages: 'messages' + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0/messages'); + expect(response.errors[0].message).to.equal('must be array'); + }); + + describe('message', () => { + it('is not an object', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + severityLabel: 'severity-label', + messageTypeLabel: 'type-label', + messageStatus: 'message-status', + messages: ['messages'] + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0/messages/0'); + expect(response.errors[0].message).to.equal('must be object'); + }); + + it('id is missing', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + severityLabel: 'severity-label', + messageTypeLabel: 'type-label', + messageStatus: 'message-status', + messages: [ + { + message: 'test-message' + } + ] + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0/messages/0'); + expect(response.errors[0].message).to.equal("must have required property 'id'"); + }); + + it('id is not number', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + severityLabel: 'severity-label', + messageTypeLabel: 'type-label', + messageStatus: 'message-status', + messages: [ + { + id: '12', + message: 'test-message' + } + ] + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0/messages/0/id'); + expect(response.errors[0].message).to.equal('must be number'); + }); + + it('message is missing', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + severityLabel: 'severity-label', + messageTypeLabel: 'type-label', + messageStatus: 'message-status', + messages: [ + { + id: 1 + } + ] + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0/messages/0'); + expect(response.errors[0].message).to.equal("must have required property 'message'"); + }); + + it('message is not string', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + severityLabel: 'severity-label', + messageTypeLabel: 'type-label', + messageStatus: 'message-status', + messages: [ + { + id: 1, + message: { test: 'test-message' } + } + ] + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].path).to.equal('messageTypes/0/messages/0/message'); + expect(response.errors[0].message).to.equal('must be string'); + }); + }); + }); + }); + }); + }); - it('should return an observation submission, on success with no rejected files', async () => { - const mockQuery = sinon.stub(); + describe('should succeed when', () => { + it('returns a null response', () => { + const apiResponse = null; - mockQuery.resolves({ - rows: [ - { - id: 13, - input_file_name: 'dwca_moose.zip', - submission_status_type_name: 'Darwin Core Validated', - messages: [{}] - } - ] + const response = responseValidator.validateResponse(200, apiResponse); + expect(response).to.equal(undefined); + }); + + it('status is null', () => { + const apiResponse = { + id: 1, + status: null, + inputFileName: 'filename.xlsx', + isValidating: false, + messageTypes: [] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response).to.equal(undefined); + }); + + it('has valid response values', () => { + const apiResponse = { + id: 1, + inputFileName: 'filename.xlsx', + status: 'validation-status', + isValidating: false, + messageTypes: [ + { + severityLabel: 'severity-label', + messageTypeLabel: 'type-label', + messageStatus: 'message-status', + messages: [ + { + id: 1, + message: 'test-message' + } + ] + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response).to.equal(undefined); + }); + }); }); + }); + it('should return an observation submission, on success with no rejected files', async () => { sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(survey_queries, 'getLatestSurveyOccurrenceSubmissionSQL').returns(SQL`something`); + sinon.stub(SurveyService.prototype, 'getLatestSurveyOccurrenceSubmission').resolves(({ + id: 13, + input_file_name: 'dwca_moose.zip', + submission_status_type_name: 'Darwin Core Validated' + } as unknown) as IGetLatestSurveyOccurrenceSubmission); const result = observationSubmission.getOccurrenceSubmission(); @@ -111,93 +842,41 @@ describe('getObservationSubmission', () => { id: 13, inputFileName: 'dwca_moose.zip', status: 'Darwin Core Validated', - messages: [] - }); - }); - - it('should throw a 400 error with rejected files when failed to getOccurrenceSubmissionMessagesSQL', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rows: [ - { - id: 13, - input_file_name: 'dwca_moose.zip', - message: 'some message', - submission_status_type_name: 'Rejected' - } - ] + isValidating: false, + messageTypes: [] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'getLatestSurveyOccurrenceSubmissionSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'getOccurrenceSubmissionMessagesSQL').returns(null); - - try { - const result = observationSubmission.getOccurrenceSubmission(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal( - 'Failed to build SQL getOccurrenceSubmissionMessagesSQL statement' - ); - } }); it('should return an observation submission on success, with rejected files', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ - rows: [ - { - id: 13, - input_file_name: 'dwca_moose.zip', - messages: [], - submission_status_type_name: 'Rejected' - } - ] - }) - .onSecondCall() - .resolves({ - rows: [ - { - errorCode: 'Missing Required Header', - id: 1, - message: 'occurrence.txt - Missing Required Header - associatedTaxa - Missing required header', - status: 'Rejected', - type: 'Error' - }, - { - errorCode: 'Missing Required Header', - id: 2, - message: 'occurrence.txt - Missing Required Header - associatedTaxa - Missing required header', - status: 'Rejected', - type: 'Error' - } - ] - }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(survey_queries, 'getLatestSurveyOccurrenceSubmissionSQL').returns(SQL`something`); - sinon.stub(survey_queries, 'getOccurrenceSubmissionMessagesSQL').returns(SQL`something`); + sinon.stub(SurveyService.prototype, 'getLatestSurveyOccurrenceSubmission').resolves(({ + id: 13, + input_file_name: 'dwca_moose.zip', + submission_status_type_name: 'Rejected' + } as unknown) as IGetLatestSurveyOccurrenceSubmission); + + sinon.stub(SurveyRepository.prototype, 'getOccurrenceSubmissionMessages').resolves([ + { + id: 1, + message: 'occurrence.txt - Missing Required Header - associatedTaxa - Missing required header', + status: SUBMISSION_STATUS_TYPE.REJECTED, + type: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_HEADER, + class: MESSAGE_CLASS_NAME.ERROR + }, + { + id: 2, + message: 'occurrence.txt - Missing Required Header - associatedTaxa - Missing required header', + status: SUBMISSION_STATUS_TYPE.REJECTED, + type: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_HEADER, + class: MESSAGE_CLASS_NAME.ERROR + } + ]); const result = observationSubmission.getOccurrenceSubmission(); @@ -207,39 +886,38 @@ describe('getObservationSubmission', () => { id: 13, inputFileName: 'dwca_moose.zip', status: 'Rejected', - messages: [ + isValidating: false, + messageTypes: [ { - errorCode: 'Missing Required Header', - id: 1, - message: 'occurrence.txt - Missing Required Header - associatedTaxa - Missing required header', - status: 'Rejected', - type: 'Error' - }, - { - errorCode: 'Missing Required Header', - id: 2, - message: 'occurrence.txt - Missing Required Header - associatedTaxa - Missing required header', - status: 'Rejected', - type: 'Error' + severityLabel: 'Error', + messageTypeLabel: 'Missing Required Header', + messageStatus: 'Rejected', + messages: [ + { + id: 1, + message: 'occurrence.txt - Missing Required Header - associatedTaxa - Missing required header' + }, + { + id: 2, + message: 'occurrence.txt - Missing Required Header - associatedTaxa - Missing required header' + } + ] } ] }); }); it('should return null if the survey has no observation submission, on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: undefined }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(survey_queries, 'getLatestSurveyOccurrenceSubmissionSQL').returns(SQL`something`); + sinon + .stub(SurveyService.prototype, 'getLatestSurveyOccurrenceSubmission') + .resolves(({ delete_timestamp: true } as unknown) as IGetLatestSurveyOccurrenceSubmission); const result = observationSubmission.getOccurrenceSubmission(); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/get.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/get.ts index a65ddc22f0..b7853cc5f9 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/get.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/get.ts @@ -1,10 +1,10 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../../../constants/roles'; +import { SUBMISSION_STATUS_TYPE } from '../../../../../../../constants/status'; import { getDBConnection } from '../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../errors/custom-error'; -import { queries } from '../../../../../../../queries/queries'; import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; +import { IMessageTypeGroup, SurveyService } from '../../../../../../../services/survey-service'; import { getLogger } from '../../../../../../../utils/logger'; const defaultLog = getLogger('/api/project/{projectId}/survey/{surveyId}/observation/submission/get'); @@ -37,7 +37,8 @@ GET.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'number', + minimum: 1 }, required: true }, @@ -45,7 +46,8 @@ GET.apiDoc = { in: 'path', name: 'surveyId', schema: { - type: 'number' + type: 'number', + minimum: 1 }, required: true } @@ -58,9 +60,11 @@ GET.apiDoc = { schema: { type: 'object', nullable: true, + required: ['id', 'inputFileName', 'status', 'isValidating', 'messageTypes'], properties: { id: { - type: 'number' + type: 'number', + minimum: 1 }, inputFileName: { description: 'The file name of the submission', @@ -71,12 +75,50 @@ GET.apiDoc = { nullable: true, type: 'string' }, - messages: { - description: 'The validation status messages of the observation submission', + isValidating: { + description: 'True if the submission has not yet been validated, false otherwise', + type: 'boolean' + }, + messageTypes: { + description: 'An array containing all submission messages grouped by message type', type: 'array', items: { type: 'object', - description: 'A validation status message of the observation submission' + required: ['severityLabel', 'messageTypeLabel', 'messageStatus', 'messages'], + properties: { + severityLabel: { + type: 'string', + description: + 'The label of the "class" or severity of this type of message, e.g. "Error", "Warning", "Notice", etc.' + }, + messageTypeLabel: { + type: 'string', + description: 'The name of the type of error pertaining to this submission' + }, + messageStatus: { + type: 'string', + description: 'The resulting status of the submission as a consequence of the error' + }, + messages: { + type: 'array', + description: 'The array of submission messages belonging to this type of message', + items: { + type: 'object', + description: 'A submission message object belonging to a particular message type group', + required: ['id', 'message'], + properties: { + id: { + type: 'number', + description: 'The ID of this submission message' + }, + message: { + type: 'string', + description: 'The actual message which describes the concern in detail' + } + } + } + } + } } } } @@ -104,77 +146,60 @@ GET.apiDoc = { export function getOccurrenceSubmission(): RequestHandler { return async (req, res) => { - defaultLog.debug({ label: 'Get an occurrence submission', message: 'params', req_params: req.params }); - - if (!req.params.surveyId) { - throw new HTTP400('Missing required path param `surveyId`'); - } + defaultLog.debug({ + label: 'getOccurrenceSubmission', + description: 'Gets an occurrence submission', + req_params: req.params + }); const connection = getDBConnection(req['keycloak_token']); try { - const getOccurrenceSubmissionSQLStatement = queries.survey.getLatestSurveyOccurrenceSubmissionSQL( - Number(req.params.surveyId) - ); - - if (!getOccurrenceSubmissionSQLStatement) { - throw new HTTP400('Failed to build SQL getLatestSurveyOccurrenceSubmissionSQL statement'); - } - await connection.open(); - const occurrenceSubmissionData = await connection.query( - getOccurrenceSubmissionSQLStatement.text, - getOccurrenceSubmissionSQLStatement.values - ); + const surveyService = new SurveyService(connection); + const occurrenceSubmission = await surveyService.getLatestSurveyOccurrenceSubmission(Number(req.params.surveyId)); - // Ensure we only retrieve the latest occurrence submission record if it has not been soft deleted - if ( - !occurrenceSubmissionData || - !occurrenceSubmissionData.rows || - !occurrenceSubmissionData.rows[0] || - occurrenceSubmissionData.rows[0].delete_timestamp - ) { + if (!occurrenceSubmission || occurrenceSubmission.delete_timestamp) { + // Ensure we only retrieve the latest occurrence submission record if it has not been soft deleted return res.status(200).json(null); } - let messageList: any[] = []; - - const errorStatus = occurrenceSubmissionData.rows[0].submission_status_type_name; - - if (errorStatus === 'Rejected' || errorStatus === 'System Error') { - const occurrence_submission_id = occurrenceSubmissionData.rows[0].id; - - const getSubmissionErrorListSQLStatement = queries.survey.getOccurrenceSubmissionMessagesSQL( - Number(occurrence_submission_id) - ); - - if (!getSubmissionErrorListSQLStatement) { - throw new HTTP400('Failed to build SQL getOccurrenceSubmissionMessagesSQL statement'); - } - - const submissionErrorListData = await connection.query( - getSubmissionErrorListSQLStatement.text, - getSubmissionErrorListSQLStatement.values - ); - - messageList = (submissionErrorListData && submissionErrorListData.rows) || []; - } - - await connection.commit(); - - const getOccurrenceSubmissionData = - (occurrenceSubmissionData && - occurrenceSubmissionData.rows && - occurrenceSubmissionData.rows[0] && { - id: occurrenceSubmissionData.rows[0].id, - inputFileName: occurrenceSubmissionData.rows[0].input_file_name, - status: occurrenceSubmissionData.rows[0].submission_status_type_name, - messages: messageList - }) || - null; - - return res.status(200).json(getOccurrenceSubmissionData); + const willFetchAdditionalMessages = + occurrenceSubmission.submission_status_type_name && + [ + // Submission statuses for validation/transformation failure + SUBMISSION_STATUS_TYPE.SYSTEM_ERROR, + SUBMISSION_STATUS_TYPE.FAILED_OCCURRENCE_PREPARATION, + SUBMISSION_STATUS_TYPE.FAILED_VALIDATION, + SUBMISSION_STATUS_TYPE.FAILED_TRANSFORMED, + SUBMISSION_STATUS_TYPE.FAILED_PROCESSING_OCCURRENCE_DATA, + SUBMISSION_STATUS_TYPE['AWAITING CURRATION'], + SUBMISSION_STATUS_TYPE.REJECTED, + SUBMISSION_STATUS_TYPE['ON HOLD'] + ].includes(occurrenceSubmission.submission_status_type_name); + + const isDoneValidating = + willFetchAdditionalMessages || + (occurrenceSubmission.submission_status_type_name && + [ + // Submission statuses for validation completion + SUBMISSION_STATUS_TYPE.TEMPLATE_VALIDATED, + SUBMISSION_STATUS_TYPE.DARWIN_CORE_VALIDATED, + SUBMISSION_STATUS_TYPE.TEMPLATE_TRANSFORMED + ].includes(occurrenceSubmission.submission_status_type_name)); + + const messageTypes: IMessageTypeGroup[] = willFetchAdditionalMessages + ? await surveyService.getOccurrenceSubmissionMessages(Number(occurrenceSubmission.id)) + : []; + + return res.status(200).json({ + id: occurrenceSubmission.id, + inputFileName: occurrenceSubmission.input_file_name, + status: occurrenceSubmission.submission_status_type_name || null, + isValidating: !isDoneValidating, + messageTypes + }); } catch (error) { defaultLog.error({ label: 'getOccurrenceSubmission', message: 'error', error }); await connection.rollback(); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/upload.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/upload.test.ts index 91b89c3662..6e7f08e44a 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/upload.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/upload.test.ts @@ -2,10 +2,9 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../../queries/survey'; +import { HTTPError } from '../../../../../../../errors/http-error'; +import { SurveyService } from '../../../../../../../services/survey-service'; import * as file_utils from '../../../../../../../utils/file-utils'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../__mocks__/db'; import * as upload from './upload'; @@ -203,11 +202,13 @@ describe('uploadObservationSubmission', () => { systemUserId: () => { return 20; }, - query: mockQuery + knex: mockQuery }); sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - sinon.stub(survey_queries, 'insertSurveyOccurrenceSubmissionSQL').returns(SQL`some query`); + sinon + .stub(SurveyService.prototype, 'insertSurveyOccurrenceSubmission') + .resolves({ submissionId: (undefined as unknown) as number }); const requestHandler = upload.uploadMedia(); @@ -220,52 +221,6 @@ describe('uploadObservationSubmission', () => { } }); - it('should throw a 400 error when it fails to get the update SQL', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '2' - }; - mockReq.files = [ - { - fieldname: 'media', - originalname: 'test.txt', - encoding: '7bit', - mimetype: 'text/plain', - size: 340 - } - ] as any; - - const mockQuery = sinon.stub(); - - mockQuery.onCall(0).resolves({ rowCount: 1, rows: [{ id: 1 }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - sinon.stub(survey_queries, 'insertSurveyOccurrenceSubmissionSQL').returns(SQL`some query`); - sinon.stub(survey_queries, 'updateSurveyOccurrenceSubmissionSQL').returns(null); - - const requestHandler = upload.uploadMedia(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL update statement'); - } - }); - it('should throw a 400 error when it fails to get the update the record in the database', async () => { const dbConnectionObj = getMockDBConnection(); @@ -287,20 +242,18 @@ describe('uploadObservationSubmission', () => { const mockQuery = sinon.stub(); - mockQuery.onCall(0).resolves({ rowCount: 1, rows: [{ id: 1 }] }); - mockQuery.onCall(1).resolves(null); + mockQuery.onCall(0).resolves({ rowCount: 1, rows: [{ submissionId: 1 }] }); + mockQuery.onCall(1).resolves({ rowCount: 1, rows: [{ submissionId: undefined }] }); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; }, - query: mockQuery + knex: mockQuery }); sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - sinon.stub(survey_queries, 'insertSurveyOccurrenceSubmissionSQL').returns(SQL`some query`); - sinon.stub(survey_queries, 'updateSurveyOccurrenceSubmissionSQL').returns(SQL`some query`); const requestHandler = upload.uploadMedia(); @@ -334,20 +287,18 @@ describe('uploadObservationSubmission', () => { const mockQuery = sinon.stub(); - mockQuery.onCall(0).resolves({ rowCount: 1, rows: [{ id: 1 }] }); - mockQuery.onCall(1).resolves({ rowCount: 1, rows: [{ id: 1 }] }); + mockQuery.onCall(0).resolves({ rowCount: 1, rows: [{ submissionId: 1 }] }); + mockQuery.onCall(1).resolves({ rowCount: 1, rows: [{ submissionId: 1 }] }); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; }, - query: mockQuery + knex: mockQuery }); sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - sinon.stub(survey_queries, 'insertSurveyOccurrenceSubmissionSQL').returns(SQL`some query`); - sinon.stub(survey_queries, 'updateSurveyOccurrenceSubmissionSQL').returns(SQL`some query`); sinon.stub(file_utils, 'uploadFileToS3').rejects('Failed to insert occurrence submission data'); const requestHandler = upload.uploadMedia(); @@ -385,21 +336,19 @@ describe('uploadObservationSubmission', () => { const mockQuery = sinon.stub(); - mockQuery.onCall(0).resolves({ rowCount: 1, rows: [{ id: 1 }] }); - mockQuery.onCall(1).resolves({ rowCount: 1, rows: [{ id: 1 }] }); - mockQuery.onCall(2).resolves({ rowCount: 1, rows: [{ id: 1 }] }); + mockQuery.onCall(0).resolves({ rowCount: 1, rows: [{ submissionId: 1 }] }); + mockQuery.onCall(1).resolves({ rowCount: 1, rows: [{ submissionId: 1 }] }); + mockQuery.onCall(2).resolves({ rowCount: 1, rows: [{ submissionId: 1 }] }); sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; }, - query: mockQuery + knex: mockQuery }); sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - sinon.stub(survey_queries, 'insertSurveyOccurrenceSubmissionSQL').returns(SQL`some query`); - sinon.stub(survey_queries, 'updateSurveyOccurrenceSubmissionSQL').returns(SQL`some query`); sinon.stub(file_utils, 'uploadFileToS3').resolves({ key: 'projects/1/surveys/1/test.txt' } as any); const requestHandler = upload.uploadMedia(); @@ -407,49 +356,4 @@ describe('uploadObservationSubmission', () => { await requestHandler(mockReq, mockRes, mockNext); expect(mockRes.statusValue).to.equal(200); }); - - it('should throw a 400 error when it fails to get the insertSurveyOccurrenceSubmissionSQL SQL', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '2' - }; - mockReq.files = [ - { - fieldname: 'media', - originalname: 'test.txt', - encoding: '7bit', - mimetype: 'text/plain', - size: 340 - } - ] as any; - - const mockQuery = sinon.stub(); - - mockQuery.onCall(0).resolves({ rowCount: 1, rows: [{ id: 1 }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - sinon.stub(survey_queries, 'insertSurveyOccurrenceSubmissionSQL').returns(null); - - const requestHandler = upload.uploadMedia(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL insert statement'); - } - }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/upload.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/upload.ts index cfe15f1a84..4555db88d3 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/upload.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/upload.ts @@ -2,9 +2,9 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../../../constants/roles'; import { getDBConnection, IDBConnection } from '../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../errors/custom-error'; -import { queries } from '../../../../../../../queries/queries'; +import { HTTP400 } from '../../../../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; +import { SurveyService } from '../../../../../../../services/survey-service'; import { generateS3FileKey, scanFileForVirus, uploadFileToS3 } from '../../../../../../../utils/file-utils'; import { getLogger } from '../../../../../../../utils/logger'; @@ -152,7 +152,7 @@ export function uploadMedia(): RequestHandler { connection ); - const submissionId = response.rows[0].id; + const { submissionId } = response; const inputKey = generateS3FileKey({ projectId: Number(req.params.projectId), @@ -199,24 +199,20 @@ export const insertSurveyOccurrenceSubmission = async ( source: string, inputFileName: string, connection: IDBConnection -): Promise => { - const insertSqlStatement = queries.survey.insertSurveyOccurrenceSubmissionSQL({ +): Promise<{ submissionId: number }> => { + const surveyService = new SurveyService(connection); + + const response = await surveyService.insertSurveyOccurrenceSubmission({ surveyId, source, inputFileName }); - if (!insertSqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const insertResponse = await connection.query(insertSqlStatement.text, insertSqlStatement.values); - - if (!insertResponse.rowCount) { + if (!response.submissionId) { throw new HTTP400('Failed to insert survey occurrence submission record'); } - return insertResponse; + return response; }; /** @@ -231,18 +227,14 @@ export const updateSurveyOccurrenceSubmissionWithKey = async ( submissionId: number, inputKey: string, connection: IDBConnection -): Promise => { - const updateSqlStatement = queries.survey.updateSurveyOccurrenceSubmissionSQL({ submissionId, inputKey }); - - if (!updateSqlStatement) { - throw new HTTP400('Failed to build SQL update statement'); - } +): Promise<{ submissionId: number }> => { + const surveyService = new SurveyService(connection); - const updateResponse = await connection.query(updateSqlStatement.text, updateSqlStatement.values); + const response = await surveyService.updateSurveyOccurrenceSubmission({ submissionId, inputKey }); - if (!updateResponse || !updateResponse.rowCount) { + if (!response.submissionId) { throw new HTTP400('Failed to update survey occurrence submission record'); } - return updateResponse; + return response; }; diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/delete.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/delete.test.ts index ecafd04d1e..d914bea946 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/delete.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/delete.test.ts @@ -2,18 +2,14 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../../../queries/survey'; +import { OccurrenceService } from '../../../../../../../../services/occurrence-service'; import { getMockDBConnection } from '../../../../../../../../__mocks__/db'; import * as delete_submission from './delete'; chai.use(sinonChai); describe('deleteOccurrenceSubmission', () => { - const dbConnectionObj = getMockDBConnection(); - const sampleReq = { keycloak_token: {}, params: { @@ -39,119 +35,31 @@ describe('deleteOccurrenceSubmission', () => { sinon.restore(); }); - it('should throw a 400 error when no projectId is provided', async () => { + it('should return false if no rows were deleted', async () => { + const dbConnectionObj = getMockDBConnection(); sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - try { - const result = delete_submission.deleteOccurrenceSubmission(); - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw a 400 error when no surveyId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = delete_submission.deleteOccurrenceSubmission(); - await result( - { ...sampleReq, params: { ...sampleReq.params, surveyId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `surveyId`'); - } - }); - - it('should throw a 400 error when no submissionId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = delete_submission.deleteOccurrenceSubmission(); - await result( - { ...sampleReq, params: { ...sampleReq.params, submissionId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `submissionId`'); - } - }); - - it('should throw a 400 error when no sql statement returned for deleteOccurrenceSubmissionSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(survey_queries, 'deleteOccurrenceSubmissionSQL').returns(null); - - try { - const result = delete_submission.deleteOccurrenceSubmission(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL delete statement'); - } - }); - - it('should return null when no rowCount', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rowCount: null }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(survey_queries, 'deleteOccurrenceSubmissionSQL').returns(SQL`something`); + sinon.stub(OccurrenceService.prototype, 'deleteOccurrenceSubmission').resolves([]); const result = delete_submission.deleteOccurrenceSubmission(); await result(sampleReq, sampleRes as any, (null as unknown) as any); - expect(actualResult).to.equal(null); + expect(actualResult).to.equal(false); }); - it('should return rowCount on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rowCount: 1 }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); + it('should return true if occurrence submission was deleted', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(survey_queries, 'deleteOccurrenceSubmissionSQL').returns(SQL`something`); + sinon + .stub(OccurrenceService.prototype, 'deleteOccurrenceSubmission') + .resolves([{ submission_spatial_component_id: 1 }]); const result = delete_submission.deleteOccurrenceSubmission(); await result(sampleReq, sampleRes as any, (null as unknown) as any); - expect(actualResult).to.equal(1); + expect(actualResult).to.equal(true); }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/delete.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/delete.ts index 52b77b1c24..32437b630f 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/delete.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/delete.ts @@ -2,9 +2,8 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../../errors/custom-error'; -import { queries } from '../../../../../../../../queries/queries'; import { authorizeRequestHandler } from '../../../../../../../../request-handlers/security/authorization'; +import { OccurrenceService } from '../../../../../../../../services/occurrence-service'; import { getLogger } from '../../../../../../../../utils/logger'; const defaultLog = getLogger('/api/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/delete'); @@ -60,12 +59,12 @@ DELETE.apiDoc = { ], responses: { 200: { - description: 'Observation submission csv details response.', + description: 'Boolean true value representing successful deletion.', content: { 'application/json': { schema: { - title: 'Row count of soft deleted records', - type: 'number' + title: 'Occurrence delete response', + type: 'boolean' } } } @@ -96,41 +95,18 @@ export function deleteOccurrenceSubmission(): RequestHandler { req_params: req.params }); - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.surveyId) { - throw new HTTP400('Missing required path param `surveyId`'); - } - - if (!req.params.submissionId) { - throw new HTTP400('Missing required path param `submissionId`'); - } - const connection = getDBConnection(req['keycloak_token']); try { - const deleteSubmissionSQLStatement = queries.survey.deleteOccurrenceSubmissionSQL( - Number(req.params.submissionId) - ); - - if (!deleteSubmissionSQLStatement) { - throw new HTTP400('Failed to build SQL delete statement'); - } - await connection.open(); - const deleteResult = await connection.query( - deleteSubmissionSQLStatement.text, - deleteSubmissionSQLStatement.values - ); + const occurrenceService = new OccurrenceService(connection); - await connection.commit(); + const response = await occurrenceService.deleteOccurrenceSubmission(Number(req.params.submissionId)); - const deleteResponse = (deleteResult && deleteResult.rowCount) || null; + await connection.commit(); - return res.status(200).json(deleteResponse); + return res.status(200).json(!!response.length); } catch (error) { defaultLog.error({ label: 'deleteOccurrenceSubmission', message: 'error', error }); await connection.rollback(); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/getSignedUrl.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/getSignedUrl.test.ts index 77d192997d..183d8a5e76 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/getSignedUrl.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/getSignedUrl.test.ts @@ -2,10 +2,10 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../../../queries/survey'; +import { HTTPError } from '../../../../../../../../errors/http-error'; +import { IOccurrenceSubmission } from '../../../../../../../../repositories/occurrence-repository'; +import { OccurrenceService } from '../../../../../../../../services/occurrence-service'; import * as file_utils from '../../../../../../../../utils/file-utils'; import { getMockDBConnection } from '../../../../../../../../__mocks__/db'; import * as get_signed_url from './getSignedUrl'; @@ -94,42 +94,18 @@ describe('getSingleSubmissionURL', () => { } }); - it('should throw a 400 error when no sql statement returned', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(survey_queries, 'getSurveyOccurrenceSubmissionSQL').returns(null); - - try { - const result = get_signed_url.getSingleSubmissionURL(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - } - }); - it('should return null when getting signed url from S3 fails', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [{ key: 's3Key' }] }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(survey_queries, 'getSurveyOccurrenceSubmissionSQL').returns(SQL`some query`); sinon.stub(file_utils, 'getS3SignedURL').resolves(null); + sinon + .stub(OccurrenceService.prototype, 'getOccurrenceSubmission') + .resolves(({ input_key: 'string' } as unknown) as IOccurrenceSubmission); const result = get_signed_url.getSingleSubmissionURL(); @@ -139,19 +115,17 @@ describe('getSingleSubmissionURL', () => { }); it('should return the signed url response on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [{ key: 's3Key' }] }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(survey_queries, 'getSurveyOccurrenceSubmissionSQL').returns(SQL`some query`); + sinon + .stub(OccurrenceService.prototype, 'getOccurrenceSubmission') + .resolves(({ input_key: 'string' } as unknown) as IOccurrenceSubmission); + sinon.stub(file_utils, 'getS3SignedURL').resolves('myurlsigned.com'); const result = get_signed_url.getSingleSubmissionURL(); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/getSignedUrl.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/getSignedUrl.ts index 4f02afb78c..3d0597c577 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/getSignedUrl.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/getSignedUrl.ts @@ -2,9 +2,9 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../../errors/custom-error'; -import { queries } from '../../../../../../../../queries/queries'; +import { HTTP400 } from '../../../../../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../../../../../request-handlers/security/authorization'; +import { OccurrenceService } from '../../../../../../../../services/occurrence-service'; import { getS3SignedURL } from '../../../../../../../../utils/file-utils'; import { getLogger } from '../../../../../../../../utils/logger'; import { attachmentApiDocObject } from '../../../../../../../../utils/shared-api-docs'; @@ -92,25 +92,14 @@ export function getSingleSubmissionURL(): RequestHandler { const connection = getDBConnection(req['keycloak_token']); try { - const getSurveyOccurrenceSubmissionSQLStatement = queries.survey.getSurveyOccurrenceSubmissionSQL( - Number(req.params.submissionId) - ); - - if (!getSurveyOccurrenceSubmissionSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - await connection.open(); + const occurrenceService = new OccurrenceService(connection); - const result = await connection.query( - getSurveyOccurrenceSubmissionSQLStatement.text, - getSurveyOccurrenceSubmissionSQLStatement.values - ); + const result = await occurrenceService.getOccurrenceSubmission(Number(req.params.submissionId)); await connection.commit(); - const s3Key = result && result.rows.length && result.rows[0].input_key; - const s3SignedUrl = await getS3SignedURL(s3Key); + const s3SignedUrl = await getS3SignedURL(result.input_key); if (!s3SignedUrl) { return res.status(200).json(null); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/view.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/view.test.ts index d1f0aa0c6a..7c2b4cb57a 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/view.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/view.test.ts @@ -3,10 +3,10 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../../../queries/survey'; +import { HTTPError } from '../../../../../../../../errors/http-error'; +import { IOccurrenceSubmission } from '../../../../../../../../repositories/occurrence-repository'; +import { OccurrenceService } from '../../../../../../../../services/occurrence-service'; import * as file_utils from '../../../../../../../../utils/file-utils'; import { ArchiveFile, MediaFile } from '../../../../../../../../utils/media/media-file'; import * as media_utils from '../../../../../../../../utils/media/media-utils'; @@ -44,99 +44,15 @@ describe('getObservationSubmissionCSVForView', () => { sinon.restore(); }); - it('should throw a 400 error when no projectId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = view.getObservationSubmissionCSVForView(); - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw a 400 error when no surveyId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = view.getObservationSubmissionCSVForView(); - await result( - { ...sampleReq, params: { ...sampleReq.params, surveyId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `surveyId`'); - } - }); - - it('should throw a 400 error when no submissionId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = view.getObservationSubmissionCSVForView(); - await result( - { ...sampleReq, params: { ...sampleReq.params, submissionId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `submissionId`'); - } - }); - - it('should throw a 400 error when no sql statement returned for getSurveyOccurrenceSubmissionSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(survey_queries, 'getSurveyOccurrenceSubmissionSQL').returns(null); - - try { - const result = view.getObservationSubmissionCSVForView(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - } - }); - it('should throw a 500 error when no s3 file fetched', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rows: [ - { - id: 13, - file_name: 'filename.txt' - } - ] - }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(survey_queries, 'getSurveyOccurrenceSubmissionSQL').returns(SQL`something`); + sinon.stub(OccurrenceService.prototype, 'getOccurrenceSubmission').resolves(); sinon.stub(file_utils, 'generateS3FileKey').resolves('validkey'); sinon.stub(file_utils, 'getFileFromS3').resolves((null as unknown) as GetObjectOutput); @@ -152,26 +68,17 @@ describe('getObservationSubmissionCSVForView', () => { }); it('should throw a 500 error when fails to parse media file', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rows: [ - { - id: 13, - file_name: 'filename.txt' - } - ] - }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(survey_queries, 'getSurveyOccurrenceSubmissionSQL').returns(SQL`something`); + sinon.stub(OccurrenceService.prototype, 'getOccurrenceSubmission').resolves(({ + id: 13, + file_name: 'filename.txt' + } as unknown) as IOccurrenceSubmission); sinon.stub(file_utils, 'generateS3FileKey').resolves('validkey'); sinon.stub(file_utils, 'getFileFromS3').resolves({ file: 'myfile' } as GetObjectOutput); sinon.stub(media_utils, 'parseUnknownMedia').returns(null); @@ -188,26 +95,17 @@ describe('getObservationSubmissionCSVForView', () => { }); it('should return data on success with xlsx file (empty)', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rows: [ - { - id: 13, - file_name: 'filename.txt' - } - ] - }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(survey_queries, 'getSurveyOccurrenceSubmissionSQL').returns(SQL`something`); + sinon.stub(OccurrenceService.prototype, 'getOccurrenceSubmission').resolves(({ + id: 13, + file_name: 'filename.txt' + } as unknown) as IOccurrenceSubmission); sinon.stub(file_utils, 'generateS3FileKey').resolves('validkey'); sinon.stub(file_utils, 'getFileFromS3').resolves({ file: 'myfile' } as GetObjectOutput); sinon @@ -224,26 +122,17 @@ describe('getObservationSubmissionCSVForView', () => { }); it('should return data on success with dwc file (empty)', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rows: [ - { - id: 13, - file_name: 'filename.txt' - } - ] - }); - sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { return 20; - }, - query: mockQuery + } }); - sinon.stub(survey_queries, 'getSurveyOccurrenceSubmissionSQL').returns(SQL`something`); + sinon.stub(OccurrenceService.prototype, 'getOccurrenceSubmission').resolves(({ + id: 13, + file_name: 'filename.txt' + } as unknown) as IOccurrenceSubmission); sinon.stub(file_utils, 'generateS3FileKey').resolves('validkey'); sinon.stub(file_utils, 'getFileFromS3').resolves({ file: 'myfile' } as GetObjectOutput); sinon diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/view.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/view.ts index 402f344c91..fb620c5340 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/view.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/observation/submission/{submissionId}/view.ts @@ -2,9 +2,9 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../../../database/db'; -import { HTTP400, HTTP500 } from '../../../../../../../../errors/custom-error'; -import { queries } from '../../../../../../../../queries/queries'; +import { HTTP400, HTTP500 } from '../../../../../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../../../../../request-handlers/security/authorization'; +import { OccurrenceService } from '../../../../../../../../services/occurrence-service'; import { generateS3FileKey, getFileFromS3 } from '../../../../../../../../utils/file-utils'; import { getLogger } from '../../../../../../../../utils/logger'; import { DWCArchive } from '../../../../../../../../utils/media/dwc/dwc-archive-file'; @@ -42,7 +42,8 @@ GET.apiDoc = { in: 'path', name: 'projectId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -50,7 +51,8 @@ GET.apiDoc = { in: 'path', name: 'surveyId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true }, @@ -58,7 +60,8 @@ GET.apiDoc = { in: 'path', name: 'submissionId', schema: { - type: 'number' + type: 'integer', + minimum: 1 }, required: true } @@ -125,38 +128,18 @@ export function getObservationSubmissionCSVForView(): RequestHandler { return async (req, res) => { defaultLog.debug({ label: 'Get observation submission csv details', message: 'params', req_params: req.params }); - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.surveyId) { - throw new HTTP400('Missing required path param `surveyId`'); - } - - if (!req.params.submissionId) { - throw new HTTP400('Missing required path param `submissionId`'); - } - const connection = getDBConnection(req['keycloak_token']); try { - const getSubmissionSQLStatement = queries.survey.getSurveyOccurrenceSubmissionSQL( - Number(req.params.submissionId) - ); - - if (!getSubmissionSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - await connection.open(); - const submissionData = await connection.query(getSubmissionSQLStatement.text, getSubmissionSQLStatement.values); + const occurrenceService = new OccurrenceService(connection); + + const result = await occurrenceService.getOccurrenceSubmission(Number(req.params.submissionId)); await connection.commit(); - const fileName = - (submissionData && submissionData.rows && submissionData.rows[0] && submissionData.rows[0].input_file_name) || - null; + const fileName = (result && result.input_file_name) || ''; const s3Key = generateS3FileKey({ projectId: Number(req.params.projectId), diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/publish.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/publish.test.ts deleted file mode 100644 index e7b6ade228..0000000000 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/publish.test.ts +++ /dev/null @@ -1,67 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; -import { SurveyService } from '../../../../../services/survey-service'; -import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../__mocks__/db'; -import { publishSurvey } from './publish'; - -chai.use(sinonChai); - -describe('publishSurvey', () => { - afterEach(() => { - sinon.restore(); - }); - - it('publishes a survey', async () => { - const dbConnectionObj = getMockDBConnection(); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon.stub(SurveyService.prototype, 'publishSurvey').resolves(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '2' - }; - - mockReq.body = { - publish: true - }; - - try { - const requestHandler = publishSurvey(); - - await requestHandler(mockReq, mockRes, mockNext); - } catch (actualError) { - expect.fail(); - } - - expect(mockRes.statusValue).to.equal(200); - }); - - it('catches and re-throws error', async () => { - const dbConnectionObj = getMockDBConnection({ release: sinon.stub() }); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon.stub(SurveyService.prototype, 'publishSurvey').rejects(new Error('a test error')); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - try { - const requestHandler = publishSurvey(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect(dbConnectionObj.release).to.have.been.called; - - expect((actualError as HTTPError).message).to.equal('a test error'); - } - }); -}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/publish.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/publish.ts deleted file mode 100644 index db3f964610..0000000000 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/publish.ts +++ /dev/null @@ -1,138 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { PROJECT_ROLE } from '../../../../../constants/roles'; -import { getDBConnection } from '../../../../../database/db'; -import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; -import { SurveyService } from '../../../../../services/survey-service'; -import { getLogger } from '../../../../../utils/logger'; - -const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/publish'); - -export const PUT: Operation = [ - authorizeRequestHandler((req) => { - return { - and: [ - { - validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD], - projectId: Number(req.params.projectId), - discriminator: 'ProjectRole' - } - ] - }; - }), - publishSurvey() -]; - -PUT.apiDoc = { - description: 'Publish or unpublish a survey.', - tags: ['survey'], - security: [ - { - Bearer: [] - } - ], - parameters: [ - { - in: 'path', - name: 'projectId', - schema: { - type: 'integer', - minimum: 1 - }, - required: true - }, - { - in: 'path', - name: 'surveyId', - schema: { - type: 'integer', - minimum: 1 - }, - required: true - } - ], - requestBody: { - description: 'Publish or unpublish put request object.', - content: { - 'application/json': { - schema: { - title: 'Publish request object', - type: 'object', - required: ['publish'], - properties: { - publish: { - description: 'Set to `true` to publish the survey, `false` to unpublish the survey', - type: 'boolean' - } - } - } - } - } - }, - responses: { - 200: { - description: 'Survey publish request completed successfully.', - content: { - 'application/json': { - schema: { - // TODO is there any return value? or is it just an HTTP status with no content? - title: 'Survey Response Object', - type: 'object', - required: ['id'], - properties: { - id: { - type: 'number' - } - } - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/401' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -/** - * Publish survey. - * - * @returns {RequestHandler} - */ -export function publishSurvey(): RequestHandler { - return async (req, res) => { - const surveyId = Number(req.params.surveyId); - const publish: boolean = req.body.publish; - - const connection = getDBConnection(req['keycloak_token']); - try { - await connection.open(); - - const surveyService = new SurveyService(connection); - - await surveyService.publishSurvey(surveyId, publish); - - await connection.commit(); - - return res.status(200).send(); - } catch (error) { - defaultLog.error({ label: 'publishSurvey', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/get.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/get.test.ts index 1a68cfe069..191f8e331f 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/get.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/get.test.ts @@ -2,10 +2,11 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; +import { MESSAGE_CLASS_NAME } from '../../../../../../../constants/status'; import * as db from '../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../../queries/survey'; +import { HTTPError } from '../../../../../../../errors/http-error'; +import { ISummarySubmissionMessagesResponse } from '../../../../../../../repositories/summary-repository'; +import { SummaryService } from '../../../../../../../services/summary-service'; import { getMockDBConnection } from '../../../../../../../__mocks__/db'; import * as summarySubmission from './get'; @@ -56,29 +57,6 @@ describe('getSummarySubmission', () => { } }); - it('should throw a 400 error when no sql statement returned for getLatestSurveySummarySubmissionSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(survey_queries, 'getLatestSurveySummarySubmissionSQL').returns(null); - - try { - const result = summarySubmission.getSurveySummarySubmission(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal( - 'Failed to build getLatestSurveySummarySubmissionSQLStatement statement' - ); - } - }); - it('should return a summary submission, on success', async () => { const mockQuery = sinon.stub(); @@ -100,7 +78,35 @@ describe('getSummarySubmission', () => { query: mockQuery }); - sinon.stub(survey_queries, 'getLatestSurveySummarySubmissionSQL').returns(SQL`something`); + const messages: ISummarySubmissionMessagesResponse[] = [ + { + id: 1, + class: MESSAGE_CLASS_NAME.ERROR, + type: 'Miscellaneous', + message: 'error message' + }, + { + id: 2, + class: MESSAGE_CLASS_NAME.ERROR, + type: 'Miscellaneous', + message: 'another error message' + } + ]; + + const submission = { + id: 13, + file_name: 'file13.xlsx', + key: 's3_key', + delete_timestamp: null, + submission_message_type_id: 1, + message: 'another error message', + submission_message_type_name: 'Miscellaneous', + summary_submission_message_class_id: 1, + submission_message_class_name: MESSAGE_CLASS_NAME.ERROR + }; + + sinon.stub(SummaryService.prototype, 'getLatestSurveySummarySubmission').resolves(submission); + sinon.stub(SummaryService.prototype, 'getSummarySubmissionMessages').resolves(messages); const result = summarySubmission.getSurveySummarySubmission(); @@ -108,8 +114,8 @@ describe('getSummarySubmission', () => { expect(actualResult).to.be.eql({ id: 13, - fileName: 'file.xlsx', - messages: [] + fileName: 'file13.xlsx', + messages }); }); @@ -126,8 +132,6 @@ describe('getSummarySubmission', () => { query: mockQuery }); - sinon.stub(survey_queries, 'getLatestSurveySummarySubmissionSQL').returns(SQL`something`); - const result = summarySubmission.getSurveySummarySubmission(); await result(sampleReq, sampleRes as any, (null as unknown) as any); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/get.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/get.ts index dc95781bc8..be1ba10c1a 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/get.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/get.ts @@ -2,9 +2,10 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../errors/custom-error'; -import { queries } from '../../../../../../../queries/queries'; +import { HTTP400 } from '../../../../../../../errors/http-error'; +import { ISummarySubmissionMessagesResponse } from '../../../../../../../repositories/summary-repository'; import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; +import { SummaryService } from '../../../../../../../services/summary-service'; import { getLogger } from '../../../../../../../utils/logger'; const defaultLog = getLogger('/api/project/{projectId}/survey/{surveyId}/summary/submission/get'); @@ -106,66 +107,34 @@ export function getSurveySummarySubmission(): RequestHandler { } const connection = getDBConnection(req['keycloak_token']); + const surveyId = Number(req.params.surveyId); try { - const getSurveySummarySubmissionSQLStatement = queries.survey.getLatestSurveySummarySubmissionSQL( - Number(req.params.surveyId) - ); - - if (!getSurveySummarySubmissionSQLStatement) { - throw new HTTP400('Failed to build getLatestSurveySummarySubmissionSQLStatement statement'); - } - await connection.open(); + const summaryService = new SummaryService(connection); - const summarySubmissionData = await connection.query( - getSurveySummarySubmissionSQLStatement.text, - getSurveySummarySubmissionSQLStatement.values - ); + const summarySubmissionDetails = await summaryService.getLatestSurveySummarySubmission(surveyId); - if ( - !summarySubmissionData || - !summarySubmissionData.rows || - !summarySubmissionData.rows[0] || - summarySubmissionData.rows[0].delete_timestamp - ) { + if (!summarySubmissionDetails || summarySubmissionDetails.delete_timestamp) { return res.status(200).json(null); } - let messageList: any[] = []; - - const errorStatus = summarySubmissionData.rows[0].submission_message_class_name; - - if (errorStatus === 'Error') { - const summary_submission_id = summarySubmissionData.rows[0].id; + let messageList: ISummarySubmissionMessagesResponse[] = []; + const messageClass = summarySubmissionDetails.submission_message_class_name; - const getSummarySubmissionErrorListSQLStatement = queries.survey.getSummarySubmissionMessagesSQL( - Number(summary_submission_id) - ); - - if (!getSummarySubmissionErrorListSQLStatement) { - throw new HTTP400('Failed to build SQL getSummarySubmissionMessagesSQL statement'); - } - - const summarySubmissionErrorListData = await connection.query( - getSummarySubmissionErrorListSQLStatement.text, - getSummarySubmissionErrorListSQLStatement.values - ); - - messageList = (summarySubmissionErrorListData && summarySubmissionErrorListData.rows) || []; + if (messageClass === 'Error') { + const summary_submission_id = summarySubmissionDetails.id; + messageList = await summaryService.getSummarySubmissionMessages(summary_submission_id); } await connection.commit(); const getSummarySubmissionData = - (summarySubmissionData && - summarySubmissionData.rows && - summarySubmissionData.rows[0] && { - id: summarySubmissionData.rows[0].id, - fileName: summarySubmissionData.rows[0].file_name, - messages: messageList - }) || - null; + { + id: summarySubmissionDetails.id, + fileName: summarySubmissionDetails.file_name, + messages: messageList + } || null; return res.status(200).json(getSummarySubmissionData); } catch (error) { diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/upload.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/upload.test.ts index a47733e26d..8f2e60dd3b 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/upload.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/upload.test.ts @@ -2,11 +2,11 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../../queries/survey'; +import { HTTP400, HTTPError } from '../../../../../../../errors/http-error'; +import { SummaryService } from '../../../../../../../services/summary-service'; import * as file_utils from '../../../../../../../utils/file-utils'; +import { XLSXCSV } from '../../../../../../../utils/media/xlsx/xlsx-file'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../../../__mocks__/db'; import * as upload from './upload'; @@ -31,7 +31,7 @@ describe('uploadSummarySubmission', () => { sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); try { - const requestHandler = upload.uploadMedia(); + const requestHandler = upload.uploadAndValidate(); await requestHandler(mockReq, mockRes, mockNext); expect.fail(); @@ -62,7 +62,7 @@ describe('uploadSummarySubmission', () => { sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); try { - const requestHandler = upload.uploadMedia(); + const requestHandler = upload.uploadAndValidate(); await requestHandler(mockReq, mockRes, mockNext); expect.fail(); @@ -94,7 +94,7 @@ describe('uploadSummarySubmission', () => { sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); try { - const requestHandler = upload.uploadMedia(); + const requestHandler = upload.uploadAndValidate(); await requestHandler(mockReq, mockRes, mockNext); expect.fail(); @@ -126,7 +126,7 @@ describe('uploadSummarySubmission', () => { sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); try { - const requestHandler = upload.uploadMedia(); + const requestHandler = upload.uploadAndValidate(); await requestHandler(mockReq, mockRes, mockNext); expect.fail(); @@ -136,41 +136,6 @@ describe('uploadSummarySubmission', () => { } }); - it('should throw a 400 error when no sql statement returned', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '2' - }; - mockReq.files = [ - { - fieldname: 'media', - originalname: 'test.txt', - encoding: '7bit', - mimetype: 'text/plain', - size: 340 - } - ] as any; - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon.stub(survey_queries, 'insertSurveySummarySubmissionSQL').returns(null); - sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - - const requestHandler = upload.uploadMedia(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL insert statement'); - } - }); - it('should throw a 400 error when file contains malicious content', async () => { const dbConnectionObj = getMockDBConnection(); @@ -194,7 +159,7 @@ describe('uploadSummarySubmission', () => { sinon.stub(file_utils, 'scanFileForVirus').resolves(false); - const requestHandler = upload.uploadMedia(); + const requestHandler = upload.uploadAndValidate(); try { await requestHandler(mockReq, mockRes, mockNext); @@ -234,9 +199,11 @@ describe('uploadSummarySubmission', () => { }); sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - sinon.stub(survey_queries, 'insertSurveySummarySubmissionSQL').returns(SQL`some query`); + sinon + .stub(SummaryService.prototype, 'insertSurveySummarySubmission') + .throws(new HTTP400('Failed to insert survey summary submission record')); - const requestHandler = upload.uploadMedia(); + const requestHandler = upload.uploadAndValidate(); try { await requestHandler(mockReq, mockRes, mockNext); @@ -247,49 +214,6 @@ describe('uploadSummarySubmission', () => { } }); - it('should throw a 400 error when it fails to get the update SQL', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '2' - }; - mockReq.files = [ - { - fieldname: 'media', - originalname: 'test.txt', - encoding: '7bit', - mimetype: 'text/plain', - size: 340 - } - ] as any; - - const mockQuery = sinon.stub(); - - mockQuery.onCall(0).resolves({ rowCount: 1, rows: [{ id: 1 }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery - }); - - sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - sinon.stub(survey_queries, 'insertSurveySummarySubmissionSQL').returns(SQL`some query`); - sinon.stub(survey_queries, 'updateSurveySummarySubmissionWithKeySQL').returns(null); - - const requestHandler = upload.uploadMedia(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL update statement'); - } - }); - it('should throw a 400 error when it fails to get the update the record in the database', async () => { const dbConnectionObj = getMockDBConnection(); @@ -320,10 +244,11 @@ describe('uploadSummarySubmission', () => { }); sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - sinon.stub(survey_queries, 'insertSurveySummarySubmissionSQL').returns(SQL`some query`); - sinon.stub(survey_queries, 'updateSurveySummarySubmissionWithKeySQL').returns(SQL`some query`); + sinon + .stub(SummaryService.prototype, 'updateSurveySummarySubmissionWithKey') + .throws(new HTTP400('Failed to update survey summary submission record')); - const requestHandler = upload.uploadMedia(); + const requestHandler = upload.uploadAndValidate(); try { await requestHandler(mockReq, mockRes, mockNext); @@ -363,11 +288,9 @@ describe('uploadSummarySubmission', () => { }); sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - sinon.stub(survey_queries, 'insertSurveySummarySubmissionSQL').returns(SQL`some query`); - sinon.stub(survey_queries, 'updateSurveySummarySubmissionWithKeySQL').returns(SQL`some query`); sinon.stub(file_utils, 'uploadFileToS3').rejects('Failed to insert occurrence submission data'); - const requestHandler = upload.uploadMedia(); + const requestHandler = upload.uploadAndValidate(); try { await requestHandler(mockReq, mockRes, mockNext); @@ -410,130 +333,21 @@ describe('uploadSummarySubmission', () => { }); sinon.stub(file_utils, 'scanFileForVirus').resolves(true); - sinon.stub(survey_queries, 'insertSurveySummarySubmissionSQL').returns(SQL`some query`); - sinon.stub(survey_queries, 'updateSurveySummarySubmissionWithKeySQL').returns(SQL`some query`); - + sinon + .stub(SummaryService.prototype, 'insertSurveySummarySubmission') + .resolves({ survey_summary_submission_id: 14 }); sinon.stub(file_utils, 'uploadFileToS3').resolves({ key: 'projects/1/surveys/1/test.txt' } as any); - - const requestHandler = upload.uploadMedia(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockNext).to.have.been.called; - }); - - it('should return with a 200 if errors messages exist and they are persisted', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '2' - }; - mockReq.files = [ - { - fieldname: 'media', - originalname: 'test.txt', - encoding: '7bit', - mimetype: 'text/plain', - size: 340 - } - ] as any; - mockReq['parseError'] = 'some error exists'; - - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rowCount: 1, rows: [{ id: 1 }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery + sinon.stub(SummaryService.prototype, 'summaryTemplateValidation').resolves(); + sinon.stub(SummaryService.prototype, 'prepXLSX').returns({} as XLSXCSV); + sinon.stub(SummaryService.prototype, 'summaryTemplatePreparation').resolves({ + s3InputKey: 'projects/1/surveys/1/test.txt', + xlsx: {} as XLSXCSV }); - sinon.stub(survey_queries, 'insertSurveySummarySubmissionMessageSQL').returns(SQL`some query`); - - const requestHandler = upload.persistSummaryParseErrors(); + const requestHandler = upload.uploadAndValidate(); await requestHandler(mockReq, mockRes, mockNext); expect(mockRes.statusValue).to.equal(200); }); - - it('should move on the next step is there are no errors to be persisted', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '2' - }; - mockReq.files = [ - { - fieldname: 'media', - originalname: 'test.txt', - encoding: '7bit', - mimetype: 'text/plain', - size: 340 - } - ] as any; - - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rowCount: 1, rows: [{ id: 1 }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery - }); - - const requestHandler = upload.persistSummaryParseErrors(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockNext).to.have.been.called; - }); - - it('should throw an error if there are errors when persisting error messages', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - projectId: '1', - surveyId: '2' - }; - mockReq.files = [ - { - fieldname: 'media', - originalname: 'test.txt', - encoding: '7bit', - mimetype: 'text/plain', - size: 340 - } - ] as any; - mockReq['parseError'] = 'some error exists'; - - const mockQuery = sinon.stub(); - - mockQuery.resolves({}); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery - }); - - sinon.stub(survey_queries, 'insertSurveySummarySubmissionMessageSQL').returns(SQL`some query`); - - const requestHandler = upload.persistSummaryParseErrors(); - - try { - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to insert summary submission message data'); - expect((actualError as HTTPError).status).to.equal(400); - } - }); }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/upload.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/upload.ts index 858d3c511f..97b1a4b642 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/upload.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/upload.ts @@ -1,20 +1,14 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../errors/custom-error'; -import { PostSummaryDetails } from '../../../../../../../models/summaryresults-create'; -import { generateHeaderErrorMessage, generateRowErrorMessage } from '../../../../../../../paths/dwc/validate'; -import { validateXLSX } from '../../../../../../../paths/xlsx/validate'; -import { queries } from '../../../../../../../queries/queries'; +import { SUMMARY_SUBMISSION_MESSAGE_TYPE } from '../../../../../../../constants/status'; +import { getDBConnection } from '../../../../../../../database/db'; +import { HTTP400 } from '../../../../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../../../../request-handlers/security/authorization'; +import { SummaryService } from '../../../../../../../services/summary-service'; import { generateS3FileKey, scanFileForVirus, uploadFileToS3 } from '../../../../../../../utils/file-utils'; import { getLogger } from '../../../../../../../utils/logger'; -import { ICsvState } from '../../../../../../../utils/media/csv/csv-file'; -import { IMediaState, MediaFile } from '../../../../../../../utils/media/media-file'; -import { parseUnknownMedia } from '../../../../../../../utils/media/media-utils'; -import { ValidationSchemaParser } from '../../../../../../../utils/media/validation/validation-schema-parser'; -import { XLSXCSV } from '../../../../../../../utils/media/xlsx/xlsx-file'; +import { MessageError, SummarySubmissionError } from '../../../../../../../utils/submission-error'; const defaultLog = getLogger('/api/project/{projectId}/survey/{surveyId}/summary/upload'); @@ -30,14 +24,7 @@ export const POST: Operation = [ ] }; }), - uploadMedia(), - prepXLSX(), - persistSummaryParseErrors(), - getValidationRules(), - validateXLSX(), - persistSummaryValidationResults(), - parseAndUploadSummarySubmissionInput(), - returnSummarySubmissionId() + uploadAndValidate() ]; POST.apiDoc = { @@ -111,31 +98,14 @@ POST.apiDoc = { } }; -export enum SUMMARY_CLASS { - STUDY_AREA = 'survey area', - SUMMARY_STATISTIC = 'statistic', - STRATUM = 'stratum', - OBSERVED = 'observed', - ESTIMATE = 'estimate', - STANDARD_ERROR = 'se', - COEFFICIENT_VARIATION = 'cv', - CONFIDENCE_LEVEL = 'conf.level', - LOWER_CONFIDENCE_LIMIT = 'lcl', - UPPER_CONFIDENCE_LIMIT = 'ucl', - SIGHTABILITY_MODEL = 'sightability.model', - AREA = 'area', - AREA_FLOWN = 'area.flown', - OUTLIER_BLOCKS_REMOVED = 'outlier.blocks.removed', - ANALYSIS_METHOD = 'analysis.method' -} - /** - * Uploads a media file to S3 and inserts a matching record in the `summary_submission` table. + * Uploads a media file to S3 and inserts a matching record in the `summary_submission` table, + * then validates the submission. * * @return {*} {RequestHandler} */ -export function uploadMedia(): RequestHandler { - return async (req, res, next) => { +export function uploadAndValidate(): RequestHandler { + return async (req, res) => { const rawMediaArray: Express.Multer.File[] = req.files as Express.Multer.File[]; if (!rawMediaArray || !rawMediaArray.length) { @@ -166,10 +136,13 @@ export function uploadMedia(): RequestHandler { const connection = getDBConnection(req['keycloak_token']); + let summarySubmissionId: number | null = null; + try { const rawMediaFile = rawMediaArray[0]; await connection.open(); + const summaryService = new SummaryService(connection); // Scan file for viruses using ClamAV const virusScanResult = await scanFileForVirus(rawMediaFile); @@ -178,23 +151,19 @@ export function uploadMedia(): RequestHandler { throw new HTTP400('Malicious content detected, upload cancelled'); } - const response = await insertSurveySummarySubmission( - Number(req.params.surveyId), - 'BioHub', - rawMediaFile.originalname, - connection - ); - - const summarySubmissionId = response.rows[0].id; + const surveyId = Number(req.params.surveyId); + summarySubmissionId = ( + await summaryService.insertSurveySummarySubmission(surveyId, 'BioHub', rawMediaFile.originalname) + ).survey_summary_submission_id; const key = generateS3FileKey({ projectId: Number(req.params.projectId), - surveyId: Number(req.params.surveyId), + surveyId: surveyId, folder: `summaryresults/${summarySubmissionId}`, fileName: rawMediaFile.originalname }); - await updateSurveySummarySubmissionWithKey(summarySubmissionId, key, connection); + await summaryService.updateSurveySummarySubmissionWithKey(summarySubmissionId, key); await connection.commit(); @@ -204,538 +173,30 @@ export function uploadMedia(): RequestHandler { email: (req['auth_payload'] && req['auth_payload'].email) || '' }; + // Upload submission to S3 await uploadFileToS3(rawMediaFile, key, metadata); - req['s3File'] = rawMediaFile; + // Validate submission + await summaryService.validateFile(summarySubmissionId, surveyId); - req['summarySubmissionId'] = summarySubmissionId; - next(); + return res.status(200).json({ summarySubmissionId }); } catch (error) { defaultLog.error({ label: 'uploadMedia', message: 'error', error }); await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} - -export function prepXLSX(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'prepXLSX', message: 's3File' }); - - try { - const s3File = req['s3File']; - - const parsedMedia = parseUnknownMedia(s3File); - - if (!parsedMedia) { - req['parseError'] = 'Failed to parse submission, file was empty'; - - return next(); - } - - if (!(parsedMedia instanceof MediaFile)) { - req['parseError'] = 'Failed to parse submission, not a valid XLSX CSV file'; - - return next(); - } - - const xlsxCsv = new XLSXCSV(parsedMedia); - - req['xlsx'] = xlsxCsv; - - next(); - } catch (error) { - defaultLog.error({ label: 'prepXLSX', message: 'error', error }); - throw error; - } - }; -} - -/** - * Inserts a new record into the `survey_summary_submission` table. - * - * @param {number} surveyId - * @param {string} source - * @param {string} file_name - * @param {IDBConnection} connection - * @return {*} {Promise} - */ -export const insertSurveySummarySubmission = async ( - surveyId: number, - source: string, - file_name: string, - connection: IDBConnection -): Promise => { - const insertSqlStatement = queries.survey.insertSurveySummarySubmissionSQL(surveyId, source, file_name); - - if (!insertSqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const insertResponse = await connection.query(insertSqlStatement.text, insertSqlStatement.values); - - if (!insertResponse || !insertResponse.rowCount) { - throw new HTTP400('Failed to insert survey summary submission record'); - } - - return insertResponse; -}; - -/** - * Update existing `survey_summary_submission` record with key. - * - * @param {number} submissionId - * @param {string} key - * @param {IDBConnection} connection - * @return {*} {Promise} - */ -export const updateSurveySummarySubmissionWithKey = async ( - submissionId: number, - key: string, - connection: IDBConnection -): Promise => { - const updateSqlStatement = queries.survey.updateSurveySummarySubmissionWithKeySQL(submissionId, key); - - if (!updateSqlStatement) { - throw new HTTP400('Failed to build SQL update statement'); - } - - const updateResponse = await connection.query(updateSqlStatement.text, updateSqlStatement.values); - - if (!updateResponse || !updateResponse.rowCount) { - throw new HTTP400('Failed to update survey summary submission record'); - } - - return updateResponse; -}; - -export function persistSummaryParseErrors(): RequestHandler { - return async (req, res, next) => { - const parseError = req['parseError']; - - defaultLog.debug({ label: 'persistSummaryParseErrors', message: 'parseError', parseError }); - - if (!parseError) { - // no errors to persist, skip to next step - return next(); - } - - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - const summarySubmissionId = req['summarySubmissionId']; - await insertSummarySubmissionMessage(summarySubmissionId, 'Error', parseError, 'Miscellaneous', connection); - - await connection.commit(); - - // archive is not parsable, don't continue to next step and return early - return res.status(200).send(); - } catch (error) { - defaultLog.error({ label: 'persistParseErrors', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} - -export function getValidationRules(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'getValidationRules', message: 's3File' }); - - try { - const validationSchema = { - name: '', - description: '', - defaultFile: { - description: '', - columns: [ - { - name: 'Observed', - description: '', - validations: [ - { - column_numeric_validator: { - name: '', - description: '' - } - } - ] - }, - { - name: 'Estimate', - description: '', - validations: [ - { - column_numeric_validator: { - name: '', - description: '' - } - } - ] - }, - { - name: 'SE', - description: '', - validations: [ - { - column_numeric_validator: { - name: '', - description: '' - } - } - ] - }, - { - name: 'CV', - description: '', - validations: [ - { - column_numeric_validator: { - name: '', - description: '' - } - } - ] - }, - { - name: 'Conf.Level', - description: '', - validations: [ - { - column_numeric_validator: { - name: '', - description: '' - } - } - ] - }, - { - name: 'LCL', - description: '', - validations: [ - { - column_numeric_validator: { - name: '', - description: '' - } - } - ] - }, - { - name: 'UCL', - description: '', - validations: [ - { - column_numeric_validator: { - name: '', - description: '' - } - } - ] - }, - { - name: 'Area', - description: '', - validations: [ - { - column_numeric_validator: { - name: '', - description: '' - } - } - ] - }, - { - name: 'Area.Flown', - description: '', - validations: [ - { - column_numeric_validator: { - name: '', - description: '' - } - } - ] - } - ], - validations: [ - { - file_duplicate_columns_validator: {} - }, - { - file_required_columns_validator: { - required_columns: [ - 'Survey Area', - 'Statistic', - 'Stratum', - 'Observed', - 'Estimate', - 'SE', - 'CV', - 'Conf.Level', - 'LCL', - 'UCL', - 'Sightability.Model', - 'Area', - 'Area.Flown', - 'Outlier.Blocks.Removed', - 'Analysis.Method' - ] - } - } - ] - }, - validations: [ - { - mimetype_validator: { - reg_exps: ['text\\/csv', 'application\\/vnd.*'] - } - } - ] - }; - const validationSchemaParser = new ValidationSchemaParser(validationSchema); - - req['validationSchemaParser'] = validationSchemaParser; - - next(); - } catch (error) { - defaultLog.debug({ label: 'getValidationRules', message: 'error', error }); - throw error; - } - }; -} - -export function persistSummaryValidationResults(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'persistValidationResults', message: 'validationResults' }); - - const mediaState: IMediaState = req['mediaState']; - const csvState: ICsvState[] = req['csvState']; - - if (mediaState.isValid && csvState?.every((item) => item.isValid)) { - return next(); - } - - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - const summarySubmissionId = req['summarySubmissionId']; - - const promises: Promise[] = []; - - mediaState.fileErrors?.forEach((fileError) => { - promises.push( - insertSummarySubmissionMessage(summarySubmissionId, 'Error', `${fileError}`, 'Miscellaneous', connection) + // Log error in summary submission error messages table + if (summarySubmissionId) { + const summaryService = new SummaryService(connection); + await summaryService.insertSummarySubmissionError( + summarySubmissionId, + new SummarySubmissionError({ + messages: [new MessageError(SUMMARY_SUBMISSION_MESSAGE_TYPE.SYSTEM_ERROR)] + }) ); - }); - - csvState?.forEach((csvStateItem) => { - csvStateItem.headerErrors?.forEach((headerError) => { - promises.push( - insertSummarySubmissionMessage( - summarySubmissionId, - 'Error', - generateHeaderErrorMessage(csvStateItem.fileName, headerError), - headerError.errorCode, - connection - ) - ); - }); - - csvStateItem.rowErrors?.forEach((rowError) => { - promises.push( - insertSummarySubmissionMessage( - summarySubmissionId, - 'Error', - generateRowErrorMessage(csvStateItem.fileName, rowError), - rowError.errorCode, - connection - ) - ); - }); - }); - - await Promise.all(promises); - - await connection.commit(); - - return res.status(200).send(); - } catch (error) { - defaultLog.error({ label: 'persistValidationResults', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} - -export function parseAndUploadSummarySubmissionInput(): RequestHandler { - return async (req, res, next) => { - const xlsxCsv: XLSXCSV = req['xlsx']; - - const summarySubmissionId = req['summarySubmissionId']; - - const connection = getDBConnection(req['keycloak_token']); - - const worksheets = xlsxCsv.workbook.worksheets; - - try { - await connection.open(); - - const promises: Promise[] = []; - - for (const worksheet of Object.values(worksheets)) { - const rowObjects = worksheet.getRowObjects(); - - for (const rowObject of Object.values(rowObjects)) { - const summaryObject = new PostSummaryDetails(); - - for (const columnName in rowObject) { - const columnValue = rowObject[columnName]; - - switch (columnName.toLowerCase()) { - case SUMMARY_CLASS.STUDY_AREA: - summaryObject.study_area_id = columnValue; - break; - case SUMMARY_CLASS.SUMMARY_STATISTIC: - summaryObject.parameter = columnValue; - break; - case SUMMARY_CLASS.STRATUM: - summaryObject.stratum = columnValue; - break; - case SUMMARY_CLASS.OBSERVED: - summaryObject.parameter_value = columnValue; - break; - case SUMMARY_CLASS.ESTIMATE: - summaryObject.parameter_estimate = columnValue; - break; - case SUMMARY_CLASS.STANDARD_ERROR: - summaryObject.standard_error = columnValue; - break; - case SUMMARY_CLASS.COEFFICIENT_VARIATION: - summaryObject.coefficient_variation = columnValue; - break; - case SUMMARY_CLASS.CONFIDENCE_LEVEL: - summaryObject.confidence_level_percent = columnValue; - break; - case SUMMARY_CLASS.UPPER_CONFIDENCE_LIMIT: - summaryObject.confidence_limit_upper = columnValue; - break; - case SUMMARY_CLASS.LOWER_CONFIDENCE_LIMIT: - summaryObject.confidence_limit_lower = columnValue; - break; - case SUMMARY_CLASS.SIGHTABILITY_MODEL: - summaryObject.sightability_model = columnValue; - break; - case SUMMARY_CLASS.AREA: - summaryObject.total_area_survey_sqm = columnValue; - break; - case SUMMARY_CLASS.AREA_FLOWN: - summaryObject.kilometres_surveyed = columnValue; - break; - case SUMMARY_CLASS.OUTLIER_BLOCKS_REMOVED: - summaryObject.outlier_blocks_removed = columnValue; - break; - case SUMMARY_CLASS.ANALYSIS_METHOD: - summaryObject.analysis_method = columnValue; - break; - default: - break; - } - } - promises.push(uploadScrapedSummarySubmission(summarySubmissionId, summaryObject, connection)); - } } - - await Promise.all(promises); - - await connection.commit(); - next(); - } catch (error) { - defaultLog.error({ label: 'parseAndUploadSummaryDetails', message: 'error', error }); - await connection.rollback(); throw error; } finally { connection.release(); } }; } - -function returnSummarySubmissionId(): RequestHandler { - return async (req, res) => { - const summarySubmissionId = req['summarySubmissionId']; - - return res.status(200).json({ summarySubmissionId }); - }; -} - -/** - * Upload scraped summary submission data. - * - * @param {number} summarySubmissionId - * @param {any} scrapedSummaryDetail - * @param {IDBConnection} connection - * @return {*} - */ -export const uploadScrapedSummarySubmission = async ( - summarySubmissionId: number, - scrapedSummaryDetail: any, - connection: IDBConnection -) => { - const sqlStatement = queries.survey.insertSurveySummaryDetailsSQL(summarySubmissionId, scrapedSummaryDetail); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL post statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to insert summary details data'); - } -}; - -/** - * Insert a record into the survey_summary_submission_message table. - * - * @param {number} submissionStatusId - * @param {string} submissionMessageType - * @param {string} message - * @param {string} errorCode - * @param {IDBConnection} connection - * @return {*} {Promise} - */ -export const insertSummarySubmissionMessage = async ( - submissionStatusId: number, - submissionMessageType: string, - message: string, - errorCode: string, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.survey.insertSurveySummarySubmissionMessageSQL( - submissionStatusId, - submissionMessageType, - message, - errorCode - ); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to insert summary submission message data'); - } -}; diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/delete.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/delete.test.ts index 9b2a036d98..12b654174f 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/delete.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/delete.test.ts @@ -2,10 +2,9 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../../../queries/survey'; +import { HTTPError } from '../../../../../../../../errors/http-error'; +import { SummaryService } from '../../../../../../../../services/summary-service'; import { getMockDBConnection } from '../../../../../../../../__mocks__/db'; import * as delete_submission from './delete'; @@ -90,27 +89,6 @@ describe('deleteSummarySubmission', () => { } }); - it('should throw a 400 error when no sql statement returned for deleteSummarySubmissionSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(survey_queries, 'deleteSummarySubmissionSQL').returns(null); - - try { - const result = delete_submission.deleteSummarySubmission(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL delete statement'); - } - }); - it('should return null when no rowCount', async () => { const mockQuery = sinon.stub(); @@ -124,7 +102,7 @@ describe('deleteSummarySubmission', () => { query: mockQuery }); - sinon.stub(survey_queries, 'deleteSummarySubmissionSQL').returns(SQL`something`); + sinon.stub(SummaryService.prototype, 'deleteSummarySubmission').resolves(null); const result = delete_submission.deleteSummarySubmission(); @@ -146,7 +124,7 @@ describe('deleteSummarySubmission', () => { query: mockQuery }); - sinon.stub(survey_queries, 'deleteSummarySubmissionSQL').returns(SQL`something`); + sinon.stub(SummaryService.prototype, 'deleteSummarySubmission').resolves(1); const result = delete_submission.deleteSummarySubmission(); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/delete.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/delete.ts index 729560e1c4..1daa5db95c 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/delete.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/delete.ts @@ -2,9 +2,9 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../../errors/custom-error'; -import { queries } from '../../../../../../../../queries/queries'; +import { HTTP400 } from '../../../../../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../../../../../request-handlers/security/authorization'; +import { SummaryService } from '../../../../../../../../services/summary-service'; import { getLogger } from '../../../../../../../../utils/logger'; const defaultLog = getLogger('/api/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/delete'); @@ -111,24 +111,13 @@ export function deleteSummarySubmission(): RequestHandler { const connection = getDBConnection(req['keycloak_token']); try { - const deleteSubmissionSQLStatement = queries.survey.deleteSummarySubmissionSQL(Number(req.params.summaryId)); - - if (!deleteSubmissionSQLStatement) { - throw new HTTP400('Failed to build SQL delete statement'); - } - await connection.open(); + const summaryService = new SummaryService(connection); - const deleteResult = await connection.query( - deleteSubmissionSQLStatement.text, - deleteSubmissionSQLStatement.values - ); - + const result = await summaryService.deleteSummarySubmission(Number(req.params.summaryId)); await connection.commit(); - const deleteResponse = (deleteResult && deleteResult.rowCount) || null; - - return res.status(200).json(deleteResponse); + return res.status(200).json(result); } catch (error) { defaultLog.error({ label: 'deleteSummarySubmission', message: 'error', error }); await connection.rollback(); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/getSignedUrl.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/getSignedUrl.test.ts index ff2272da22..466619152e 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/getSignedUrl.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/getSignedUrl.test.ts @@ -2,10 +2,9 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../../../queries/survey'; +import { HTTP400, HTTPError } from '../../../../../../../../errors/http-error'; +import { SummaryService } from '../../../../../../../../services/summary-service'; import * as file_utils from '../../../../../../../../utils/file-utils'; import { getMockDBConnection } from '../../../../../../../../__mocks__/db'; import * as get_signed_url from './getSignedUrl'; @@ -94,7 +93,7 @@ describe('getSingleSubmissionURL', () => { } }); - it('should throw a 400 error when no sql statement returned', async () => { + it('should throw a 400 error when no submission URL is found', async () => { sinon.stub(db, 'getDBConnection').returns({ ...dbConnectionObj, systemUserId: () => { @@ -102,7 +101,9 @@ describe('getSingleSubmissionURL', () => { } }); - sinon.stub(survey_queries, 'getSurveySummarySubmissionSQL').returns(null); + sinon + .stub(SummaryService.prototype, 'findSummarySubmissionById') + .throws(new HTTP400('Failed to query survey summary submission table')); try { const result = get_signed_url.getSingleSummarySubmissionURL(); @@ -111,7 +112,7 @@ describe('getSingleSubmissionURL', () => { expect.fail(); } catch (actualError) { expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); + expect((actualError as HTTPError).message).to.equal('Failed to query survey summary submission table'); } }); @@ -128,7 +129,6 @@ describe('getSingleSubmissionURL', () => { query: mockQuery }); - sinon.stub(survey_queries, 'getSurveySummarySubmissionSQL').returns(SQL`some query`); sinon.stub(file_utils, 'getS3SignedURL').resolves(null); const result = get_signed_url.getSingleSummarySubmissionURL(); @@ -151,7 +151,20 @@ describe('getSingleSubmissionURL', () => { query: mockQuery }); - sinon.stub(survey_queries, 'getSurveySummarySubmissionSQL').returns(SQL`some query`); + sinon.stub(SummaryService.prototype, 'findSummarySubmissionById').resolves({ + survey_summary_submission_id: 1, + survey_id: 1, + source: 'source', + event_timestamp: null, + delete_timestamp: null, + key: 'myurlsigned.com', + file_name: 'filename', + create_user: 1, + update_date: null, + update_user: null, + revision_count: 1, + summary_template_species_id: 1 + }); sinon.stub(file_utils, 'getS3SignedURL').resolves('myurlsigned.com'); const result = get_signed_url.getSingleSummarySubmissionURL(); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/getSignedUrl.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/getSignedUrl.ts index 05a11d1e33..9605060f55 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/getSignedUrl.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/getSignedUrl.ts @@ -2,9 +2,9 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../../errors/custom-error'; -import { queries } from '../../../../../../../../queries/queries'; +import { HTTP400 } from '../../../../../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../../../../../request-handlers/security/authorization'; +import { SummaryService } from '../../../../../../../../services/summary-service'; import { getS3SignedURL } from '../../../../../../../../utils/file-utils'; import { getLogger } from '../../../../../../../../utils/logger'; import { attachmentApiDocObject } from '../../../../../../../../utils/shared-api-docs'; @@ -90,24 +90,13 @@ export function getSingleSummarySubmissionURL(): RequestHandler { const connection = getDBConnection(req['keycloak_token']); try { - const getSurveySummarySubmissionSQLStatement = queries.survey.getSurveySummarySubmissionSQL( - Number(req.params.summaryId) - ); - - if (!getSurveySummarySubmissionSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - await connection.open(); + const summaryService = new SummaryService(connection); - const result = await connection.query( - getSurveySummarySubmissionSQLStatement.text, - getSurveySummarySubmissionSQLStatement.values - ); - + const summarySubmission = await summaryService.findSummarySubmissionById(Number(req.params.summaryId)); await connection.commit(); - const s3Key = result && result.rows.length && result.rows[0].key; + const s3Key = summarySubmission.key; const s3SignedUrl = await getS3SignedURL(s3Key); if (!s3SignedUrl) { diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/view.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/view.test.ts index b04681aca2..def24a2f50 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/view.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/view.test.ts @@ -3,10 +3,8 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import * as db from '../../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../../errors/custom-error'; -import survey_queries from '../../../../../../../../queries/survey'; +import { HTTPError } from '../../../../../../../../errors/http-error'; import * as file_utils from '../../../../../../../../utils/file-utils'; import { MediaFile } from '../../../../../../../../utils/media/media-file'; import * as media_utils from '../../../../../../../../utils/media/media-utils'; @@ -95,27 +93,6 @@ describe('getSurveySubmissionCSVForView', () => { } }); - it('should throw a 400 error when no sql statement returned for getSurveySummarySubmissionSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(survey_queries, 'getSurveySummarySubmissionSQL').returns(null); - - try { - const result = view.getSummarySubmissionCSVForView(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - } - }); - it('should throw a 500 error when no s3 file fetched', async () => { const mockQuery = sinon.stub(); @@ -136,7 +113,6 @@ describe('getSurveySubmissionCSVForView', () => { query: mockQuery }); - sinon.stub(survey_queries, 'getLatestSurveySummarySubmissionSQL').returns(SQL`something`); sinon.stub(file_utils, 'generateS3FileKey').resolves('validkey'); sinon.stub(file_utils, 'getFileFromS3').resolves((null as unknown) as GetObjectOutput); @@ -171,7 +147,6 @@ describe('getSurveySubmissionCSVForView', () => { query: mockQuery }); - sinon.stub(survey_queries, 'getLatestSurveySummarySubmissionSQL').returns(SQL`something`); sinon.stub(file_utils, 'generateS3FileKey').resolves('validkey'); sinon.stub(file_utils, 'getFileFromS3').resolves({ file: 'myfile' } as GetObjectOutput); sinon.stub(media_utils, 'parseUnknownMedia').returns(null); @@ -207,7 +182,6 @@ describe('getSurveySubmissionCSVForView', () => { query: mockQuery }); - sinon.stub(survey_queries, 'getLatestSurveySummarySubmissionSQL').returns(SQL`something`); sinon.stub(file_utils, 'generateS3FileKey').resolves('validkey'); sinon.stub(file_utils, 'getFileFromS3').resolves({ file: 'myfile' } as GetObjectOutput); sinon diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/view.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/view.ts index 794b6c90e0..81cbf2ae5f 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/view.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/summary/submission/{summaryId}/view.ts @@ -2,9 +2,9 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../../../../../../constants/roles'; import { getDBConnection } from '../../../../../../../../database/db'; -import { HTTP400, HTTP500 } from '../../../../../../../../errors/custom-error'; -import { queries } from '../../../../../../../../queries/queries'; +import { HTTP400, HTTP500 } from '../../../../../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../../../../../request-handlers/security/authorization'; +import { SummaryService } from '../../../../../../../../services/summary-service'; import { generateS3FileKey, getFileFromS3 } from '../../../../../../../../utils/file-utils'; import { getLogger } from '../../../../../../../../utils/logger'; import { DWCArchive } from '../../../../../../../../utils/media/dwc/dwc-archive-file'; @@ -140,25 +140,19 @@ export function getSummarySubmissionCSVForView(): RequestHandler { const connection = getDBConnection(req['keycloak_token']); try { - const getSubmissionSQLStatement = queries.survey.getSurveySummarySubmissionSQL(Number(req.params.summaryId)); - - if (!getSubmissionSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - await connection.open(); + const summaryService = new SummaryService(connection); - const submissionData = await connection.query(getSubmissionSQLStatement.text, getSubmissionSQLStatement.values); - + const summaryId = Number(req.params.summaryId); + const summarySubmission = await summaryService.findSummarySubmissionById(summaryId); await connection.commit(); - const fileName = - (submissionData && submissionData.rows && submissionData.rows[0] && submissionData.rows[0].file_name) || null; + const fileName = summarySubmission.file_name; const s3Key = generateS3FileKey({ projectId: Number(req.params.projectId), surveyId: Number(req.params.surveyId), - summaryId: Number(req.params.summaryId), + summaryId, fileName }); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/update.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/update.test.ts index 9fa1b40409..86d143b8f7 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/update.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/update.test.ts @@ -3,7 +3,8 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; +import { HTTPError } from '../../../../../errors/http-error'; +import { PlatformService } from '../../../../../services/platform-service'; import { SurveyService } from '../../../../../services/survey-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../__mocks__/db'; import { updateSurvey } from './update'; @@ -22,6 +23,8 @@ describe('updateSurvey', () => { sinon.stub(SurveyService.prototype, 'updateSurvey').resolves(); + sinon.stub(PlatformService.prototype, 'submitDwCAMetadataPackage').resolves(); + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); mockReq.params = { diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/update.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/update.ts index f65cae4e8c..289a4539fd 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/update.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/update.ts @@ -5,6 +5,7 @@ import { getDBConnection } from '../../../../../database/db'; import { PutSurveyObject } from '../../../../../models/survey-update'; import { geoJsonFeature } from '../../../../../openapi/schemas/geoJson'; import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; +import { PlatformService } from '../../../../../services/platform-service'; import { SurveyService } from '../../../../../services/survey-service'; import { getLogger } from '../../../../../utils/logger'; @@ -81,7 +82,8 @@ PUT.apiDoc = { }, end_date: { type: 'string', - description: 'ISO 8601 date string' + description: 'ISO 8601 date string', + nullable: true }, biologist_first_name: { type: 'string' @@ -116,13 +118,25 @@ PUT.apiDoc = { }, permit: { type: 'object', - required: ['permit_number', 'permit_type'], properties: { - permit_number: { - type: 'string' - }, - permit_type: { - type: 'string' + permits: { + type: 'array', + items: { + type: 'object', + required: ['permit_number', 'permit_type'], + properties: { + permit_id: { + type: 'number', + nullable: true + }, + permit_number: { + type: 'string' + }, + permit_type: { + type: 'string' + } + } + } } } }, @@ -146,7 +160,6 @@ PUT.apiDoc = { 'proprietary_data_category', 'proprietor_name', 'category_rationale', - 'first_nations_id', 'disa_required' ], properties: { @@ -162,9 +175,6 @@ PUT.apiDoc = { category_rationale: { type: 'string' }, - first_nations_id: { - type: 'number' - }, disa_required: { type: 'string' } @@ -178,7 +188,6 @@ PUT.apiDoc = { 'field_method_id', 'vantage_code_ids', 'ecological_season_id', - 'surveyed_all_areas', 'revision_count' ], properties: { @@ -200,10 +209,6 @@ PUT.apiDoc = { ecological_season_id: { type: 'number' }, - surveyed_all_areas: { - type: 'string', - enum: ['true', 'false'] - }, revision_count: { type: 'number' } @@ -211,7 +216,7 @@ PUT.apiDoc = { }, location: { type: 'object', - required: ['survey_area_name', 'geometry', 'revision_count'], + required: ['survey_area_name', 'geometry'], properties: { survey_area_name: { type: 'string' @@ -282,7 +287,15 @@ export function updateSurvey(): RequestHandler { const surveyService = new SurveyService(connection); - await surveyService.updateSurvey(projectId, surveyId, sanitizedPutSurveyData); + await surveyService.updateSurvey(surveyId, sanitizedPutSurveyData); + + try { + const platformService = new PlatformService(connection); + await platformService.submitDwCAMetadataPackage(projectId); + } catch (error) { + // Don't fail the rest of the endpoint if submitting metadata fails + defaultLog.error({ label: 'updateSurvey->submitDwCAMetadataPackage', message: 'error', error }); + } await connection.commit(); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/update/get.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/update/get.test.ts new file mode 100644 index 0000000000..32fe083303 --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/update/get.test.ts @@ -0,0 +1,176 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import * as db from '../../../../../../database/db'; +import { HTTPError } from '../../../../../../errors/http-error'; +import { SurveyObject } from '../../../../../../models/survey-view'; +import { SurveyService } from '../../../../../../services/survey-service'; +import { getMockDBConnection } from '../../../../../../__mocks__/db'; +import * as get from './get'; + +chai.use(sinonChai); + +describe('getSurveyForUpdate', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should throw an error when a failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns({ + ...dbConnectionObj, + systemUserId: () => { + return 20; + } + }); + + const expectedError = new Error('cannot process request'); + sinon.stub(SurveyService.prototype, 'getSurveyById').rejects(expectedError); + + const sampleReq = { + keycloak_token: {}, + body: {}, + params: { + projectId: 1, + surveyId: 2 + } + } as any; + + try { + const result = get.getSurveyForUpdate(); + + await result(sampleReq, (null as unknown) as any, (null as unknown) as any); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTPError).message).to.equal(expectedError.message); + } + }); + + it('should succeed with partial data', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns({ + ...dbConnectionObj, + systemUserId: () => { + return 20; + } + }); + + const sampleReq = { + keycloak_token: {}, + body: {}, + params: { + projectId: 1, + surveyId: 2 + } + } as any; + + const getSurveyByIdStub = sinon.stub(SurveyService.prototype, 'getSurveyById').resolves(({ + id: 1, + proprietor: {}, + funding: {} + } as unknown) as SurveyObject); + + const expectedResponse = { + surveyData: { + id: 1, + proprietor: { + survey_data_proprietary: 'false', + proprietor_type_name: '', + proprietary_data_category: 0, + first_nations_name: '', + first_nations_id: 0, + category_rationale: '', + proprietor_name: '', + disa_required: 'false' + }, + funding: { + funding_sources: [] + }, + agreements: { + sedis_procedures_accepted: 'true', + foippa_requirements_accepted: 'true' + } + } + }; + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; + + const result = get.getSurveyForUpdate(); + + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(expectedResponse); + expect(getSurveyByIdStub).to.be.calledOnce; + }); + + it('should succeed with valid data', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns({ + ...dbConnectionObj, + systemUserId: () => { + return 20; + } + }); + + const sampleReq = { + keycloak_token: {}, + body: {}, + params: { + projectId: 1, + surveyId: 2 + } + } as any; + + const getSurveyByIdStub = sinon.stub(SurveyService.prototype, 'getSurveyById').resolves(({ + id: 1, + proprietor: { proprietor_type_id: 1, first_nations_id: 1, disa_required: true }, + funding: { funding_sources: [{ pfs_id: 1 }] } + } as unknown) as SurveyObject); + + const expectedResponse = { + surveyData: { + id: 1, + proprietor: { + survey_data_proprietary: 'true', + proprietary_data_category: 1, + proprietor_type_id: 1, + first_nations_id: 1, + disa_required: 'true' + }, + funding: { + funding_sources: [1] + }, + agreements: { + sedis_procedures_accepted: 'true', + foippa_requirements_accepted: 'true' + } + } + }; + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; + + const result = get.getSurveyForUpdate(); + + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(expectedResponse); + expect(getSurveyByIdStub).to.be.calledOnce; + }); +}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/update/get.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/update/get.ts new file mode 100644 index 0000000000..1c756cf2cf --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/update/get.ts @@ -0,0 +1,353 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { PROJECT_ROLE } from '../../../../../../constants/roles'; +import { getDBConnection } from '../../../../../../database/db'; +import { geoJsonFeature } from '../../../../../../openapi/schemas/geoJson'; +import { authorizeRequestHandler } from '../../../../../../request-handlers/security/authorization'; +import { SurveyService } from '../../../../../../services/survey-service'; +import { getLogger } from '../../../../../../utils/logger'; + +const defaultLog = getLogger('paths/project/{projectId}/survey/{surveyId}/update/get'); + +export const GET: Operation = [ + authorizeRequestHandler((req) => { + return { + and: [ + { + validProjectRoles: [PROJECT_ROLE.PROJECT_LEAD, PROJECT_ROLE.PROJECT_EDITOR, PROJECT_ROLE.PROJECT_VIEWER], + projectId: Number(req.params.projectId), + discriminator: 'ProjectRole' + } + ] + }; + }), + getSurveyForUpdate() +]; + +GET.apiDoc = { + description: 'Get a project survey, for update purposes.', + tags: ['survey'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + } + ], + responses: { + 200: { + description: 'Survey with matching surveyId and projectId.', + content: { + 'application/json': { + schema: { + title: 'Survey get response object, for view purposes', + type: 'object', + required: ['surveyData'], + properties: { + surveyData: { + type: 'object', + required: [ + 'survey_details', + 'species', + 'permit', + 'funding', + 'proprietor', + 'purpose_and_methodology', + 'location' + ], + properties: { + survey_details: { + description: 'Survey Details', + type: 'object', + required: [ + 'survey_name', + 'start_date', + 'biologist_first_name', + 'biologist_last_name', + 'revision_count' + ], + properties: { + survey_name: { + type: 'string' + }, + start_date: { + oneOf: [{ type: 'object' }, { type: 'string', format: 'date' }], + description: 'ISO 8601 date string for the funding end_date' + }, + end_date: { + oneOf: [{ type: 'object' }, { type: 'string', format: 'date' }], + nullable: true, + description: 'ISO 8601 date string for the funding end_date' + }, + biologist_first_name: { + type: 'string' + }, + biologist_last_name: { + type: 'string' + }, + revision_count: { + type: 'number' + } + } + }, + species: { + description: 'Survey Species', + type: 'object', + required: ['focal_species', 'focal_species_names', 'ancillary_species', 'ancillary_species_names'], + properties: { + ancillary_species: { + nullable: true, + type: 'array', + items: { + type: 'number' + } + }, + ancillary_species_names: { + nullable: true, + type: 'array', + items: { + type: 'string' + } + }, + focal_species: { + type: 'array', + items: { + type: 'number' + } + }, + focal_species_names: { + type: 'array', + items: { + type: 'string' + } + } + } + }, + permit: { + description: 'Survey Permit', + type: 'object', + properties: { + permits: { + type: 'array', + items: { + type: 'object', + required: ['permit_id', 'permit_number', 'permit_type'], + properties: { + permit_id: { + type: 'number', + minimum: 1 + }, + permit_number: { + type: 'string' + }, + permit_type: { + type: 'string' + } + } + } + } + } + }, + funding: { + description: 'Survey Funding Sources', + type: 'object', + properties: { + funding_sources: { + type: 'array', + items: { + type: 'integer' + } + } + } + }, + purpose_and_methodology: { + description: 'Survey Details', + type: 'object', + required: [ + 'field_method_id', + 'additional_details', + 'intended_outcome_id', + 'ecological_season_id', + 'vantage_code_ids', + 'revision_count' + ], + properties: { + field_method_id: { + type: 'number' + }, + additional_details: { + type: 'string', + nullable: true + }, + intended_outcome_id: { + type: 'number', + nullable: true + }, + ecological_season_id: { + type: 'number', + nullable: true + }, + vantage_code_ids: { + type: 'array', + items: { + type: 'number' + } + } + } + }, + proprietor: { + description: 'Survey Proprietor Details', + type: 'object', + nullable: true, + required: [ + 'survey_data_proprietary', + 'proprietor_type_name', + 'proprietary_data_category', + 'first_nations_name', + 'first_nations_id', + 'category_rationale', + 'proprietor_name', + 'disa_required' + ], + properties: { + survey_data_proprietary: { + type: 'string' + }, + proprietor_type_name: { + type: 'string', + nullable: true + }, + disa_required: { + type: 'string' + }, + first_nations_id: { + type: 'number', + nullable: true + }, + first_nations_name: { + type: 'string', + nullable: true + }, + proprietor_name: { + type: 'string', + nullable: true + }, + proprietary_data_category: { + type: 'number', + nullable: true + }, + category_rationale: { + type: 'string', + nullable: true + } + } + }, + location: { + description: 'Survey location Details', + type: 'object', + required: ['survey_area_name', 'geometry'], + properties: { + survey_area_name: { + type: 'string' + }, + geometry: { + type: 'array', + items: { + ...(geoJsonFeature as object) + } + } + } + } + } + } + } + } + } + } + } + } +}; + +export function getSurveyForUpdate(): RequestHandler { + return async (req, res) => { + const surveyId = Number(req.params.surveyId); + + const connection = getDBConnection(req['keycloak_token']); + + try { + await connection.open(); + + const surveyService = new SurveyService(connection); + + const surveyObject = await surveyService.getSurveyById(surveyId); + + let proprietor: any = surveyObject.proprietor; + + if (surveyObject.proprietor?.proprietor_type_id) { + proprietor['survey_data_proprietary'] = 'true'; + proprietor['proprietary_data_category'] = surveyObject.proprietor?.proprietor_type_id; + proprietor['first_nations_id'] = + surveyObject.proprietor?.first_nations_id !== null ? surveyObject.proprietor?.first_nations_id : 0; + proprietor['disa_required'] = surveyObject.proprietor?.disa_required === true ? 'true' : 'false'; + } else { + proprietor = { + survey_data_proprietary: 'false', + proprietor_type_name: '', + proprietary_data_category: 0, + first_nations_name: '', + first_nations_id: 0, + category_rationale: '', + proprietor_name: '', + disa_required: 'false' + }; + } + + const funding: any = []; + + if (surveyObject.funding && surveyObject.funding.funding_sources) { + surveyObject.funding.funding_sources.forEach((fund) => { + funding.push(fund.pfs_id); + }); + } + + const surveyData = { + ...surveyObject, + proprietor: proprietor, + funding: { + funding_sources: funding + }, + agreements: { + sedis_procedures_accepted: 'true', + foippa_requirements_accepted: 'true' + } + }; + + await connection.commit(); + + return res.status(200).json({ surveyData: surveyData }); + } catch (error) { + defaultLog.error({ label: 'getSurveyForView', message: 'error', error }); + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/upload.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/upload.test.ts new file mode 100644 index 0000000000..f90e56bbd7 --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/upload.test.ts @@ -0,0 +1,76 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import * as db from '../../../../../database/db'; +import { HTTPError } from '../../../../../errors/http-error'; +import { PlatformService } from '../../../../../services/platform-service'; +import { getMockDBConnection } from '../../../../../__mocks__/db'; +import * as upload from './upload'; + +chai.use(sinonChai); + +describe('uploadSurveyDataToBioHub', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should throw an error when a failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const expectedError = new Error('cannot process request'); + sinon.stub(PlatformService.prototype, 'uploadSurveyDataToBioHub').rejects(expectedError); + + const sampleReq = { + keycloak_token: {}, + body: {}, + params: { + projectId: 1, + surveyId: 2 + } + } as any; + + try { + const result = upload.uploadSurveyDataToBioHub(); + + await result(sampleReq, (null as unknown) as any, (null as unknown) as any); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTPError).message).to.equal(expectedError.message); + } + }); + + it('should upload Survey data to biohub', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const sampleReq = { + keycloak_token: {}, + body: {}, + params: { + projectId: 1, + surveyId: 2 + } + } as any; + + const uploadSurveyDataToBioHubStub = sinon.stub(PlatformService.prototype, 'uploadSurveyDataToBioHub').resolves(); + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + send: (response: any) => { + actualResult = response; + } + }; + } + }; + + const result = upload.uploadSurveyDataToBioHub(); + + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); + expect(actualResult).to.eql(undefined); + expect(uploadSurveyDataToBioHubStub).to.be.calledOnce; + }); +}); diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/upload.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/upload.ts new file mode 100644 index 0000000000..ffba2d66da --- /dev/null +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/upload.ts @@ -0,0 +1,90 @@ +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { SYSTEM_ROLE } from '../../../../../constants/roles'; +import { getDBConnection } from '../../../../../database/db'; +import { authorizeRequestHandler } from '../../../../../request-handlers/security/authorization'; +import { PlatformService } from '../../../../../services/platform-service'; +import { getLogger } from '../../../../../utils/logger'; + +const defaultLog = getLogger('/api/project/{projectId}/survey/{surveyId}/upload'); + +export const POST: Operation = [ + authorizeRequestHandler(() => { + return { + and: [ + { + validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN], + discriminator: 'SystemRole' + } + ] + }; + }), + uploadSurveyDataToBioHub() +]; + +POST.apiDoc = { + description: 'Upload survey/observation data to BioHub.', + tags: ['survey'], + security: [ + { + Bearer: [] + } + ], + parameters: [ + { + in: 'path', + name: 'projectId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + }, + { + in: 'path', + name: 'surveyId', + schema: { + type: 'integer', + minimum: 1 + }, + required: true + } + ], + responses: { + 200: { + description: 'Upload survey/observation data to BioHub OK.' + }, + 401: { + $ref: '#/components/responses/401' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +export function uploadSurveyDataToBioHub(): RequestHandler { + return async (req, res) => { + const projectId = Number(req.params.projectId); + const surveyId = Number(req.params.surveyId); + + const connection = getDBConnection(req['keycloak_token']); + + try { + await connection.open(); + + const platformService = new PlatformService(connection); + await platformService.uploadSurveyDataToBioHub(projectId, surveyId); + + await connection.commit(); + + return res.status(200).send(); + } catch (error) { + defaultLog.error({ label: 'uploadSurveyDataToBioHub', message: 'error', error }); + await connection.rollback(); + throw error; + } finally { + connection.release(); + } + }; +} diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/view.test.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/view.test.ts index d678d3bd8f..f3210c4fcf 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/view.test.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/view.test.ts @@ -4,7 +4,7 @@ import OpenAPIResponseValidator, { OpenAPIResponseValidatorArgs } from 'openapi- import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; +import { HTTPError } from '../../../../../errors/http-error'; import { SurveyObject } from '../../../../../models/survey-view'; import { SurveyService } from '../../../../../services/survey-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../../__mocks__/db'; @@ -26,7 +26,6 @@ describe('survey/{surveyId}/view', () => { end_date: '2020-05-05', biologist_first_name: 'first', biologist_last_name: 'last', - publish_date: '', revision_count: 1 }, species: { @@ -97,7 +96,6 @@ describe('survey/{surveyId}/view', () => { end_date: '2020-05-05', biologist_first_name: 'first', biologist_last_name: 'last', - publish_date: null, revision_count: 1 }, species: { diff --git a/api/src/paths/project/{projectId}/survey/{surveyId}/view.ts b/api/src/paths/project/{projectId}/survey/{surveyId}/view.ts index e2002768eb..0b76ee1be1 100644 --- a/api/src/paths/project/{projectId}/survey/{surveyId}/view.ts +++ b/api/src/paths/project/{projectId}/survey/{surveyId}/view.ts @@ -82,7 +82,6 @@ GET.apiDoc = { 'start_date', 'biologist_first_name', 'biologist_last_name', - 'publish_date', 'revision_count' ], properties: { @@ -104,11 +103,6 @@ GET.apiDoc = { biologist_last_name: { type: 'string' }, - publish_date: { - oneOf: [{ type: 'object' }, { type: 'string', format: 'date' }], - nullable: true, - description: 'Determines if the record has been published' - }, revision_count: { type: 'number' } @@ -150,15 +144,25 @@ GET.apiDoc = { permit: { description: 'Survey Permit', type: 'object', - required: ['permit_number', 'permit_type'], properties: { - permit_number: { - type: 'string', - nullable: true - }, - permit_type: { - type: 'string', - nullable: true + permits: { + type: 'array', + items: { + type: 'object', + required: ['permit_id', 'permit_number', 'permit_type'], + properties: { + permit_id: { + type: 'number', + minimum: 1 + }, + permit_number: { + type: 'string' + }, + permit_type: { + type: 'string' + } + } + } } } }, @@ -214,7 +218,6 @@ GET.apiDoc = { 'intended_outcome_id', 'ecological_season_id', 'vantage_code_ids', - 'surveyed_all_areas', 'revision_count' ], properties: { @@ -238,10 +241,6 @@ GET.apiDoc = { items: { type: 'number' } - }, - surveyed_all_areas: { - type: 'string', - enum: ['true', 'false'] } } }, diff --git a/api/src/paths/project/{projectId}/surveys.test.ts b/api/src/paths/project/{projectId}/surveys.test.ts new file mode 100644 index 0000000000..8ff90056bb --- /dev/null +++ b/api/src/paths/project/{projectId}/surveys.test.ts @@ -0,0 +1,105 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import * as db from '../../../database/db'; +import { HTTPError } from '../../../errors/http-error'; +import { SurveyObject } from '../../../models/survey-view'; +import { SurveyService } from '../../../services/survey-service'; +import { getMockDBConnection } from '../../../__mocks__/db'; +import * as surveys from './surveys'; + +chai.use(sinonChai); + +describe('surveys', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should throw a 400 error when projectId is missing in Path', async () => { + try { + const sampleReq = { + keycloak_token: {}, + body: {}, + params: { + projectId: null + } + } as any; + + const result = surveys.getSurveyList(); + + await result(sampleReq, (null as unknown) as any, (null as unknown) as any); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTPError).status).to.equal(400); + expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); + } + }); + + it('should throw an error when a failure occurs', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const expectedError = new Error('cannot process request'); + sinon.stub(SurveyService.prototype, 'getSurveyIdsByProjectId').rejects(expectedError); + + const sampleReq = { + keycloak_token: {}, + body: {}, + params: { + projectId: 1 + } + } as any; + + try { + const result = surveys.getSurveyList(); + + await result(sampleReq, (null as unknown) as any, (null as unknown) as any); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTPError).message).to.equal(expectedError.message); + } + }); + + it('should succeed with valid Id', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const getSurveyIdsByProjectIdStub = sinon + .stub(SurveyService.prototype, 'getSurveyIdsByProjectId') + .resolves([{ id: 1 }]); + + const getSurveysByIdsStub = sinon + .stub(SurveyService.prototype, 'getSurveysByIds') + .resolves([({ survey_details: { id: 1 } } as unknown) as SurveyObject]); + + const sampleReq = { + keycloak_token: {}, + body: {}, + params: { + projectId: 1 + } + } as any; + + const expectedResponse = [{ survey_details: { id: 1 } }]; + + let actualResult: any = null; + const sampleRes = { + status: () => { + return { + json: (response: any) => { + actualResult = response; + } + }; + } + }; + + const result = surveys.getSurveyList(); + + await result(sampleReq, (sampleRes as unknown) as any, (null as unknown) as any); + + expect(actualResult).to.eql(expectedResponse); + expect(getSurveyIdsByProjectIdStub).to.be.calledOnce; + expect(getSurveysByIdsStub).to.be.calledOnce; + }); +}); diff --git a/api/src/paths/project/{projectId}/surveys.ts b/api/src/paths/project/{projectId}/surveys.ts index 45ac18b757..dbe8bf7bd0 100644 --- a/api/src/paths/project/{projectId}/surveys.ts +++ b/api/src/paths/project/{projectId}/surveys.ts @@ -2,7 +2,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../constants/roles'; import { getDBConnection } from '../../../database/db'; -import { HTTP400 } from '../../../errors/custom-error'; +import { HTTP400 } from '../../../errors/http-error'; import { geoJsonFeature } from '../../../openapi/schemas/geoJson'; import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; import { SurveyService } from '../../../services/survey-service'; @@ -64,7 +64,6 @@ GET.apiDoc = { 'biologist_last_name', 'start_date', 'geometry', - 'publish_date', 'survey_area_name', 'survey_name', 'revision_count' @@ -95,11 +94,6 @@ GET.apiDoc = { ...(geoJsonFeature as object) } }, - publish_date: { - oneOf: [{ type: 'object' }, { type: 'string', format: 'date' }], - nullable: true, - description: 'Determines if the record has been published' - }, survey_area_name: { type: 'string' }, @@ -145,17 +139,27 @@ GET.apiDoc = { } }, permit: { - description: 'Survey Permit', type: 'object', - required: ['permit_number', 'permit_type'], + description: 'Survey Permit Information', properties: { - permit_number: { - type: 'string', - nullable: true - }, - permit_type: { - type: 'string', - nullable: true + permits: { + description: 'Survey Permits', + type: 'array', + items: { + required: ['permit_id', 'permit_number', 'permit_type'], + properties: { + permit_id: { + type: 'number', + minimum: 1 + }, + permit_number: { + type: 'string' + }, + permit_type: { + type: 'string' + } + } + } } } }, @@ -204,8 +208,7 @@ GET.apiDoc = { 'additional_details', 'intended_outcome_id', 'ecological_season_id', - 'vantage_code_ids', - 'surveyed_all_areas' + 'vantage_code_ids' ], properties: { field_method_id: { @@ -228,10 +231,6 @@ GET.apiDoc = { items: { type: 'number' } - }, - surveyed_all_areas: { - type: 'string', - enum: ['true', 'false'] } } }, @@ -273,6 +272,9 @@ GET.apiDoc = { type: 'string' } } + }, + docs_to_be_reviewed: { + type: 'number' } } } diff --git a/api/src/paths/project/{projectId}/update.test.ts b/api/src/paths/project/{projectId}/update.test.ts index 98767455ca..091bc841ff 100644 --- a/api/src/paths/project/{projectId}/update.test.ts +++ b/api/src/paths/project/{projectId}/update.test.ts @@ -3,8 +3,8 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../database/db'; -import { HTTPError } from '../../../errors/custom-error'; -import { GetPermitData } from '../../../models/project-view'; +import { HTTPError } from '../../../errors/http-error'; +import { PlatformService } from '../../../services/platform-service'; import { ProjectService } from '../../../services/project-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../__mocks__/db'; import * as update from './update'; @@ -47,7 +47,6 @@ describe('update', () => { const sampleResponse = { id: 1, coordinator: undefined, - permit: new GetPermitData(), project: undefined, objectives: undefined, location: undefined, @@ -61,7 +60,7 @@ describe('update', () => { }; mockReq.query = { - entity: ['permit'] + entity: ['coordinator'] }; sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); @@ -73,7 +72,7 @@ describe('update', () => { await requestHandler(mockReq, mockRes, mockNext); expect(mockRes.statusValue).to.equal(200); - expect(ProjectService.prototype.getProjectEntitiesById).called.calledWith(1, ['permit']); + expect(ProjectService.prototype.getProjectEntitiesById).called.calledWith(1, ['coordinator']); expect(mockRes.sendValue).to.equal(sampleResponse); }); }); @@ -145,12 +144,10 @@ describe('update', () => { start_date: '2022-02-02', end_date: '2022-02-30', objectives: 'my objectives', - publish_date: '2022-02-02', revision_count: 0 }, iucn: {}, contact: {}, - permit: {}, funding: {}, partnerships: {}, location: {} @@ -160,6 +157,8 @@ describe('update', () => { sinon.stub(ProjectService.prototype, 'updateProject').resolves(); + sinon.stub(PlatformService.prototype, 'submitDwCAMetadataPackage').resolves(); + const requestHandler = update.updateProject(); await requestHandler(mockReq, mockRes, mockNext); diff --git a/api/src/paths/project/{projectId}/update.ts b/api/src/paths/project/{projectId}/update.ts index fc6ae3ba0d..9c45bb8900 100644 --- a/api/src/paths/project/{projectId}/update.ts +++ b/api/src/paths/project/{projectId}/update.ts @@ -2,10 +2,11 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../../constants/roles'; import { getDBConnection } from '../../../database/db'; -import { HTTP400 } from '../../../errors/custom-error'; +import { HTTP400 } from '../../../errors/http-error'; import { geoJsonFeature } from '../../../openapi/schemas/geoJson'; import { projectIdResponseObject, projectUpdatePutRequestObject } from '../../../openapi/schemas/project'; import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; +import { PlatformService } from '../../../services/platform-service'; import { ProjectService } from '../../../services/project-service'; import { getLogger } from '../../../utils/logger'; @@ -28,7 +29,6 @@ export const GET: Operation = [ export enum GET_ENTITIES { coordinator = 'coordinator', - permit = 'permit', project = 'project', objectives = 'objectives', location = 'location', @@ -86,7 +86,6 @@ GET.apiDoc = { 'project_activities', 'start_date', 'end_date', - 'publish_date', 'revision_count' ], nullable: true, @@ -113,38 +112,11 @@ GET.apiDoc = { format: 'date', description: 'ISO 8601 date string for the project end date' }, - publish_date: { - description: 'Status of the project being published/unpublished', - format: 'date', - type: 'string' - }, revision_count: { type: 'number' } } }, - permit: { - type: 'object', - required: ['permits'], - nullable: true, - properties: { - permits: { - type: 'array', - items: { - title: 'Project permit', - type: 'object', - properties: { - permit_number: { - type: 'string' - }, - permit_type: { - type: 'string' - } - } - } - } - } - }, coordinator: { title: 'Project coordinator', type: 'object', @@ -430,7 +402,6 @@ PUT.apiDoc = { export interface IUpdateProject { coordinator: object | null; - permit: object | null; project: object | null; objectives: object | null; location: object | null; @@ -467,6 +438,14 @@ export function updateProject(): RequestHandler { await projectService.updateProject(projectId, entities); + try { + const platformService = new PlatformService(connection); + await platformService.submitDwCAMetadataPackage(projectId); + } catch (error) { + // Don't fail the rest of the endpoint if submitting metadata fails + defaultLog.error({ label: 'updateProject->submitDwCAMetadataPackage', message: 'error', error }); + } + await connection.commit(); return res.status(200).json({ id: projectId }); diff --git a/api/src/paths/project/{projectId}/view.test.ts b/api/src/paths/project/{projectId}/view.test.ts index fd1c223348..68a66c75ff 100644 --- a/api/src/paths/project/{projectId}/view.test.ts +++ b/api/src/paths/project/{projectId}/view.test.ts @@ -4,7 +4,7 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../database/db'; -import { HTTPError } from '../../../errors/custom-error'; +import { HTTPError } from '../../../errors/http-error'; import { ProjectService } from '../../../services/project-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../__mocks__/db'; import { GET, viewProject } from './view'; diff --git a/api/src/paths/project/{projectId}/view.ts b/api/src/paths/project/{projectId}/view.ts index 54ae6159fe..034afc7d3a 100644 --- a/api/src/paths/project/{projectId}/view.ts +++ b/api/src/paths/project/{projectId}/view.ts @@ -50,17 +50,7 @@ GET.apiDoc = { schema: { title: 'Project get response object, for view purposes', type: 'object', - required: [ - 'id', - 'project', - 'permit', - 'coordinator', - 'objectives', - 'location', - 'iucn', - 'funding', - 'partnerships' - ], + required: ['id', 'project', 'coordinator', 'objectives', 'location', 'iucn', 'funding', 'partnerships'], properties: { id: { description: 'Project id', @@ -76,8 +66,7 @@ GET.apiDoc = { 'start_date', 'end_date', 'comments', - 'completion_status', - 'publish_date' + 'completion_status' ], properties: { project_name: { @@ -109,32 +98,6 @@ GET.apiDoc = { completion_status: { description: 'Status of the project being active/completed', type: 'string' - }, - publish_date: { - description: 'Status of the project being published/unpublished', - format: 'date', - type: 'string' - } - } - }, - permit: { - type: 'object', - required: ['permits'], - properties: { - permits: { - type: 'array', - items: { - title: 'Project permit', - type: 'object', - properties: { - permit_number: { - type: 'string' - }, - permit_type: { - type: 'string' - } - } - } } } }, diff --git a/api/src/paths/public/project/list.test.ts b/api/src/paths/public/project/list.test.ts deleted file mode 100644 index e5305bdc5f..0000000000 --- a/api/src/paths/public/project/list.test.ts +++ /dev/null @@ -1,94 +0,0 @@ -import Ajv from 'ajv'; -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import * as db from '../../../database/db'; -import { HTTPError } from '../../../errors/custom-error'; -import { ProjectService } from '../../../services/project-service'; -import { getMockDBConnection, getRequestHandlerMocks } from '../../../__mocks__/db'; -import { GET, getPublicProjectsList } from './list'; - -chai.use(sinonChai); - -describe('list', () => { - describe('openapi schema', () => { - const ajv = new Ajv(); - - it('is valid openapi v3 schema', () => { - expect(ajv.validateSchema((GET.apiDoc as unknown) as object)).to.be.true; - }); - }); - - describe('getPublicProjectsList', () => { - afterEach(() => { - sinon.restore(); - }); - - it('returns an empty array if no project ids are found', async () => { - const dbConnectionObj = getMockDBConnection(); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon.stub(ProjectService.prototype, 'getPublicProjectsList').resolves([]); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - try { - const requestHandler = getPublicProjectsList(); - - await requestHandler(mockReq, mockRes, mockNext); - } catch (actualError) { - expect.fail(); - } - - expect(mockRes.statusValue).to.equal(200); - expect(mockRes.jsonValue).to.eql([]); - }); - - it('returns an array of projects', async () => { - const dbConnectionObj = getMockDBConnection(); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - const mockProject1 = ({ project: { project_id: 1 } } as unknown) as any; - const mockProject2 = ({ project: { project_id: 2 } } as unknown) as any; - - sinon.stub(ProjectService.prototype, 'getPublicProjectsList').resolves([mockProject1, mockProject2]); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - try { - const requestHandler = getPublicProjectsList(); - - await requestHandler(mockReq, mockRes, mockNext); - } catch (actualError) { - expect.fail(); - } - - expect(mockRes.jsonValue).to.eql([mockProject1, mockProject2]); - expect(mockRes.statusValue).to.equal(200); - }); - - it('catches error, calls rollback, and re-throws error', async () => { - const dbConnectionObj = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon.stub(ProjectService.prototype, 'getPublicProjectsList').rejects(new Error('a test error')); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - try { - const requestHandler = getPublicProjectsList(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect(dbConnectionObj.release).to.have.been.called; - - expect((actualError as HTTPError).message).to.equal('a test error'); - } - }); - }); -}); diff --git a/api/src/paths/public/project/list.ts b/api/src/paths/public/project/list.ts deleted file mode 100644 index 469b5d8815..0000000000 --- a/api/src/paths/public/project/list.ts +++ /dev/null @@ -1,70 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { getAPIUserDBConnection } from '../../../database/db'; -import { projectIdResponseObject } from '../../../openapi/schemas/project'; -import { ProjectService } from '../../../services/project-service'; -import { getLogger } from '../../../utils/logger'; - -const defaultLog = getLogger('paths/public/projects'); - -export const GET: Operation = [getPublicProjectsList()]; - -GET.apiDoc = { - description: 'Gets a list of public facing (published) projects.', - tags: ['public', 'projects'], - responses: { - 200: { - description: 'Project response object.', - content: { - 'application/json': { - schema: { - type: 'array', - items: { - ...(projectIdResponseObject as object) - } - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 403: { - $ref: '#/components/responses/401' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -/** - * Get all public facing (published) projects. - * - * @returns {RequestHandler} - */ -export function getPublicProjectsList(): RequestHandler { - return async (req, res) => { - const connection = getAPIUserDBConnection(); - - try { - await connection.open(); - - const projectService = new ProjectService(connection); - - const projects = await projectService.getPublicProjectsList(); - - await connection.commit(); - - return res.status(200).json(projects); - } catch (error) { - defaultLog.error({ label: 'getPublicProjectsList', message: 'error', error }); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/public/project/{projectId}/attachments/list.test.ts b/api/src/paths/public/project/{projectId}/attachments/list.test.ts deleted file mode 100644 index b6b9e455e3..0000000000 --- a/api/src/paths/public/project/{projectId}/attachments/list.test.ts +++ /dev/null @@ -1,236 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import * as db from '../../../../../database/db'; -import { HTTPError } from '../../../../../errors/custom-error'; -import public_queries from '../../../../../queries/public'; -import { getMockDBConnection } from '../../../../../__mocks__/db'; -import * as list from './list'; - -chai.use(sinonChai); - -describe('getPublicProjectAttachments', () => { - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - body: {}, - params: { - projectId: 1 - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 400 error when no projectId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = list.getPublicProjectAttachments(); - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw a 400 error when no sql statement returned for getProjectAttachmentsSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(public_queries, 'getPublicProjectAttachmentsSQL').returns(null); - - try { - const result = list.getPublicProjectAttachments(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - } - }); - - it('should return a list of project attachments where the lastModified is the create_date', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ - rows: [ - { - id: 13, - file_name: 'name1', - create_date: '2020-01-01', - update_date: '', - file_size: 50, - file_type: 'Image', - is_secured: null - } - ] - }) - .onSecondCall() - .resolves({ - rows: [ - { - id: 14, - file_name: 'name2', - create_date: '2020-01-01', - update_date: '', - file_size: 50, - file_type: 'Report', - is_secured: null - } - ] - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(public_queries, 'getPublicProjectAttachmentsSQL').returns(SQL`something`); - - const result = list.getPublicProjectAttachments(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.be.an('object'); - expect(actualResult).to.have.property('attachmentsList'); - - expect(actualResult.attachmentsList).to.be.an('array'); - expect(actualResult.attachmentsList).to.have.length(2); - - expect(actualResult.attachmentsList[0].fileName).to.equal('name1'); - expect(actualResult.attachmentsList[0].fileType).to.equal('Image'); - expect(actualResult.attachmentsList[0].id).to.equal(13); - expect(actualResult.attachmentsList[0].lastModified).to.match(new RegExp('2020-01-01T.*')); - expect(actualResult.attachmentsList[0].size).to.equal(50); - expect(actualResult.attachmentsList[0].securityToken).to.equal('false'); - - expect(actualResult.attachmentsList[1].fileName).to.equal('name2'); - expect(actualResult.attachmentsList[1].fileType).to.equal('Report'); - expect(actualResult.attachmentsList[1].id).to.equal(14); - expect(actualResult.attachmentsList[1].lastModified).to.match(new RegExp('2020-01-01T.*')); - expect(actualResult.attachmentsList[1].size).to.equal(50); - expect(actualResult.attachmentsList[1].securityToken).to.equal('false'); - }); - - it('should return a list of project attachments where the lastModified is the update_date', async () => { - const mockQuery = sinon.stub(); - - mockQuery - .onFirstCall() - .resolves({ - rows: [ - { - id: 13, - file_name: 'name1', - create_date: '2020-01-01', - update_date: '2020-04-04', - file_size: 50, - file_type: 'Image', - is_secured: null - } - ] - }) - .onSecondCall() - .resolves({ - rows: [ - { - id: 14, - file_name: 'name2', - create_date: '2020-01-01', - update_date: '2020-04-04', - file_size: 50, - file_type: 'Report', - is_secured: null - } - ] - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(public_queries, 'getPublicProjectAttachmentsSQL').returns(SQL`something`); - - const result = list.getPublicProjectAttachments(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.be.an('object'); - expect(actualResult).to.have.property('attachmentsList'); - - expect(actualResult.attachmentsList).to.be.an('array'); - expect(actualResult.attachmentsList).to.have.length(2); - - expect(actualResult.attachmentsList[0].fileName).to.equal('name1'); - expect(actualResult.attachmentsList[0].fileType).to.equal('Image'); - expect(actualResult.attachmentsList[0].id).to.equal(13); - expect(actualResult.attachmentsList[0].lastModified).to.match(new RegExp('2020-04-04T.*')); - expect(actualResult.attachmentsList[0].size).to.equal(50); - expect(actualResult.attachmentsList[0].securityToken).to.equal('false'); - - expect(actualResult.attachmentsList[1].fileName).to.equal('name2'); - expect(actualResult.attachmentsList[1].fileType).to.equal('Report'); - expect(actualResult.attachmentsList[1].id).to.equal(14); - expect(actualResult.attachmentsList[1].lastModified).to.match(new RegExp('2020-04-04T.*')); - expect(actualResult.attachmentsList[1].size).to.equal(50); - expect(actualResult.attachmentsList[1].securityToken).to.equal('false'); - }); - - it('should return null if the project has no attachments, on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: undefined }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(public_queries, 'getPublicProjectAttachmentsSQL').returns(SQL`something`); - - const result = list.getPublicProjectAttachments(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.be.null; - }); -}); diff --git a/api/src/paths/public/project/{projectId}/attachments/list.ts b/api/src/paths/public/project/{projectId}/attachments/list.ts deleted file mode 100644 index 34e101599d..0000000000 --- a/api/src/paths/public/project/{projectId}/attachments/list.ts +++ /dev/null @@ -1,129 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { getAPIUserDBConnection } from '../../../../../database/db'; -import { HTTP400 } from '../../../../../errors/custom-error'; -import { GetPublicAttachmentsData } from '../../../../../models/public/project'; -import { queries } from '../../../../../queries/queries'; -import { getLogger } from '../../../../../utils/logger'; - -const defaultLog = getLogger('/api/public/project/{projectId}/attachments/list'); - -export const GET: Operation = [getPublicProjectAttachments()]; - -GET.apiDoc = { - description: 'Fetches a list of attachments of a public (published) project.', - tags: ['attachments'], - parameters: [ - { - in: 'path', - name: 'projectId', - schema: { - type: 'number' - }, - required: true - } - ], - responses: { - 200: { - description: 'Project get response file description array.', - content: { - 'application/json': { - schema: { - type: 'object', - properties: { - attachmentsList: { - type: 'array', - items: { - type: 'object', - required: ['id', 'fileName', 'fileType', 'lastModified', 'securityToken', 'size'], - properties: { - id: { - type: 'number' - }, - fileName: { - type: 'string' - }, - fileType: { - type: 'string' - }, - lastModified: { - type: 'string' - }, - securedToken: { - type: 'string', - enum: ['true', 'false'] - }, - size: { - type: 'number' - } - } - } - } - } - } - } - } - }, - 401: { - $ref: '#/components/responses/401' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function getPublicProjectAttachments(): RequestHandler { - return async (req, res) => { - defaultLog.debug({ label: 'Get attachments list', message: 'params', req_params: req.params }); - - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - const connection = getAPIUserDBConnection(); - - try { - const getPublicProjectAttachmentsSQLStatement = queries.public.getPublicProjectAttachmentsSQL( - Number(req.params.projectId) - ); - const getPublicProjectReportAttachmentsSQLStatement = queries.public.getPublicProjectReportAttachmentsSQL( - Number(req.params.projectId) - ); - - if (!getPublicProjectAttachmentsSQLStatement || !getPublicProjectReportAttachmentsSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - await connection.open(); - - const attachmentsData = await connection.query( - getPublicProjectAttachmentsSQLStatement.text, - getPublicProjectAttachmentsSQLStatement.values - ); - - const reportAttachmentsData = await connection.query( - getPublicProjectReportAttachmentsSQLStatement.text, - getPublicProjectReportAttachmentsSQLStatement.values - ); - - await connection.commit(); - - const getAttachmentsData = - (attachmentsData && - reportAttachmentsData && - attachmentsData.rows && - reportAttachmentsData.rows && - new GetPublicAttachmentsData([...attachmentsData.rows, ...reportAttachmentsData.rows])) || - null; - - return res.status(200).json(getAttachmentsData); - } catch (error) { - defaultLog.error({ label: 'getPublicProjectAttachments', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/public/project/{projectId}/attachments/{attachmentId}/getSignedUrl.test.ts b/api/src/paths/public/project/{projectId}/attachments/{attachmentId}/getSignedUrl.test.ts deleted file mode 100644 index 12731ccb42..0000000000 --- a/api/src/paths/public/project/{projectId}/attachments/{attachmentId}/getSignedUrl.test.ts +++ /dev/null @@ -1,232 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import { ATTACHMENT_TYPE } from '../../../../../../constants/attachments'; -import * as db from '../../../../../../database/db'; -import { HTTPError } from '../../../../../../errors/custom-error'; -import public_queries from '../../../../../../queries/public'; -import * as file_utils from '../../../../../../utils/file-utils'; -import { getMockDBConnection } from '../../../../../../__mocks__/db'; -import * as get_signed_url from './getSignedUrl'; - -chai.use(sinonChai); - -describe('getAttachmentSignedURL', () => { - afterEach(() => { - sinon.restore(); - }); - - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - params: { - projectId: 1, - attachmentId: 2 - }, - query: { - attachmentType: 'Other' - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - it('should throw an error when projectId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = get_signed_url.getAttachmentSignedURL(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw an error when attachmentId is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = get_signed_url.getAttachmentSignedURL(); - - await result( - { ...sampleReq, params: { ...sampleReq.params, attachmentId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should throw an error when attachmentType is missing', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = get_signed_url.getAttachmentSignedURL(); - - await result( - { ...sampleReq, query: { ...sampleReq.query, attachmentType: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required query param `attachmentType`'); - } - }); - - it('should return null when getting signed url from S3 fails', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [{ key: 's3Key' }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(public_queries, 'getPublicProjectAttachmentS3KeySQL').returns(SQL`some query`); - sinon.stub(file_utils, 'getS3SignedURL').resolves(null); - - const result = get_signed_url.getAttachmentSignedURL(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.equal(null); - }); - - describe('non report attachments', () => { - it('should throw a 400 error when no sql statement returned', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(public_queries, 'getPublicProjectAttachmentS3KeySQL').returns(null); - - try { - const result = get_signed_url.getAttachmentSignedURL(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build attachment S3 key SQLstatement'); - } - }); - - it('should return the attachment signed url response on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [{ key: 's3Key' }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(public_queries, 'getPublicProjectAttachmentS3KeySQL').returns(SQL`some query`); - sinon.stub(file_utils, 'getS3SignedURL').resolves('myurlsigned.com'); - - const result = get_signed_url.getAttachmentSignedURL(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.eql('myurlsigned.com'); - }); - }); - - describe('report attachments', () => { - it('should throw a 400 error when no sql statement returned', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(public_queries, 'getPublicProjectReportAttachmentS3KeySQL').returns(null); - - try { - const result = get_signed_url.getAttachmentSignedURL(); - - await result( - { - ...sampleReq, - query: { - attachmentType: ATTACHMENT_TYPE.REPORT - } - }, - sampleRes as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build report attachment S3 key SQLstatement'); - } - }); - - it('should return the report attachment signed url response on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [{ key: 's3Key' }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(public_queries, 'getPublicProjectReportAttachmentS3KeySQL').returns(SQL`some query`); - sinon.stub(file_utils, 'getS3SignedURL').resolves('myurlsigned.com'); - - const result = get_signed_url.getAttachmentSignedURL(); - - await result( - { - ...sampleReq, - query: { - attachmentType: ATTACHMENT_TYPE.REPORT - } - }, - sampleRes as any, - (null as unknown) as any - ); - - expect(actualResult).to.eql('myurlsigned.com'); - }); - }); -}); diff --git a/api/src/paths/public/project/{projectId}/attachments/{attachmentId}/getSignedUrl.ts b/api/src/paths/public/project/{projectId}/attachments/{attachmentId}/getSignedUrl.ts deleted file mode 100644 index 50bec442e7..0000000000 --- a/api/src/paths/public/project/{projectId}/attachments/{attachmentId}/getSignedUrl.ts +++ /dev/null @@ -1,174 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { ATTACHMENT_TYPE } from '../../../../../../constants/attachments'; -import { getAPIUserDBConnection, IDBConnection } from '../../../../../../database/db'; -import { HTTP400 } from '../../../../../../errors/custom-error'; -import { queries } from '../../../../../../queries/queries'; -import { getS3SignedURL } from '../../../../../../utils/file-utils'; -import { getLogger } from '../../../../../../utils/logger'; - -const defaultLog = getLogger('/api/public/project/{projectId}/attachments/{attachmentId}/getSignedUrl'); - -export const GET: Operation = [getAttachmentSignedURL()]; - -GET.apiDoc = { - description: 'Retrieves the signed url of a public project attachment.', - tags: ['attachment'], - parameters: [ - { - in: 'path', - name: 'projectId', - schema: { - type: 'number' - }, - required: true - }, - { - in: 'path', - name: 'attachmentId', - schema: { - type: 'number' - }, - required: true - }, - { - in: 'query', - name: 'attachmentType', - schema: { - type: 'string', - enum: ['Report', 'Other'] - }, - required: true - } - ], - responses: { - 200: { - description: 'Response containing the signed url of an attachment.', - content: { - 'text/plain': { - schema: { - type: 'string' - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function getAttachmentSignedURL(): RequestHandler { - return async (req, res) => { - defaultLog.debug({ - label: 'getAttachmentSignedURL', - message: 'params', - req_params: req.params, - req_query: req.query - }); - - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - - if (!req.query.attachmentType) { - throw new HTTP400('Missing required query param `attachmentType`'); - } - - const connection = getAPIUserDBConnection(); - - await connection.open(); - - try { - await connection.open(); - - let s3Key; - - if (req.query.attachmentType === ATTACHMENT_TYPE.REPORT) { - s3Key = await getPublicProjectReportAttachmentS3Key( - Number(req.params.projectId), - Number(req.params.attachmentId), - connection - ); - } else { - s3Key = await getPublicProjectAttachmentS3Key( - Number(req.params.projectId), - Number(req.params.attachmentId), - connection - ); - } - - await connection.commit(); - - const s3SignedUrl = s3Key && (await getS3SignedURL(s3Key)); - - if (!s3SignedUrl) { - return res.status(200).json(null); - } - - return res.status(200).json(s3SignedUrl); - } catch (error) { - defaultLog.error({ label: 'getAttachmentSignedURL', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} - -export const getPublicProjectAttachmentS3Key = async ( - projectId: number, - attachmentId: number, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.public.getPublicProjectAttachmentS3KeySQL(projectId, attachmentId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build attachment S3 key SQLstatement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to get attachment S3 key'); - } - - return response.rows[0].key; -}; - -export const getPublicProjectReportAttachmentS3Key = async ( - projectId: number, - attachmentId: number, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.public.getPublicProjectReportAttachmentS3KeySQL(projectId, attachmentId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build report attachment S3 key SQLstatement'); - } - - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response?.rows?.[0]) { - throw new HTTP400('Failed to get attachment S3 key'); - } - - return response.rows[0].key; -}; diff --git a/api/src/paths/public/project/{projectId}/attachments/{attachmentId}/metadata/get.test.ts b/api/src/paths/public/project/{projectId}/attachments/{attachmentId}/metadata/get.test.ts deleted file mode 100644 index 46d5acd147..0000000000 --- a/api/src/paths/public/project/{projectId}/attachments/{attachmentId}/metadata/get.test.ts +++ /dev/null @@ -1,161 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import * as db from '../../../../../../../database/db'; -import { HTTPError } from '../../../../../../../errors/custom-error'; -import public_queries from '../../../../../../../queries/public'; -import { getMockDBConnection } from '../../../../../../../__mocks__/db'; -import * as get_project_metadata from './get'; - -chai.use(sinonChai); - -describe('gets metadata for a project report', () => { - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {}, - body: {}, - params: { - projectId: 1, - attachmentId: 1 - } - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 400 error when no projectId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = get_project_metadata.getPublicReportMetaData(); - await result( - { ...sampleReq, params: { ...sampleReq.params, projectId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `projectId`'); - } - }); - - it('should throw a 400 error when no attachmentId is provided', async () => { - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - try { - const result = get_project_metadata.getPublicReportMetaData(); - await result( - { ...sampleReq, params: { ...sampleReq.params, attachmentId: null } }, - (null as unknown) as any, - (null as unknown) as any - ); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param `attachmentId`'); - } - }); - - it('should throw a 400 error when no sql statement returned for getProjectReportAttachmentSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(public_queries, 'getPublicProjectReportAttachmentSQL').returns(null); - - try { - const result = get_project_metadata.getPublicReportMetaData(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build metadata SQLStatement'); - } - }); - - it('should throw a 400 error when no sql statement returned for getProjectReportAuthorsSQL', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - sinon.stub(public_queries, 'getProjectReportAuthorsSQL').returns(null); - - try { - const result = get_project_metadata.getPublicReportMetaData(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build metadata SQLStatement'); - } - }); - - it('should return a project report metadata, on success', async () => { - const mockQuery = sinon.stub(); - - mockQuery.onCall(0).resolves({ - rowCount: 1, - rows: [ - { - attachment_id: 1, - title: 'My report', - update_date: '2020-10-10', - description: 'some description', - year_published: 2020, - revision_count: '1' - } - ] - }); - mockQuery.onCall(1).resolves({ rowCount: 1, rows: [{ first_name: 'John', last_name: 'Smith' }] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - - sinon.stub(public_queries, 'getPublicProjectReportAttachmentSQL').returns(SQL`something`); - sinon.stub(public_queries, 'getProjectReportAuthorsSQL').returns(SQL`something`); - - const result = get_project_metadata.getPublicReportMetaData(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.be.eql({ - attachment_id: 1, - title: 'My report', - last_modified: '2020-10-10', - description: 'some description', - year_published: 2020, - revision_count: '1', - authors: [{ first_name: 'John', last_name: 'Smith' }] - }); - }); -}); diff --git a/api/src/paths/public/project/{projectId}/attachments/{attachmentId}/metadata/get.ts b/api/src/paths/public/project/{projectId}/attachments/{attachmentId}/metadata/get.ts deleted file mode 100644 index 7c5a0c0858..0000000000 --- a/api/src/paths/public/project/{projectId}/attachments/{attachmentId}/metadata/get.ts +++ /dev/null @@ -1,177 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { getAPIUserDBConnection } from '../../../../../../../database/db'; -import { HTTP400 } from '../../../../../../../errors/custom-error'; -import { GetReportAttachmentMetadata } from '../../../../../../../models/project-survey-attachments'; -import { queries } from '../../../../../../../queries/queries'; -import { getLogger } from '../../../../../../../utils/logger'; - -const defaultLog = getLogger('/api/project/{projectId}/attachments/{attachmentId}/getSignedUrl'); - -export const GET: Operation = [getPublicReportMetaData()]; - -GET.apiDoc = { - description: 'Retrieves the report metadata of a project attachment if filetype is Report.', - tags: ['attachment'], - parameters: [ - { - in: 'path', - name: 'projectId', - schema: { - type: 'number' - }, - required: true - }, - { - in: 'path', - name: 'attachmentId', - schema: { - type: 'number' - }, - required: true - } - ], - responses: { - 200: { - description: 'Response of the report metadata', - content: { - 'application/json': { - schema: { - title: 'metadata get response object', - type: 'object', - required: [ - 'attachment_id', - 'title', - 'last_modified', - 'description', - 'year_published', - 'revision_count', - 'authors' - ], - properties: { - attachment_id: { - description: 'Report metadata attachment id', - type: 'number' - }, - title: { - description: 'Report metadata attachment title ', - type: 'string' - }, - last_modified: { - description: 'Report metadata last modified', - type: 'string' - }, - description: { - description: 'Report metadata description', - type: 'string' - }, - year_published: { - description: 'Report metadata year published', - type: 'number' - }, - revision_count: { - description: 'Report metadata revision count', - type: 'number' - }, - authors: { - description: 'Report metadata author object', - type: 'array', - items: { - type: 'object', - required: ['first_name', 'last_name'], - properties: { - first_name: { - type: 'string' - }, - last_name: { - type: 'string' - } - } - } - } - } - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/403' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function getPublicReportMetaData(): RequestHandler { - return async (req, res) => { - defaultLog.debug({ - label: 'getSurveyReportMetaData', - message: 'params', - req_params: req.params, - req_query: req.query - }); - - if (!req.params.projectId) { - throw new HTTP400('Missing required path param `projectId`'); - } - - if (!req.params.attachmentId) { - throw new HTTP400('Missing required path param `attachmentId`'); - } - - const connection = getAPIUserDBConnection(); - - try { - const getPublicProjectReportAttachmentSQLStatement = queries.public.getPublicProjectReportAttachmentSQL( - Number(req.params.projectId), - Number(req.params.attachmentId) - ); - - const getProjectReportAuthorsSQLStatement = queries.public.getProjectReportAuthorsSQL( - Number(req.params.attachmentId) - ); - - if (!getPublicProjectReportAttachmentSQLStatement || !getProjectReportAuthorsSQLStatement) { - throw new HTTP400('Failed to build metadata SQLStatement'); - } - - await connection.open(); - - const reportMetaData = await connection.query( - getPublicProjectReportAttachmentSQLStatement.text, - getPublicProjectReportAttachmentSQLStatement.values - ); - - const reportAuthorsData = await connection.query( - getProjectReportAuthorsSQLStatement.text, - getProjectReportAuthorsSQLStatement.values - ); - - await connection.commit(); - - const getReportMetaData = reportMetaData && reportMetaData.rows[0]; - - const getReportAuthorsData = reportAuthorsData && reportAuthorsData.rows; - - const reportMetaObj = new GetReportAttachmentMetadata(getReportMetaData, getReportAuthorsData); - - return res.status(200).json(reportMetaObj); - } catch (error) { - defaultLog.error({ label: 'getPublicReportMetadata', message: 'error', error }); - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/public/project/{projectId}/view.test.ts b/api/src/paths/public/project/{projectId}/view.test.ts deleted file mode 100644 index dab712bc12..0000000000 --- a/api/src/paths/public/project/{projectId}/view.test.ts +++ /dev/null @@ -1,72 +0,0 @@ -import Ajv from 'ajv'; -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import * as db from '../../../../database/db'; -import { HTTPError } from '../../../../errors/custom-error'; -import { ProjectService } from '../../../../services/project-service'; -import { getMockDBConnection, getRequestHandlerMocks } from '../../../../__mocks__/db'; -import { GET, getPublicProjectForView } from './view'; - -chai.use(sinonChai); - -describe('project/{projectId}/view', () => { - describe('openapi schema', () => { - const ajv = new Ajv(); - - it('is valid openapi v3 schema', () => { - expect(ajv.validateSchema((GET.apiDoc as unknown) as object)).to.be.true; - }); - }); - - describe('viewPublicProject', () => { - afterEach(() => { - sinon.restore(); - }); - - it('fetches a project', async () => { - const dbConnectionObj = getMockDBConnection(); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - const viewProjectResult = { id: 1 }; - - sinon.stub(ProjectService.prototype, 'getPublicProjectById').resolves(viewProjectResult as any); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - try { - const requestHandler = getPublicProjectForView(); - - await requestHandler(mockReq, mockRes, mockNext); - } catch (actualError) { - expect.fail(); - } - - expect(mockRes.statusValue).to.equal(200); - expect(mockRes.jsonValue).to.eql(viewProjectResult); - }); - - it('catches and re-throws error', async () => { - const dbConnectionObj = getMockDBConnection({ release: sinon.stub() }); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon.stub(ProjectService.prototype, 'getPublicProjectById').rejects(new Error('a test error')); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - try { - const requestHandler = getPublicProjectForView(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect(dbConnectionObj.release).to.have.been.called; - - expect((actualError as HTTPError).message).to.equal('a test error'); - } - }); - }); -}); diff --git a/api/src/paths/public/project/{projectId}/view.ts b/api/src/paths/public/project/{projectId}/view.ts deleted file mode 100644 index 7485a53408..0000000000 --- a/api/src/paths/public/project/{projectId}/view.ts +++ /dev/null @@ -1,315 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { getAPIUserDBConnection } from '../../../../database/db'; -import { geoJsonFeature } from '../../../../openapi/schemas/geoJson'; -import { ProjectService } from '../../../../services/project-service'; -import { getLogger } from '../../../../utils/logger'; - -const defaultLog = getLogger('paths/public/project/{projectId}/view'); - -export const GET: Operation = [getPublicProjectForView()]; - -GET.apiDoc = { - description: 'Get a public (published) project, for view-only purposes.', - tags: ['project'], - parameters: [ - { - in: 'path', - name: 'projectId', - schema: { - type: 'number' - }, - required: true - } - ], - responses: { - 200: { - description: 'Project with matching projectId.', - content: { - 'application/json': { - schema: { - title: 'Project get response object, for view purposes', - type: 'object', - required: [ - 'id', - 'project', - 'permit', - 'coordinator', - 'objectives', - 'location', - 'iucn', - 'funding', - 'partnerships' - ], - properties: { - id: { - description: 'Project id', - type: 'number' - }, - project: { - description: 'Basic project metadata', - type: 'object', - required: [ - 'project_name', - 'project_type', - 'project_activities', - 'start_date', - 'end_date', - 'comments', - 'completion_status', - 'publish_date' - ], - properties: { - project_name: { - type: 'string' - }, - project_type: { - type: 'number' - }, - project_activities: { - type: 'array', - items: { - type: 'number' - } - }, - start_date: { - type: 'string', - format: 'date', - description: 'ISO 8601 date string for the project start date' - }, - end_date: { - type: 'string', - format: 'date', - description: 'ISO 8601 date string for the project end date' - }, - comments: { - type: 'string', - description: 'Comments' - }, - completion_status: { - description: 'Status of the project being active/completed', - type: 'string' - }, - publish_date: { - description: 'Status of the project being published/unpublished', - format: 'date', - type: 'string' - } - } - }, - permit: { - type: 'object', - required: ['permits'], - properties: { - permits: { - type: 'array', - items: { - title: 'Project permit', - type: 'object', - properties: { - permit_number: { - type: 'string' - }, - permit_type: { - type: 'string' - } - } - } - } - } - }, - coordinator: { - title: 'Project coordinator', - type: 'object', - required: ['first_name', 'last_name', 'email_address', 'coordinator_agency', 'share_contact_details'], - properties: { - first_name: { - type: 'string' - }, - last_name: { - type: 'string' - }, - email_address: { - type: 'string' - }, - coordinator_agency: { - type: 'string' - }, - share_contact_details: { - type: 'string', - enum: ['true', 'false'] - } - } - }, - objectives: { - description: 'The project objectives and caveats', - type: 'object', - required: ['objectives', 'caveats'], - properties: { - objectives: { - type: 'string' - }, - caveats: { - type: 'string' - } - } - }, - location: { - description: 'The project location object', - type: 'object', - required: ['location_description', 'geometry'], - properties: { - location_description: { - type: 'string' - }, - geometry: { - type: 'array', - items: { - ...(geoJsonFeature as object) - } - } - } - }, - iucn: { - description: 'The International Union for Conservation of Nature number', - type: 'object', - required: ['classificationDetails'], - properties: { - classificationDetails: { - type: 'array', - items: { - type: 'object', - properties: { - classification: { - type: 'number' - }, - subClassification1: { - type: 'number' - }, - subClassification2: { - type: 'number' - } - } - } - } - } - }, - funding: { - description: 'The project funding details', - type: 'object', - required: ['fundingSources'], - properties: { - fundingSources: { - type: 'array', - items: { - type: 'object', - properties: { - id: { - type: 'number' - }, - agency_id: { - type: 'number' - }, - investment_action_category: { - type: 'number' - }, - investment_action_category_name: { - type: 'string' - }, - agency_name: { - type: 'string' - }, - funding_amount: { - type: 'number' - }, - start_date: { - type: 'string', - format: 'date', - description: 'ISO 8601 date string for the funding start date' - }, - end_date: { - type: 'string', - format: 'date', - description: 'ISO 8601 date string for the funding end date' - }, - agency_project_id: { - type: 'string', - nullable: true - }, - revision_count: { - type: 'number' - } - } - } - } - } - }, - partnerships: { - description: 'The project partners', - type: 'object', - required: ['indigenous_partnerships', 'stakeholder_partnerships'], - properties: { - indigenous_partnerships: { - type: 'array', - items: { - type: 'number' - } - }, - stakeholder_partnerships: { - type: 'array', - items: { - type: 'string' - } - } - } - } - } - } - } - } - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/401' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -/** - * Get a public (published) project by its id. - * - * @returns {RequestHandler} - */ -export function getPublicProjectForView(): RequestHandler { - return async (req, res) => { - const connection = getAPIUserDBConnection(); - - try { - await connection.open(); - - const projectService = new ProjectService(connection); - - const result = await projectService.getPublicProjectById(Number(req.params.projectId)); - - await connection.commit(); - - return res.status(200).json(result); - } catch (error) { - defaultLog.error({ label: 'getPublicProjectForView', message: 'error', error }); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/public/search.test.ts b/api/src/paths/public/search.test.ts deleted file mode 100644 index 1486e6a0bc..0000000000 --- a/api/src/paths/public/search.test.ts +++ /dev/null @@ -1,140 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import * as db from '../../database/db'; -import { HTTPError } from '../../errors/custom-error'; -import public_queries from '../../queries/public'; -import { getMockDBConnection } from '../../__mocks__/db'; -import * as search from './search'; - -chai.use(sinonChai); - -describe('search', () => { - const dbConnectionObj = getMockDBConnection(); - - const sampleReq = { - keycloak_token: {} - } as any; - - let actualResult: any = null; - - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; - - describe('getSearchResults', () => { - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 400 error when fails to get sql statement', async () => { - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - sinon.stub(public_queries, 'getPublicSpatialSearchResultsSQL').returns(null); - - try { - const result = search.getSearchResults(); - - await result(sampleReq, (null as unknown) as any, (null as unknown) as any); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - } - }); - - it('should return null when no response returned from getPublicSpatialSearchResultsSQL', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: null }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - sinon.stub(public_queries, 'getPublicSpatialSearchResultsSQL').returns(SQL`something`); - - const result = search.getSearchResults(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.equal(null); - }); - - it('should return rows on success when result is empty', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: [] }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - sinon.stub(public_queries, 'getPublicSpatialSearchResultsSQL').returns(SQL`something`); - - const result = search.getSearchResults(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.eql([]); - }); - - it('should return rows on success', async () => { - const searchList = [ - { - id: 1, - name: 'name', - geometry: '{"type":"Point","coordinates":[50.7,60.9]}' - } - ]; - - const mockQuery = sinon.stub(); - - mockQuery.resolves({ rows: searchList }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - }, - query: mockQuery - }); - sinon.stub(public_queries, 'getPublicSpatialSearchResultsSQL').returns(SQL`something`); - - const result = search.getSearchResults(); - - await result(sampleReq, sampleRes as any, (null as unknown) as any); - - expect(actualResult).to.eql([ - { - id: searchList[0].id, - name: searchList[0].name, - geometry: [ - { - type: 'Point', - coordinates: [50.7, 60.9] - } - ] - } - ]); - }); - }); -}); diff --git a/api/src/paths/public/search.ts b/api/src/paths/public/search.ts deleted file mode 100644 index 830e439b64..0000000000 --- a/api/src/paths/public/search.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { getAPIUserDBConnection } from '../../database/db'; -import { HTTP400 } from '../../errors/custom-error'; -import { searchResponseObject } from '../../openapi/schemas/search'; -import { queries } from '../../queries/queries'; -import { getLogger } from '../../utils/logger'; -import { _extractResults } from '../search'; - -const defaultLog = getLogger('paths/public/search'); - -export const GET: Operation = [getSearchResults()]; - -GET.apiDoc = { - description: 'Gets a list of published project geometries for public view', - tags: ['projects'], - responses: { - 200: { - description: 'Spatial search response object.', - content: { - 'application/json': { - schema: { - type: 'array', - items: { - ...(searchResponseObject as object) - } - } - } - } - }, - 401: { - $ref: '#/components/responses/401' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -/** - * Get search results for public view (spatially based on boundary). - * - * @returns {RequestHandler} - */ -export function getSearchResults(): RequestHandler { - return async (req, res) => { - const connection = getAPIUserDBConnection(); - - try { - const getSpatialSearchResultsSQLStatement = queries.public.getPublicSpatialSearchResultsSQL(); - - if (!getSpatialSearchResultsSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - await connection.open(); - - const response = await connection.query( - getSpatialSearchResultsSQLStatement.text, - getSpatialSearchResultsSQLStatement.values - ); - - await connection.commit(); - - if (!response || !response.rows) { - return res.status(200).json(null); - } - - const result: any[] = _extractResults(response.rows); - - return res.status(200).json(result); - } catch (error) { - defaultLog.error({ label: 'getSearchResults', message: 'error', error }); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/resources/list.test.ts b/api/src/paths/resources/list.test.ts new file mode 100644 index 0000000000..0a63dd2c7a --- /dev/null +++ b/api/src/paths/resources/list.test.ts @@ -0,0 +1,342 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import OpenAPIResponseValidator, { OpenAPIResponseValidatorArgs } from 'openapi-response-validator'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { HTTPError } from '../../errors/http-error'; +import * as fileUtils from '../../utils/file-utils'; +import { getRequestHandlerMocks } from '../../__mocks__/db'; +import { GET, listResources } from './list'; + +chai.use(sinonChai); + +describe('listResources', () => { + beforeEach(() => { + process.env.OBJECT_STORE_URL = 's3.host.example.com'; + process.env.OBJECT_STORE_BUCKET_NAME = 'test-bucket'; + }); + + afterEach(() => { + sinon.restore(); + }); + + it('returns an empty array if no resources are found', async () => { + const listFilesStub = sinon.stub(fileUtils, 'listFilesFromS3').resolves({ + Contents: [] + }); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + const requestHandler = listResources(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(listFilesStub).to.have.been.calledWith('templates/Current'); + expect(mockRes.jsonValue).to.eql({ files: [] }); + expect(mockRes.statusValue).to.equal(200); + }); + + it('returns an array of resources', async () => { + const mockMetadata = { + ['key1']: { + 'template-name': 'name1', + 'template-type': 'type1', + species: 'species1' + }, + ['key2']: { + 'template-name': 'name2', + 'template-type': 'type2', + species: 'species2' + }, + ['key3']: { + 'template-name': 'name3', + 'template-type': 'type3', + species: 'species3' + } + }; + + sinon.stub(fileUtils, 'getObjectMeta').callsFake((key: string) => { + return Promise.resolve({ + Metadata: mockMetadata[key] + }); + }); + + const listFilesStub = sinon.stub(fileUtils, 'listFilesFromS3').resolves({ + Contents: [ + { + Key: 'key1', + LastModified: new Date('2023-01-01'), + Size: 5 + }, + { + Key: 'key2', + LastModified: new Date('2023-01-02'), + Size: 10 + }, + { + Key: 'key3', + LastModified: new Date('2023-01-03'), + Size: 15 + } + ] + }); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + const requestHandler = listResources(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(listFilesStub).to.have.been.calledWith('templates/Current'); + expect(mockRes.jsonValue).to.eql({ + files: [ + { + fileName: 'key1', + url: 's3.host.example.com/test-bucket/key1', + lastModified: new Date('2023-01-01').toISOString(), + fileSize: 5, + metadata: { + templateName: 'name1', + templateType: 'type1', + species: 'species1' + } + }, + { + fileName: 'key2', + url: 's3.host.example.com/test-bucket/key2', + lastModified: new Date('2023-01-02').toISOString(), + fileSize: 10, + metadata: { + templateName: 'name2', + templateType: 'type2', + species: 'species2' + } + }, + { + fileName: 'key3', + url: 's3.host.example.com/test-bucket/key3', + lastModified: new Date('2023-01-03').toISOString(), + fileSize: 15, + metadata: { + templateName: 'name3', + templateType: 'type3', + species: 'species3' + } + } + ] + }); + expect(mockRes.statusValue).to.equal(200); + }); + + it('should filter out directories from the s3 list respones', async () => { + sinon.stub(fileUtils, 'getObjectMeta').resolves({}); + + const listFilesStub = sinon.stub(fileUtils, 'listFilesFromS3').resolves({ + Contents: [ + { + Key: 'templates/Current/' + } + ] + }); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + const requestHandler = listResources(); + + await requestHandler(mockReq, mockRes, mockNext); + + expect(listFilesStub).to.have.been.calledWith('templates/Current'); + expect(mockRes.jsonValue).to.eql({ files: [] }); + expect(mockRes.statusValue).to.equal(200); + }); + + it('catches error, and re-throws error', async () => { + sinon.stub(fileUtils, 'listFilesFromS3').rejects(new Error('an error occurred')); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + try { + const requestHandler = listResources(); + + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTPError).message).to.equal('an error occurred'); + } + }); + + describe('openApiSchema', () => { + describe('response validation', () => { + const responseValidator = new OpenAPIResponseValidator((GET.apiDoc as unknown) as OpenAPIResponseValidatorArgs); + + describe('should succeed when', () => { + it('returns an empty response', async () => { + const apiResponse = { files: [] }; + const response = responseValidator.validateResponse(200, apiResponse); + + expect(response).to.equal(undefined); + }); + + it('optional values are not included', async () => { + const apiResponse = { + files: [ + { + url: 'string1', + fileName: 'string1', + lastModified: 'string1', + fileSize: 0, + metadata: {} + } + ] + }; + const response = responseValidator.validateResponse(200, apiResponse); + + expect(response).to.equal(undefined); + }); + + it('optional values are valid', async () => { + const apiResponse = { + files: [ + { + url: 'string1', + fileName: 'string1', + lastModified: 'string1', + fileSize: 0, + metadata: { + templateName: 'string1', + templateType: 'string1', + species: 'string1' + } + } + ] + }; + const response = responseValidator.validateResponse(200, apiResponse); + + expect(response).to.equal(undefined); + }); + }); + + describe('should fail when', () => { + it('returns a null response', async () => { + const apiResponse = null; + const response = responseValidator.validateResponse(200, apiResponse); + + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors[0].message).to.equal('must be object'); + }); + + it('file has no fileName', async () => { + const apiResponse = { + files: [ + { + url: 'string1', + lastModified: 'string1', + fileSize: 0, + metadata: {} + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].message).to.equal("must have required property 'fileName'"); + expect(response.errors[0].path).to.equal('files/0'); + }); + + it('file has no url', async () => { + const apiResponse = { + files: [ + { + fileName: 'string1', + lastModified: 'string1', + fileSize: 0, + metadata: {} + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].message).to.equal("must have required property 'url'"); + expect(response.errors[0].path).to.equal('files/0'); + }); + + it('file has no lastModified', async () => { + const apiResponse = { + files: [ + { + url: 'string1', + fileName: 'string1', + fileSize: 0, + metadata: {} + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].message).to.equal("must have required property 'lastModified'"); + expect(response.errors[0].path).to.equal('files/0'); + }); + + it('file has no fileSize', async () => { + const apiResponse = { + files: [ + { + url: 'string1', + fileName: 'string1', + lastModified: 'string1', + metadata: {} + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].message).to.equal("must have required property 'fileSize'"); + expect(response.errors[0].path).to.equal('files/0'); + }); + + it('fileSize is not a number', async () => { + const apiResponse = { + files: [ + { + url: 'string1', + fileName: 'string1', + lastModified: 'string1', + fileSize: '100 kB', + metadata: {} + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].message).to.equal('must be number'); + expect(response.errors[0].path).to.equal('files/0/fileSize'); + }); + + it('file has no metadata', async () => { + const apiResponse = { + files: [ + { + url: 'string1', + lastModified: 'string1', + fileName: 'string1', + fileSize: 0 + } + ] + }; + + const response = responseValidator.validateResponse(200, apiResponse); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors.length).to.equal(1); + expect(response.errors[0].message).to.equal("must have required property 'metadata'"); + expect(response.errors[0].path).to.equal('files/0'); + }); + }); + }); + }); +}); diff --git a/api/src/paths/resources/list.ts b/api/src/paths/resources/list.ts new file mode 100644 index 0000000000..326a3a983f --- /dev/null +++ b/api/src/paths/resources/list.ts @@ -0,0 +1,129 @@ +import { Object as S3Object } from 'aws-sdk/clients/s3'; +import { RequestHandler } from 'express'; +import { Operation } from 'express-openapi'; +import { getObjectMeta, getS3HostUrl, listFilesFromS3 } from '../../utils/file-utils'; +import { getLogger } from '../../utils/logger'; + +const defaultLog = getLogger('paths/resources/list'); + +const CURRENT_TEMPLATES_PATH = 'templates/Current'; + +export const GET: Operation = [listResources()]; + +GET.apiDoc = { + description: 'Lists all resources.', + tags: ['resources'], + responses: { + 200: { + description: 'Resources response object.', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + files: { + type: 'array', + items: { + type: 'object', + required: ['fileName', 'url', 'lastModified', 'fileSize', 'metadata'], + properties: { + fileName: { + type: 'string' + }, + url: { + type: 'string' + }, + lastModified: { + oneOf: [{ type: 'string', format: 'date' }, { type: 'object' }] + }, + fileSize: { + type: 'number' + }, + metadata: { + type: 'object', + properties: { + species: { + type: 'string' + }, + templateName: { + type: 'string' + }, + templateType: { + type: 'string' + } + } + } + } + } + } + } + } + } + } + }, + 400: { + $ref: '#/components/responses/400' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; + +/** + * List resources. + * + * @returns {RequestHandler} + */ +export function listResources(): RequestHandler { + return async (_req, res) => { + defaultLog.debug({ label: 'listResources' }); + + try { + const response = await listFilesFromS3(CURRENT_TEMPLATES_PATH); + + /** + * Filters directories from the list files response, then maps them to an array of promises + * which fetch the metadata for each object in the list. + */ + const filePromises = (response?.Contents || []) + .filter((file: S3Object) => !file.Key?.endsWith('/')) + .map(async (file: S3Object) => { + let metadata = {}; + let fileName = ''; + + if (file.Key) { + const metaResponse = await getObjectMeta(file.Key); + + // Trim path name and/or leading '/' character(s) + fileName = file.Key.replace(new RegExp(`^${CURRENT_TEMPLATES_PATH}/*`), ''); + + metadata = { + species: metaResponse?.Metadata?.['species'], + templateName: metaResponse?.Metadata?.['template-name'], + templateType: metaResponse?.Metadata?.['template-type'] + }; + } + + return { + fileName, + url: getS3HostUrl(file.Key), + lastModified: file.LastModified?.toISOString() || null, + fileSize: file.Size, + metadata + }; + }); + + // Resolve all promises before returning the result + const files = await Promise.all(filePromises); + + res.status(200).json({ files }); + } catch (error) { + defaultLog.error({ label: 'listResources', message: 'error', error }); + throw error; + } + }; +} diff --git a/api/src/paths/search.test.ts b/api/src/paths/search.test.ts index 4a2d0f1009..b80b83fb69 100644 --- a/api/src/paths/search.test.ts +++ b/api/src/paths/search.test.ts @@ -5,7 +5,7 @@ import sinonChai from 'sinon-chai'; import SQL from 'sql-template-strings'; import { SYSTEM_ROLE } from '../constants/roles'; import * as db from '../database/db'; -import { HTTPError } from '../errors/custom-error'; +import { HTTPError } from '../errors/http-error'; import search_queries from '../queries/search'; import * as authorization from '../request-handlers/security/authorization'; import { getMockDBConnection } from '../__mocks__/db'; diff --git a/api/src/paths/search.ts b/api/src/paths/search.ts index 43aa371a37..bf9542d68e 100644 --- a/api/src/paths/search.ts +++ b/api/src/paths/search.ts @@ -2,7 +2,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { SYSTEM_ROLE } from '../constants/roles'; import { getDBConnection } from '../database/db'; -import { HTTP400 } from '../errors/custom-error'; +import { HTTP400 } from '../errors/http-error'; import { searchResponseObject } from '../openapi/schemas/search'; import { queries } from '../queries/queries'; import { authorizeRequestHandler, userHasValidRole } from '../request-handlers/security/authorization'; @@ -24,7 +24,7 @@ export const GET: Operation = [ ]; GET.apiDoc = { - description: 'Gets a list of published project geometries for given systemUserId', + description: 'Gets a list of project geometries for given systemUserId', tags: ['projects'], security: [ { diff --git a/api/src/paths/taxonomy/species/list.test.ts b/api/src/paths/taxonomy/species/list.test.ts index 273c25d3ca..8a6f571ba3 100644 --- a/api/src/paths/taxonomy/species/list.test.ts +++ b/api/src/paths/taxonomy/species/list.test.ts @@ -4,7 +4,7 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../database/db'; -import { HTTPError } from '../../../errors/custom-error'; +import { HTTPError } from '../../../errors/http-error'; import { TaxonomyService } from '../../../services/taxonomy-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../__mocks__/db'; import { GET, getSpeciesFromIds } from './list'; diff --git a/api/src/paths/taxonomy/species/search.test.ts b/api/src/paths/taxonomy/species/search.test.ts index 0f4c9ccca1..42749aa5b2 100644 --- a/api/src/paths/taxonomy/species/search.test.ts +++ b/api/src/paths/taxonomy/species/search.test.ts @@ -4,7 +4,7 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../database/db'; -import { HTTPError } from '../../../errors/custom-error'; +import { HTTPError } from '../../../errors/http-error'; import { TaxonomyService } from '../../../services/taxonomy-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../__mocks__/db'; import { GET, searchSpecies } from './search'; diff --git a/api/src/paths/taxonomy/species/search.ts b/api/src/paths/taxonomy/species/search.ts index 707f933f20..67155601f0 100644 --- a/api/src/paths/taxonomy/species/search.ts +++ b/api/src/paths/taxonomy/species/search.ts @@ -73,8 +73,8 @@ export function searchSpecies(): RequestHandler { const term = String(req.query.terms) || ''; try { - const taxonomySearch = new TaxonomyService(); - const response = await taxonomySearch.searchSpecies(term.toLowerCase()); + const taxonomyService = new TaxonomyService(); + const response = await taxonomyService.searchSpecies(term.toLowerCase()); res.status(200).json({ searchResponse: response }); } catch (error) { diff --git a/api/src/paths/user/add.test.ts b/api/src/paths/user/add.test.ts index 594633bb58..e8461fe867 100644 --- a/api/src/paths/user/add.test.ts +++ b/api/src/paths/user/add.test.ts @@ -4,7 +4,7 @@ import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import { SYSTEM_IDENTITY_SOURCE } from '../../constants/database'; import * as db from '../../database/db'; -import { HTTPError } from '../../errors/custom-error'; +import { HTTPError } from '../../errors/http-error'; import { UserObject } from '../../models/user'; import { UserService } from '../../services/user-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../__mocks__/db'; @@ -46,6 +46,7 @@ describe('user', () => { const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); mockReq.body = { + userGuid: 'aaaa', identitySource: SYSTEM_IDENTITY_SOURCE.IDIR, roleId: 1 }; @@ -69,6 +70,7 @@ describe('user', () => { const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); mockReq.body = { + userGuid: 'aaaa', userIdentifier: 'username', roleId: 1 }; @@ -92,6 +94,7 @@ describe('user', () => { const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); mockReq.body = { + userGuid: 'aaaa', userIdentifier: 'username', identitySource: SYSTEM_IDENTITY_SOURCE.IDIR }; @@ -115,6 +118,7 @@ describe('user', () => { const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); mockReq.body = { + userGuid: 'aaaa', userIdentifier: 'username', identitySource: SYSTEM_IDENTITY_SOURCE.IDIR, roleId: 1 @@ -123,6 +127,8 @@ describe('user', () => { const mockUserObject: UserObject = { id: 1, user_identifier: '', + user_guid: '', + identity_source: '', record_end_date: '', role_ids: [1], role_names: [] @@ -139,5 +145,39 @@ describe('user', () => { expect(ensureSystemUserStub).to.have.been.calledOnce; expect(adduserSystemRolesStub).to.have.been.calledOnce; }); + + it('should success when no userGuid', async () => { + const dbConnectionObj = getMockDBConnection(); + + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + + mockReq.body = { + identitySource: SYSTEM_IDENTITY_SOURCE.IDIR, + userIdentifier: 'username', + roleId: 1 + }; + + const mockUserObject: UserObject = { + id: 1, + user_identifier: '', + user_guid: null, + identity_source: '', + record_end_date: '', + role_ids: [1], + role_names: [] + }; + + const ensureSystemUserStub = sinon.stub(UserService.prototype, 'ensureSystemUser').resolves(mockUserObject); + + const adduserSystemRolesStub = sinon.stub(UserService.prototype, 'addUserSystemRoles'); + + const requestHandler = user.addSystemRoleUser(); + + await requestHandler(mockReq, mockRes, mockNext); + expect(ensureSystemUserStub).to.have.been.calledOnce; + expect(adduserSystemRolesStub).to.have.been.calledOnce; + }); }); }); diff --git a/api/src/paths/user/add.ts b/api/src/paths/user/add.ts index 1da773b6bc..336d1ced52 100644 --- a/api/src/paths/user/add.ts +++ b/api/src/paths/user/add.ts @@ -3,7 +3,7 @@ import { Operation } from 'express-openapi'; import { SYSTEM_IDENTITY_SOURCE } from '../../constants/database'; import { SYSTEM_ROLE } from '../../constants/roles'; import { getDBConnection } from '../../database/db'; -import { HTTP400 } from '../../errors/custom-error'; +import { HTTP400 } from '../../errors/http-error'; import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; import { UserService } from '../../services/user-service'; import { getLogger } from '../../utils/logger'; @@ -41,12 +41,21 @@ POST.apiDoc = { type: 'object', required: ['userIdentifier', 'identitySource', 'roleId'], properties: { + userGuid: { + type: 'string', + description: 'The GUID for the user.' + }, userIdentifier: { - type: 'string' + type: 'string', + description: 'The identifier for the user.' }, identitySource: { type: 'string', - enum: [SYSTEM_IDENTITY_SOURCE.IDIR, SYSTEM_IDENTITY_SOURCE.BCEID] + enum: [ + SYSTEM_IDENTITY_SOURCE.IDIR, + SYSTEM_IDENTITY_SOURCE.BCEID_BASIC, + SYSTEM_IDENTITY_SOURCE.BCEID_BUSINESS + ] }, roleId: { type: 'number', @@ -88,8 +97,9 @@ export function addSystemRoleUser(): RequestHandler { return async (req, res) => { const connection = getDBConnection(req['keycloak_token']); - const userIdentifier = req.body?.userIdentifier || null; - const identitySource = req.body?.identitySource || null; + const userGuid: string | null = req.body?.userGuid || null; + const userIdentifier: string | null = req.body?.userIdentifier || null; + const identitySource: string | null = req.body?.identitySource || null; const roleId = req.body?.roleId || null; @@ -110,7 +120,7 @@ export function addSystemRoleUser(): RequestHandler { const userService = new UserService(connection); - const userObject = await userService.ensureSystemUser(userIdentifier, identitySource); + const userObject = await userService.ensureSystemUser(userGuid, userIdentifier, identitySource); if (userObject) { await userService.addUserSystemRoles(userObject.id, [roleId]); diff --git a/api/src/paths/user/list.test.ts b/api/src/paths/user/list.test.ts index 96dc3372a5..0c8f748700 100644 --- a/api/src/paths/user/list.test.ts +++ b/api/src/paths/user/list.test.ts @@ -26,6 +26,8 @@ describe('users', () => { { id: 1, user_identifier: 'identifier', + user_guid: 'aaaa', + identity_source: 'idir', record_end_date: '', role_ids: [1, 2], role_names: ['System Admin', 'Project Lead'] diff --git a/api/src/paths/user/list.ts b/api/src/paths/user/list.ts index 615b691dd4..3636b28f9a 100644 --- a/api/src/paths/user/list.ts +++ b/api/src/paths/user/list.ts @@ -40,13 +40,23 @@ GET.apiDoc = { items: { title: 'User Response Object', type: 'object', + required: ['id', 'user_identifier', 'identity_source', 'role_ids', 'role_names'], properties: { id: { type: 'number' }, + user_guid: { + type: 'string', + description: 'The GUID for the user.', + nullable: true + }, user_identifier: { type: 'string' }, + identity_source: { + type: 'string', + description: 'The identity source of the user' + }, role_ids: { type: 'array', items: { diff --git a/api/src/paths/user/self.test.ts b/api/src/paths/user/self.test.ts index 3f669d9c3d..ce6e006c59 100644 --- a/api/src/paths/user/self.test.ts +++ b/api/src/paths/user/self.test.ts @@ -3,7 +3,7 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../database/db'; -import { HTTPError } from '../../errors/custom-error'; +import { HTTPError } from '../../errors/http-error'; import { UserService } from '../../services/user-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../__mocks__/db'; import * as self from './self'; @@ -40,7 +40,7 @@ describe('getUser', () => { sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(UserService.prototype, 'getUserById').resolves(null); + sinon.stub(UserService.prototype, 'getUserById').resolves(undefined); try { const requestHandler = self.getUser(); @@ -63,6 +63,8 @@ describe('getUser', () => { sinon.stub(UserService.prototype, 'getUserById').resolves({ id: 1, user_identifier: 'identifier', + user_guid: 'aaaa', + identity_source: 'idir', record_end_date: '', role_ids: [1, 2], role_names: ['role 1', 'role 2'] diff --git a/api/src/paths/user/self.ts b/api/src/paths/user/self.ts index 5b4f921e25..09cb77a283 100644 --- a/api/src/paths/user/self.ts +++ b/api/src/paths/user/self.ts @@ -1,7 +1,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { getDBConnection } from '../../database/db'; -import { HTTP400 } from '../../errors/custom-error'; +import { HTTP400 } from '../../errors/http-error'; import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; import { UserService } from '../../services/user-service'; import { getLogger } from '../../utils/logger'; @@ -47,6 +47,10 @@ GET.apiDoc = { description: 'The unique user identifier', type: 'string' }, + user_guid: { + type: 'string', + description: 'The GUID for the user.' + }, record_end_date: { oneOf: [{ type: 'object' }, { type: 'string', format: 'date' }], description: 'Determines if the user record has expired', diff --git a/api/src/paths/user/{userId}/delete.test.ts b/api/src/paths/user/{userId}/delete.test.ts index 6736ac6cbf..0a8bdf3164 100644 --- a/api/src/paths/user/{userId}/delete.test.ts +++ b/api/src/paths/user/{userId}/delete.test.ts @@ -4,7 +4,7 @@ import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import SQL from 'sql-template-strings'; import * as db from '../../../database/db'; -import { HTTPError } from '../../../errors/custom-error'; +import { HTTPError } from '../../../errors/http-error'; import project_participation_queries from '../../../queries/project-participation'; import user_queries from '../../../queries/users'; import { UserService } from '../../../services/user-service'; @@ -127,7 +127,7 @@ describe('removeSystemUser', () => { sinon.stub(delete_endpoint, 'checkIfUserIsOnlyProjectLeadOnAnyProject').resolves(); - sinon.stub(UserService.prototype, 'getUserById').resolves(null); + sinon.stub(UserService.prototype, 'getUserById').throws; try { const requestHandler = delete_endpoint.removeSystemUser(); @@ -155,6 +155,8 @@ describe('removeSystemUser', () => { sinon.stub(UserService.prototype, 'getUserById').resolves({ id: 1, user_identifier: 'testname', + user_guid: 'aaaa', + identity_source: 'idir', record_end_date: '2010-10-10', role_ids: [1, 2], role_names: ['role 1', 'role 2'] @@ -187,6 +189,8 @@ describe('removeSystemUser', () => { sinon.stub(UserService.prototype, 'getUserById').resolves({ id: 1, user_identifier: 'testname', + user_guid: 'aaaa', + identity_source: 'idir', record_end_date: '', role_ids: [1, 2], role_names: ['role 1', 'role 2'] @@ -222,6 +226,8 @@ describe('removeSystemUser', () => { sinon.stub(UserService.prototype, 'getUserById').resolves({ id: 1, user_identifier: 'testname', + user_guid: 'aaaa', + identity_source: 'idir', record_end_date: '', role_ids: [1, 2], role_names: ['role 1', 'role 2'] @@ -255,6 +261,8 @@ describe('removeSystemUser', () => { sinon.stub(UserService.prototype, 'getUserById').resolves({ id: 1, user_identifier: 'testname', + user_guid: 'aaaa', + identity_source: 'idir', record_end_date: '', role_ids: [1, 2], role_names: ['role 1', 'role 2'] @@ -290,6 +298,8 @@ describe('removeSystemUser', () => { sinon.stub(UserService.prototype, 'getUserById').resolves({ id: 1, user_identifier: 'testname', + user_guid: 'aaaa', + identity_source: 'idir', record_end_date: '', role_ids: [1, 2], role_names: ['role 1', 'role 2'] @@ -326,6 +336,8 @@ describe('removeSystemUser', () => { sinon.stub(UserService.prototype, 'getUserById').resolves({ id: 1, user_identifier: 'testname', + user_guid: 'aaaa', + identity_source: 'idir', record_end_date: '', role_ids: [1, 2], role_names: ['role 1', 'role 2'] diff --git a/api/src/paths/user/{userId}/delete.ts b/api/src/paths/user/{userId}/delete.ts index 08ccb56674..fbea430cad 100644 --- a/api/src/paths/user/{userId}/delete.ts +++ b/api/src/paths/user/{userId}/delete.ts @@ -2,7 +2,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE, SYSTEM_ROLE } from '../../../constants/roles'; import { getDBConnection, IDBConnection } from '../../../database/db'; -import { HTTP400 } from '../../../errors/custom-error'; +import { HTTP400 } from '../../../errors/http-error'; import { queries } from '../../../queries/queries'; import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; import { UserService } from '../../../services/user-service'; diff --git a/api/src/paths/user/{userId}/get.test.ts b/api/src/paths/user/{userId}/get.test.ts index bfdbea856a..db9bac8035 100644 --- a/api/src/paths/user/{userId}/get.test.ts +++ b/api/src/paths/user/{userId}/get.test.ts @@ -3,7 +3,7 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../database/db'; -import { HTTPError } from '../../../errors/custom-error'; +import { HTTPError } from '../../../errors/http-error'; import { UserService } from '../../../services/user-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../__mocks__/db'; import * as user from './get'; @@ -49,7 +49,7 @@ describe('user', () => { userId: '1' }; - sinon.stub(UserService.prototype, 'getUserById').resolves(null); + sinon.stub(UserService.prototype, 'getUserById').resolves(undefined); try { const requestHandler = user.getUserById(); @@ -75,10 +75,12 @@ describe('user', () => { sinon.stub(UserService.prototype, 'getUserById').resolves({ id: 1, - user_identifier: 'user_identifier', + identity_source: 'idir', record_end_date: '', role_ids: [], - role_names: [] + role_names: [], + user_guid: 'aaaa', + user_identifier: 'user_identifier' }); const requestHandler = user.getUserById(); @@ -87,10 +89,12 @@ describe('user', () => { expect(mockRes.jsonValue).to.eql({ id: 1, - user_identifier: 'user_identifier', + identity_source: 'idir', record_end_date: '', role_ids: [], - role_names: [] + role_names: [], + user_guid: 'aaaa', + user_identifier: 'user_identifier' }); }); }); diff --git a/api/src/paths/user/{userId}/get.ts b/api/src/paths/user/{userId}/get.ts index 9704014279..bfe416ed97 100644 --- a/api/src/paths/user/{userId}/get.ts +++ b/api/src/paths/user/{userId}/get.ts @@ -2,7 +2,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { SYSTEM_ROLE } from '../../../constants/roles'; import { getDBConnection } from '../../../database/db'; -import { HTTP400 } from '../../../errors/custom-error'; +import { HTTP400 } from '../../../errors/http-error'; import { authorizeRequestHandler } from '../../../request-handlers/security/authorization'; import { UserService } from '../../../services/user-service'; import { getLogger } from '../../../utils/logger'; @@ -58,6 +58,10 @@ GET.apiDoc = { description: 'The unique user identifier', type: 'string' }, + user_guid: { + type: 'string', + description: 'The GUID for the user.' + }, record_end_date: { oneOf: [{ type: 'object' }, { type: 'string', format: 'date' }], nullable: true, diff --git a/api/src/paths/user/{userId}/projects/get.test.ts b/api/src/paths/user/{userId}/projects/get.test.ts index 6f2d25a998..d3b819b1af 100644 --- a/api/src/paths/user/{userId}/projects/get.test.ts +++ b/api/src/paths/user/{userId}/projects/get.test.ts @@ -4,7 +4,7 @@ import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import SQL from 'sql-template-strings'; import * as db from '../../../../database/db'; -import { HTTPError } from '../../../../errors/custom-error'; +import { HTTPError } from '../../../../errors/http-error'; import project_participation_queries from '../../../../queries/project-participation'; import { getMockDBConnection } from '../../../../__mocks__/db'; import * as projects from './get'; diff --git a/api/src/paths/user/{userId}/projects/get.ts b/api/src/paths/user/{userId}/projects/get.ts index 91a16a76c8..e55a02a1b9 100644 --- a/api/src/paths/user/{userId}/projects/get.ts +++ b/api/src/paths/user/{userId}/projects/get.ts @@ -2,7 +2,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { SYSTEM_ROLE } from '../../../../constants/roles'; import { getDBConnection } from '../../../../database/db'; -import { HTTP400 } from '../../../../errors/custom-error'; +import { HTTP400 } from '../../../../errors/http-error'; import { queries } from '../../../../queries/queries'; import { authorizeRequestHandler } from '../../../../request-handlers/security/authorization'; import { getLogger } from '../../../../utils/logger'; diff --git a/api/src/paths/user/{userId}/system-roles/create.test.ts b/api/src/paths/user/{userId}/system-roles/create.test.ts deleted file mode 100644 index e8051d4626..0000000000 --- a/api/src/paths/user/{userId}/system-roles/create.test.ts +++ /dev/null @@ -1,216 +0,0 @@ -import chai, { expect } from 'chai'; -import { describe } from 'mocha'; -import sinon from 'sinon'; -import sinonChai from 'sinon-chai'; -import * as db from '../../../../database/db'; -import { HTTPError } from '../../../../errors/custom-error'; -import { UserService } from '../../../../services/user-service'; -import { getMockDBConnection, getRequestHandlerMocks } from '../../../../__mocks__/db'; -import * as system_roles from './create'; - -chai.use(sinonChai); - -describe('getAddSystemRolesHandler', () => { - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 400 error when missing required path param: userId', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - userId: '' - }; - mockReq.body = { - roles: [1] - }; - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - try { - const requestHandler = system_roles.getAddSystemRolesHandler(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required path param: userId'); - } - }); - - it('should throw a 400 error when missing roles in request body', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - userId: '1' - }; - mockReq.body = { - roles: null - }; - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - systemUserId: () => { - return 20; - } - }); - - try { - const requestHandler = system_roles.getAddSystemRolesHandler(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing required body param: roles'); - } - }); - - it('should throw a 400 error when no system user found', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - userId: '1' - }; - mockReq.body = { - roles: [1] - }; - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - sinon.stub(UserService.prototype, 'getUserById').resolves(null); - - try { - const requestHandler = system_roles.getAddSystemRolesHandler(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to get system user'); - } - }); - - it('re-throws the error thrown by UserService.addUserSystemRoles', async () => { - const dbConnectionObj = getMockDBConnection(); - - sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - userId: '1' - }; - mockReq.body = { - roles: [1] - }; - - sinon.stub(UserService.prototype, 'getUserById').resolves({ - id: 1, - user_identifier: 'test name', - record_end_date: '', - role_ids: [11, 22], - role_names: ['role 11', 'role 22'] - }); - - sinon.stub(UserService.prototype, 'addUserSystemRoles').rejects(new Error('add user error')); - - try { - const requestHandler = system_roles.getAddSystemRolesHandler(); - - await requestHandler(mockReq, mockRes, mockNext); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('add user error'); - } - }); - - it('should send a 200 on success (when user has existing roles)', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - userId: '1' - }; - mockReq.body = { - roles: [1] - }; - - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: 1 - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery - }); - - sinon.stub(UserService.prototype, 'getUserById').resolves({ - id: 1, - user_identifier: 'test name', - record_end_date: '', - role_ids: [1, 2], - role_names: ['role 1', 'role 2'] - }); - - const requestHandler = system_roles.getAddSystemRolesHandler(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.statusValue).to.equal(200); - }); - - it('should send a 200 on success (when user has no existing roles)', async () => { - const dbConnectionObj = getMockDBConnection(); - - const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); - - mockReq.params = { - userId: '1' - }; - mockReq.body = { - roles: [1] - }; - - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rowCount: 1 - }); - - sinon.stub(db, 'getDBConnection').returns({ - ...dbConnectionObj, - query: mockQuery - }); - - sinon.stub(UserService.prototype, 'getUserById').resolves({ - id: 1, - user_identifier: 'test name', - record_end_date: '', - role_ids: [], - role_names: ['role 11', 'role 22'] - }); - - sinon.stub(UserService.prototype, 'addUserSystemRoles').resolves(); - - const requestHandler = system_roles.getAddSystemRolesHandler(); - - await requestHandler(mockReq, mockRes, mockNext); - - expect(mockRes.statusValue).to.equal(200); - }); -}); diff --git a/api/src/paths/user/{userId}/system-roles/create.ts b/api/src/paths/user/{userId}/system-roles/create.ts deleted file mode 100644 index 104ef7eacf..0000000000 --- a/api/src/paths/user/{userId}/system-roles/create.ts +++ /dev/null @@ -1,138 +0,0 @@ -import { RequestHandler } from 'express'; -import { Operation } from 'express-openapi'; -import { SYSTEM_ROLE } from '../../../../constants/roles'; -import { getDBConnection } from '../../../../database/db'; -import { HTTP400 } from '../../../../errors/custom-error'; -import { authorizeRequestHandler } from '../../../../request-handlers/security/authorization'; -import { UserService } from '../../../../services/user-service'; -import { getLogger } from '../../../../utils/logger'; - -const defaultLog = getLogger('paths/user/{userId}/system-roles/create'); - -export const POST: Operation = [ - authorizeRequestHandler(() => { - return { - and: [ - { - validSystemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN], - discriminator: 'SystemRole' - } - ] - }; - }), - getAddSystemRolesHandler() -]; - -POST.apiDoc = { - description: 'Add system roles to a user.', - tags: ['user'], - security: [ - { - Bearer: [] - } - ], - parameters: [ - { - in: 'path', - name: 'userId', - schema: { - type: 'number' - }, - required: true - } - ], - requestBody: { - description: 'Add system roles to a user request object.', - content: { - 'application/json': { - schema: { - type: 'object', - required: ['roles'], - properties: { - roles: { - type: 'array', - items: { - type: 'number' - }, - description: 'An array of role ids' - } - } - } - } - } - }, - responses: { - 200: { - description: 'Add system user roles to user OK.' - }, - 400: { - $ref: '#/components/responses/400' - }, - 401: { - $ref: '#/components/responses/401' - }, - 403: { - $ref: '#/components/responses/401' - }, - 500: { - $ref: '#/components/responses/500' - }, - default: { - $ref: '#/components/responses/default' - } - } -}; - -export function getAddSystemRolesHandler(): RequestHandler { - return async (req, res) => { - defaultLog.debug({ - label: 'getAddSystemRolesHandler', - message: 'params', - req_params: req.params, - req_body: req.body - }); - - if (!req.params || !req.params.userId) { - throw new HTTP400('Missing required path param: userId'); - } - - if (!req.body || !req.body.roles || !req.body.roles.length) { - throw new HTTP400('Missing required body param: roles'); - } - - const userId = Number(req.params.userId); - const roles: number[] = req.body.roles; - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - const userService = new UserService(connection); - - const userObject = await userService.getUserById(userId); - - if (!userObject) { - throw new HTTP400('Failed to get system user'); - } - - // Filter out any system roles that have already been added to the user - const rolesToAdd = roles.filter((role) => !userObject.role_ids.includes(role)); - - if (!rolesToAdd.length) { - // No new system roles to add, do nothing - return res.status(200).send(); - } - - await userService.addUserSystemRoles(userId, roles); - - await connection.commit(); - - return res.status(200).send(); - } catch (error) { - defaultLog.error({ label: 'getAddSystemRolesHandler', message: 'error', error }); - throw error; - } finally { - connection.release(); - } - }; -} diff --git a/api/src/paths/user/{userId}/system-roles/update.test.ts b/api/src/paths/user/{userId}/system-roles/update.test.ts index 9ad22c0b2a..47b8ff0714 100644 --- a/api/src/paths/user/{userId}/system-roles/update.test.ts +++ b/api/src/paths/user/{userId}/system-roles/update.test.ts @@ -3,7 +3,7 @@ import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; import * as db from '../../../../database/db'; -import { HTTPError } from '../../../../errors/custom-error'; +import { HTTPError } from '../../../../errors/http-error'; import { UserService } from '../../../../services/user-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../../../__mocks__/db'; import * as system_roles from './update'; @@ -79,7 +79,7 @@ describe('updateSystemRolesHandler', () => { sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); - sinon.stub(UserService.prototype, 'getUserById').resolves(null); + sinon.stub(UserService.prototype, 'getUserById').resolves(undefined); try { const requestHandler = system_roles.updateSystemRolesHandler(); @@ -109,6 +109,8 @@ describe('updateSystemRolesHandler', () => { sinon.stub(UserService.prototype, 'getUserById').resolves({ id: 1, user_identifier: 'test name', + user_guid: 'aaaa', + identity_source: 'idir', record_end_date: '', role_ids: [11, 22], role_names: ['role 11', 'role 22'] @@ -154,6 +156,8 @@ describe('updateSystemRolesHandler', () => { sinon.stub(UserService.prototype, 'getUserById').resolves({ id: 1, user_identifier: 'test name', + user_guid: 'aaaa', + identity_source: 'idir', record_end_date: '', role_ids: [11, 22], role_names: ['role 11', 'role 22'] @@ -189,6 +193,8 @@ describe('updateSystemRolesHandler', () => { sinon.stub(UserService.prototype, 'getUserById').resolves({ id: 1, user_identifier: 'test name', + user_guid: 'aaaa', + identity_source: 'idir', record_end_date: '', role_ids: [11, 22], role_names: ['role 1', 'role 2'] @@ -228,9 +234,15 @@ describe('updateSystemRolesHandler', () => { query: mockQuery }); - sinon - .stub(UserService.prototype, 'getUserById') - .resolves({ id: 1, user_identifier: 'test name', record_end_date: '', role_ids: [], role_names: [] }); + sinon.stub(UserService.prototype, 'getUserById').resolves({ + id: 1, + user_identifier: 'test name', + user_guid: 'aaaa', + identity_source: 'idir', + record_end_date: '', + role_ids: [], + role_names: [] + }); const deleteUserSystemRolesStub = sinon.stub(UserService.prototype, 'deleteUserSystemRoles').resolves(); sinon.stub(UserService.prototype, 'addUserSystemRoles').resolves(); diff --git a/api/src/paths/user/{userId}/system-roles/update.ts b/api/src/paths/user/{userId}/system-roles/update.ts index 58e8db61dc..a70b11ffbd 100644 --- a/api/src/paths/user/{userId}/system-roles/update.ts +++ b/api/src/paths/user/{userId}/system-roles/update.ts @@ -2,7 +2,7 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { SYSTEM_ROLE } from '../../../../constants/roles'; import { getDBConnection } from '../../../../database/db'; -import { HTTP400 } from '../../../../errors/custom-error'; +import { HTTP400 } from '../../../../errors/http-error'; import { authorizeRequestHandler } from '../../../../request-handlers/security/authorization'; import { UserService } from '../../../../services/user-service'; import { getLogger } from '../../../../utils/logger'; diff --git a/api/src/paths/version.ts b/api/src/paths/version.ts index 9c21e4ec90..54cc5918e3 100644 --- a/api/src/paths/version.ts +++ b/api/src/paths/version.ts @@ -43,7 +43,7 @@ GET.apiDoc = { * @returns {RequestHandler} */ export function getVersionInformation(): RequestHandler { - return (req, res) => { + return async (_req, res) => { const versionInfo = { version: process.env.VERSION, environment: process.env.NODE_ENV, diff --git a/api/src/paths/xlsx/process.test.ts b/api/src/paths/xlsx/process.test.ts new file mode 100644 index 0000000000..139e2f0e25 --- /dev/null +++ b/api/src/paths/xlsx/process.test.ts @@ -0,0 +1,245 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import OpenAPIRequestValidator, { OpenAPIRequestValidatorArgs } from 'openapi-request-validator'; +import OpenAPIResponseValidator, { OpenAPIResponseValidatorArgs } from 'openapi-response-validator'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import * as db from '../../database/db'; +import { HTTPError } from '../../errors/http-error'; +import { ErrorService } from '../../services/error-service'; +import { ValidationService } from '../../services/validation-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../__mocks__/db'; +import * as process from './process'; +import { POST } from './process'; + +chai.use(sinonChai); + +describe('xlsx/process', () => { + describe('openApiSchema', () => { + describe('request validation', () => { + const requestValidator = new OpenAPIRequestValidator((POST.apiDoc as unknown) as OpenAPIRequestValidatorArgs); + + describe('should throw an error when', () => { + describe('request body', () => { + it('is null', async () => { + const request = { + headers: { + 'content-type': 'application/json' + }, + body: {} + }; + + const response = requestValidator.validateRequest(request); + + expect(response.status).to.equal(400); + expect(response.errors[0].path).to.equal('project_id'); + expect(response.errors[1].path).to.equal('occurrence_submission_id'); + expect(response.errors[0].message).to.equal(`must have required property 'project_id'`); + expect(response.errors[1].message).to.equal(`must have required property 'occurrence_submission_id'`); + expect(response.errors[2]).to.be.undefined; + }); + + it('is missing required fields', async () => { + const request = { + headers: { + 'content-type': 'application/json' + }, + + body: { project_id: 1 } + }; + + const response = requestValidator.validateRequest(request); + + expect(response.status).to.equal(400); + expect(response.errors[0].path).to.equal('occurrence_submission_id'); + expect(response.errors[0].message).to.equal(`must have required property 'occurrence_submission_id'`); + }); + + it('fields are undefined', async () => { + const request = { + headers: { + 'content-type': 'application/json' + }, + + body: { project_id: undefined, occurrence_submission_id: undefined } + }; + + const response = requestValidator.validateRequest(request); + + expect(response.status).to.equal(400); + expect(response.errors[0].path).to.equal('project_id'); + expect(response.errors[1].path).to.equal('occurrence_submission_id'); + expect(response.errors[0].message).to.equal(`must have required property 'project_id'`); + expect(response.errors[1].message).to.equal(`must have required property 'occurrence_submission_id'`); + expect(response.errors[2]).to.be.undefined; + }); + }); + + describe('project_id and occurrence_submission_id', () => { + it('have invalid type', async () => { + const request = { + headers: { 'content-type': 'application/json' }, + body: { project_id: 'not a number', occurrence_submission_id: 'not a number' } + }; + + const response = requestValidator.validateRequest(request); + + expect(response.status).to.equal(400); + expect(response.errors[0].message).to.equal('must be number'); + expect(response.errors[1].message).to.equal('must be number'); + }); + }); + }); + + describe('should succeed when', () => { + it('required values are valid', async () => { + const request = { + headers: { 'content-type': 'application/json' }, + body: { project_id: 1, occurrence_submission_id: 2 } + }; + + const response = requestValidator.validateRequest(request); + + expect(response).to.be.undefined; + }); + }); + }); + + describe('response validation', () => { + const responseValidator = new OpenAPIResponseValidator((POST.apiDoc as unknown) as OpenAPIResponseValidatorArgs); + + describe('should succeed when', () => { + it('returns a null response', async () => { + const apiResponse = null; + const response = responseValidator.validateResponse(200, apiResponse); + + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors[0].message).to.equal('must be object'); + }); + + it('optional values are valid', async () => { + const apiResponse = { status: 'my status', reason: 'my_reason' }; + const response = responseValidator.validateResponse(200, apiResponse); + + expect(response).to.equal(undefined); + }); + }); + + describe('should fail when', () => { + it('optional values are invalid', async () => { + const apiResponse = { status: 1, reason: 1 }; + const response = responseValidator.validateResponse(200, apiResponse); + + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors[0].message).to.equal('must be string'); + }); + }); + }); + }); + + describe('process file', () => { + afterEach(() => { + sinon.restore(); + }); + + it('throws an error when req.body.occurrence_submission_id is empty', async () => { + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.body = {}; + + const requestHandler = process.processFile(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTPError).status).to.equal(400); + expect((actualError as HTTPError).message).to.equal('Missing required parameter `occurrence field`'); + } + }); + + it('returns a 200 if req.body.occurrence_submission_id exists', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.body = { + occurrence_submission_id: '123-456-789' + }; + mockReq['keycloak_token'] = 'token'; + + const processXLSXFileStub = sinon.stub(ValidationService.prototype, 'processXLSXFile').resolves(); + + const requestHandler = process.processFile(); + await requestHandler(mockReq, mockRes, mockNext); + expect(mockRes.statusValue).to.equal(200); + expect(processXLSXFileStub).to.have.been.calledOnceWith(mockReq.body.occurrence_submission_id); + expect(mockRes.jsonValue).to.eql({ status: 'success' }); + }); + + it('catches an error on processXLSXFile', async () => { + const dbConnectionObj = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const processXLSXFileStub = sinon + .stub(ValidationService.prototype, 'processXLSXFile') + .throws(new Error('test processXLSXFile error')); + const errorServiceStub = sinon.stub(ErrorService.prototype, 'insertSubmissionStatus').resolves(); + const processDWCFileStub = sinon.stub(ValidationService.prototype, 'processDWCFile').resolves(); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq['keycloak_token'] = 'token'; + + mockReq.body = { + occurrence_submission_id: '123-456-789' + }; + + const requestHandler = process.processFile(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(processXLSXFileStub).to.have.been.calledOnceWith(mockReq.body.occurrence_submission_id); + expect(processDWCFileStub).not.to.have.been.calledOnce; + expect(errorServiceStub).to.have.been.calledOnce; + expect(dbConnectionObj.rollback).to.have.been.calledOnce; + expect(dbConnectionObj.release).to.have.been.calledOnce; + expect((actualError as Error).message).to.equal('test processXLSXFile error'); + } + }); + + it('catches an error on insertSubmissionStatus', async () => { + const dbConnectionObj = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const processXLSXFileStub = sinon + .stub(ValidationService.prototype, 'processXLSXFile') + .throws(new Error('test processDWCFile error')); + const processDWCFileStub = sinon.stub(ValidationService.prototype, 'processDWCFile').resolves(); + + const errorServiceStub = sinon + .stub(ErrorService.prototype, 'insertSubmissionStatus') + .throws(new Error('test insertSubmissionStatus error')); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq['keycloak_token'] = 'token'; + + mockReq.body = { + occurrence_submission_id: '123-456-789' + }; + + const requestHandler = process.processFile(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(processXLSXFileStub).to.have.been.calledOnceWith(mockReq.body.occurrence_submission_id); + expect(processDWCFileStub).not.to.have.been.calledOnce; + expect(errorServiceStub).to.have.been.calledOnce; + expect(dbConnectionObj.rollback).to.have.been.calledOnce; + expect(dbConnectionObj.release).to.have.been.calledOnce; + expect((actualError as Error).message).to.equal('test insertSubmissionStatus error'); + } + }); + }); +}); diff --git a/api/src/paths/xlsx/process.ts b/api/src/paths/xlsx/process.ts index bffef30aa2..a3c4347e47 100644 --- a/api/src/paths/xlsx/process.ts +++ b/api/src/paths/xlsx/process.ts @@ -1,25 +1,13 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../constants/roles'; +import { SUBMISSION_STATUS_TYPE } from '../../constants/status'; +import { getDBConnection } from '../../database/db'; +import { HTTP400 } from '../../errors/http-error'; import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; +import { ErrorService } from '../../services/error-service'; +import { ValidationService } from '../../services/validation-service'; import { getLogger } from '../../utils/logger'; -import { getSubmissionOutputS3Key, scrapeAndUploadOccurrences } from '../dwc/scrape-occurrences'; -import { - getOccurrenceSubmission, - getOccurrenceSubmissionInputS3Key, - getS3File, - getValidationRules, - persistParseErrors, - persistValidationResults, - prepDWCArchive -} from '../dwc/validate'; -import { - getTransformationRules, - getTransformationSchema, - persistTransformationResults, - transformXLSX -} from './transform'; -import { getValidationSchema, prepXLSX, validateXLSX } from './validate'; const defaultLog = getLogger('paths/xlsx/process'); @@ -35,32 +23,7 @@ export const POST: Operation = [ ] }; }), - //general set up - getOccurrenceSubmission(), - getOccurrenceSubmissionInputS3Key(), - getS3File(), - prepXLSX(), - persistParseErrors(), - sendResponse(), - - //xlsx validate - getValidationSchema(), - getValidationRules(), - validateXLSX(), - persistValidationResults({ initialSubmissionStatusType: 'Template Validated' }), - - //xlsx transform functions - getTransformationSchema(), - getTransformationRules(), - transformXLSX(), - persistTransformationResults(), - - //scrape functions - getOccurrenceSubmission(), - getSubmissionOutputS3Key(), - getS3File(), - prepDWCArchive(), - scrapeAndUploadOccurrences() + processFile() ]; POST.apiDoc = { @@ -87,6 +50,9 @@ POST.apiDoc = { description: 'A survey occurrence submission ID', type: 'number', example: 1 + }, + survey_id: { + type: 'number' } } } @@ -130,10 +96,38 @@ POST.apiDoc = { } }; -export function sendResponse(): RequestHandler { - return async (_req, res, next) => { +export function processFile(): RequestHandler { + return async (req, res) => { + const submissionId = req.body.occurrence_submission_id; + const surveyId = req.body.survey_id; + if (!submissionId) { + throw new HTTP400('Missing required parameter `occurrence field`'); + } + res.status(200).json({ status: 'success' }); - defaultLog.info({ label: 'xlsx process', message: `success sent` }); - next(); + + const connection = getDBConnection(req['keycloak_token']); + try { + await connection.open(); + + const validationService = new ValidationService(connection); + + // process the raw template data + await validationService.processXLSXFile(submissionId, surveyId); + + await connection.commit(); + } catch (error) { + defaultLog.error({ label: 'xlsx process', message: 'error', error }); + // Unexpected error occurred, rolling DB back to safe state + await connection.rollback(); + + // We still want to track that the submission failed to present to the user + const errorService = new ErrorService(connection); + await errorService.insertSubmissionStatus(submissionId, SUBMISSION_STATUS_TYPE.SYSTEM_ERROR); + await connection.commit(); + throw error; + } finally { + connection.release(); + } }; } diff --git a/api/src/paths/xlsx/transform.test.ts b/api/src/paths/xlsx/transform.test.ts index 0c0dafeb7c..dc58280742 100644 --- a/api/src/paths/xlsx/transform.test.ts +++ b/api/src/paths/xlsx/transform.test.ts @@ -1,46 +1,240 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; +import OpenAPIRequestValidator, { OpenAPIRequestValidatorArgs } from 'openapi-request-validator'; +import OpenAPIResponseValidator, { OpenAPIResponseValidatorArgs } from 'openapi-response-validator'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; +import * as db from '../../database/db'; +import { HTTPError } from '../../errors/http-error'; +import { ErrorService } from '../../services/error-service'; +import { ValidationService } from '../../services/validation-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../__mocks__/db'; import * as transform from './transform'; +import { POST } from './transform'; chai.use(sinonChai); -describe('persistParseErrors', () => { - const sampleReq = { - keycloak_token: {}, - parseError: null - } as any; +describe('xlsx/transform', () => { + describe('openApiSchema', () => { + describe('request validation', () => { + const requestValidator = new OpenAPIRequestValidator((POST.apiDoc as unknown) as OpenAPIRequestValidatorArgs); - let actualResult: any = null; + describe('should throw an error when', () => { + describe('request body', () => { + it('is null', async () => { + const request = { + headers: { + 'content-type': 'application/json' + }, + body: {} + }; - const sampleRes = { - status: () => { - return { - json: (result: any) => { - actualResult = result; - } - }; - } - }; + const response = requestValidator.validateRequest(request); - afterEach(() => { - sinon.restore(); - }); + expect(response.status).to.equal(400); + expect(response.errors[0].path).to.equal('project_id'); + expect(response.errors[1].path).to.equal('occurrence_submission_id'); + expect(response.errors[0].message).to.equal(`must have required property 'project_id'`); + expect(response.errors[1].message).to.equal(`must have required property 'occurrence_submission_id'`); + expect(response.errors[2]).to.be.undefined; + }); + + it('is missing required fields', async () => { + const request = { + headers: { + 'content-type': 'application/json' + }, + + body: { project_id: 1 } + }; + + const response = requestValidator.validateRequest(request); + + expect(response.status).to.equal(400); + expect(response.errors[0].path).to.equal('occurrence_submission_id'); + expect(response.errors[0].message).to.equal(`must have required property 'occurrence_submission_id'`); + }); + + it('fields are undefined', async () => { + const request = { + headers: { + 'content-type': 'application/json' + }, + + body: { project_id: undefined, occurrence_submission_id: undefined } + }; + + const response = requestValidator.validateRequest(request); + + expect(response.status).to.equal(400); + expect(response.errors[0].path).to.equal('project_id'); + expect(response.errors[1].path).to.equal('occurrence_submission_id'); + expect(response.errors[0].message).to.equal(`must have required property 'project_id'`); + expect(response.errors[1].message).to.equal(`must have required property 'occurrence_submission_id'`); + expect(response.errors[2]).to.be.undefined; + }); + }); + + describe('project_id and occurrence_submission_id', () => { + it('have invalid type', async () => { + const request = { + headers: { 'content-type': 'application/json' }, + body: { project_id: 'not a number', occurrence_submission_id: 'not a number' } + }; + + const response = requestValidator.validateRequest(request); + + expect(response.status).to.equal(400); + expect(response.errors[0].message).to.equal('must be number'); + expect(response.errors[1].message).to.equal('must be number'); + }); + }); + }); + + describe('should succeed when', () => { + it('required values are valid', async () => { + const request = { + headers: { 'content-type': 'application/json' }, + body: { project_id: 1, occurrence_submission_id: 2 } + }; + + const response = requestValidator.validateRequest(request); + + expect(response).to.be.undefined; + }); + }); + }); + + describe('response validation', () => { + const responseValidator = new OpenAPIResponseValidator((POST.apiDoc as unknown) as OpenAPIResponseValidatorArgs); + + describe('should succeed when', () => { + it('returns a null response', async () => { + const apiResponse = null; + const response = responseValidator.validateResponse(200, apiResponse); + + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors[0].message).to.equal('must be object'); + }); + + it('optional values are valid', async () => { + const apiResponse = { status: 'my status', reason: 'my_reason' }; + const response = responseValidator.validateResponse(200, apiResponse); - it('should skip to next step when no errors', async () => { - const nextSpy = sinon.spy(); + expect(response).to.equal(undefined); + }); + }); - const result = transform.persistParseErrors(); - await result(sampleReq, (null as unknown) as any, nextSpy as any); + describe('should fail when', () => { + it('optional values are invalid', async () => { + const apiResponse = { status: 1, reason: 1 }; + const response = responseValidator.validateResponse(200, apiResponse); - expect(nextSpy).to.have.been.called; + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors[0].message).to.equal('must be string'); + }); + }); + }); }); - it('should return with a failed status if errors exist', async () => { - const result = transform.persistParseErrors(); - await result({ ...sampleReq, parseError: 'some error exists' }, sampleRes as any, (null as unknown) as any); + describe('transform file', () => { + afterEach(() => { + sinon.restore(); + }); + + it('throws an error when req.body.occurrence_submission_id is empty', async () => { + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.body = {}; + + const requestHandler = transform.transform(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTPError).status).to.equal(400); + expect((actualError as HTTPError).message).to.equal('Missing required parameter `occurrence field`'); + } + }); + + it('returns a 200 if req.body.occurrence_submission_id exists', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.body = { + occurrence_submission_id: '123-456-789' + }; + mockReq['keycloak_token'] = 'token'; + + const transformFileStub = sinon.stub(ValidationService.prototype, 'transformFile').resolves(); + + const requestHandler = transform.transform(); + await requestHandler(mockReq, mockRes, mockNext); + expect(mockRes.statusValue).to.equal(200); + expect(transformFileStub).to.have.been.calledOnceWith(mockReq.body.occurrence_submission_id); + expect(mockRes.jsonValue).to.eql({ status: 'success' }); + }); + + it('catches an error on transformFile', async () => { + const dbConnectionObj = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const transformFileStub = sinon + .stub(ValidationService.prototype, 'transformFile') + .throws(new Error('test transformFile error')); + const errorServiceStub = sinon.stub(ErrorService.prototype, 'insertSubmissionStatus').resolves(); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq['keycloak_token'] = 'token'; + + mockReq.body = { + occurrence_submission_id: '123-456-789' + }; + + const requestHandler = transform.transform(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(transformFileStub).to.have.been.calledOnce; + expect(errorServiceStub).to.have.been.calledOnce; + expect(dbConnectionObj.rollback).to.have.been.calledOnce; + expect(dbConnectionObj.release).to.have.been.calledOnce; + expect((actualError as Error).message).to.equal('test transformFile error'); + } + }); + + it('catches an error on insertSubmissionStatus', async () => { + const dbConnectionObj = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const transformFileStub = sinon + .stub(ValidationService.prototype, 'transformFile') + .throws(new Error('test transformFile error')); + const errorServiceStub = sinon + .stub(ErrorService.prototype, 'insertSubmissionStatus') + .throws(new Error('test insertSubmissionStatus error')); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq['keycloak_token'] = 'token'; + + mockReq.body = { + occurrence_submission_id: '123-456-789' + }; + + const requestHandler = transform.transform(); - expect(actualResult).to.eql({ status: 'failed', reason: 'Unable to parse submission' }); + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(transformFileStub).to.have.been.calledOnce; + expect(errorServiceStub).to.have.been.calledOnce; + expect(dbConnectionObj.rollback).to.have.been.calledOnce; + expect(dbConnectionObj.release).to.have.been.calledOnce; + expect((actualError as Error).message).to.equal('test insertSubmissionStatus error'); + } + }); }); }); diff --git a/api/src/paths/xlsx/transform.ts b/api/src/paths/xlsx/transform.ts index aa0fc2c0ad..b49baaac00 100644 --- a/api/src/paths/xlsx/transform.ts +++ b/api/src/paths/xlsx/transform.ts @@ -1,24 +1,13 @@ -import AdmZip from 'adm-zip'; import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../constants/roles'; import { SUBMISSION_STATUS_TYPE } from '../../constants/status'; import { getDBConnection } from '../../database/db'; -import { - getOccurrenceSubmission, - getOccurrenceSubmissionInputS3Key, - getS3File, - insertSubmissionStatus, - sendResponse, - updateSurveyOccurrenceSubmissionWithOutputKey -} from '../../paths/dwc/validate'; +import { HTTP400 } from '../../errors/http-error'; import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; -import { uploadBufferToS3 } from '../../utils/file-utils'; +import { ErrorService } from '../../services/error-service'; +import { ValidationService } from '../../services/validation-service'; import { getLogger } from '../../utils/logger'; -import { TransformationSchemaParser } from '../../utils/media/xlsx/transformation/transformation-schema-parser'; -import { XLSXTransformation } from '../../utils/media/xlsx/transformation/xlsx-transformation'; -import { XLSXCSV } from '../../utils/media/xlsx/xlsx-file'; -import { getTemplateMethodologySpeciesRecord, prepXLSX } from './validate'; const defaultLog = getLogger('paths/xlsx/transform'); @@ -34,16 +23,7 @@ export const POST: Operation = [ ] }; }), - getOccurrenceSubmission(), - getOccurrenceSubmissionInputS3Key(), - getS3File(), - prepXLSX(), - persistParseErrors(), - getTransformationSchema(), - getTransformationRules(), - transformXLSX(), - persistTransformationResults(), - sendResponse() + transform() ]; POST.apiDoc = { @@ -69,6 +49,9 @@ POST.apiDoc = { description: 'A survey occurrence submission ID', type: 'number', example: 1 + }, + survey_id: { + type: 'number' } } } @@ -112,174 +95,36 @@ POST.apiDoc = { } }; -export function persistParseErrors(): RequestHandler { - return async (req, res, next) => { - const parseError = req['parseError']; - - if (!parseError) { - // no errors to persist, skip to next step - return next(); +export function transform(): RequestHandler { + return async (req, res) => { + const submissionId = req.body.occurrence_submission_id; + const surveyId = req.body.survey_id; + if (!submissionId) { + throw new HTTP400('Missing required parameter `occurrence field`'); } - // file is not parsable, don't continue to next step and return early - // TODO add new status for "Transformation Failed" and insert new status record? - return res.status(200).json({ status: 'failed', reason: 'Unable to parse submission' }); - }; -} - -export function getTransformationSchema(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'getTransformationSchema', message: 'xlsx transform' }); + res.status(200).json({ status: 'success' }); const connection = getDBConnection(req['keycloak_token']); - try { await connection.open(); - const xlsxCsv = req['xlsx']; - const template_id = xlsxCsv.workbook.rawWorkbook.Custprops.sims_template_id; - const field_method_id = xlsxCsv.workbook.rawWorkbook.Custprops.sims_csm_id; - - const templateMethodologySpeciesRecord = await getTemplateMethodologySpeciesRecord( - Number(field_method_id), - Number(template_id), - connection - ); + const service = new ValidationService(connection); + await service.transformFile(submissionId, surveyId); await connection.commit(); - - const transformationSchema = templateMethodologySpeciesRecord?.transform; - - if (!transformationSchema) { - // TODO handle errors if no transformation schema is found? - // No schema to validate the template, insert error? - // See `xlsx/validate/getValidationSchema()` - return res.status(200).json({ - status: 'failed', - reason: 'Unable to fetch an appropriate transformation schema for your submission' - }); - } - - req['transformationSchema'] = transformationSchema; - - next(); } catch (error) { - defaultLog.debug({ label: 'getTransformationSchema', message: 'error', error }); + defaultLog.debug({ label: 'transform xlsx', message: 'error', error }); + // Unexpected error occurred, rolling DB back to safe state await connection.rollback(); + + // We still want to track that the submission failed to present to the user + const errorService = new ErrorService(connection); + await errorService.insertSubmissionStatus(submissionId, SUBMISSION_STATUS_TYPE.SYSTEM_ERROR); + await connection.commit(); throw error; } finally { connection.release(); } }; } - -export function getTransformationRules(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'getTransformationRules', message: 'xlsx transform' }); - - try { - const transformationSchema: JSON = req['transformationSchema']; - - const transformationSchemaParser = new TransformationSchemaParser(transformationSchema); - - req['transformationSchemaParser'] = transformationSchemaParser; - - next(); - } catch (error) { - defaultLog.debug({ label: 'getTransformationRules', message: 'error', error }); - throw error; - } - }; -} - -export function transformXLSX(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'transformXLSX', message: 'xlsx transform' }); - - try { - const xlsxCsv: XLSXCSV = req['xlsx']; - - const transformationSchemaParser: TransformationSchemaParser = req['transformationSchemaParser']; - - const xlsxTransformation = new XLSXTransformation(transformationSchemaParser, xlsxCsv); - - const transformedData = await xlsxTransformation.transform(); - - const worksheets = xlsxTransformation.dataToSheet(transformedData); - - const fileBuffers = Object.entries(worksheets).map(([fileName, worksheet]) => { - return { - name: fileName, - buffer: xlsxCsv.worksheetToBuffer(worksheet) - }; - }); - - req['fileBuffers'] = fileBuffers; - - next(); - } catch (error) { - defaultLog.debug({ label: 'transformXLSX', message: 'error', error }); - throw error; - } - }; -} - -export function persistTransformationResults(): RequestHandler { - return async (req, res, next) => { - try { - defaultLog.debug({ label: 'persistTransformationResults', message: 'xlsx transform' }); - const fileBuffers: { name: string; buffer: Buffer }[] = req['fileBuffers']; - - // Build the archive zip file - const dwcArchiveZip = new AdmZip(); - fileBuffers.forEach((file) => dwcArchiveZip.addFile(`${file.name}.csv`, file.buffer)); - - // Build output s3Key based on the original input s3Key - const s3Key: string = req['s3Key']; - - // Remove the filename from original s3Key - // project/1/survey/1/submission/file_name.txt -> project/1/survey/1/submission - const outputS3KeyPrefix = s3Key.split('/').slice(0, -1).join('/'); - - const xlsxCsv: XLSXCSV = req['xlsx']; - const outputFileName = `${xlsxCsv.rawFile.name}.zip`; - - const outputS3Key = `${outputS3KeyPrefix}/${outputFileName}`; - - // Upload transformed archive to s3 - await uploadBufferToS3(dwcArchiveZip.toBuffer(), 'application/zip', outputS3Key); - - const connection = getDBConnection(req['keycloak_token']); - - try { - await connection.open(); - - // Update occurrence submission record to include the transformed output file name and s3 key - await updateSurveyOccurrenceSubmissionWithOutputKey( - req.body.occurrence_submission_id, - outputFileName, - outputS3Key, - connection - ); - - await insertSubmissionStatus( - req.body.occurrence_submission_id, - SUBMISSION_STATUS_TYPE.TEMPLATE_TRANSFORMED, - connection - ); - - await connection.commit(); - - next(); - } catch (error) { - await connection.rollback(); - throw error; - } finally { - connection.release(); - } - } catch (error) { - defaultLog.debug({ label: 'persistTransformationResults', message: 'error', error }); - throw error; - } - }; -} diff --git a/api/src/paths/xlsx/validate.test.ts b/api/src/paths/xlsx/validate.test.ts index 89a7412836..fc11e3a21b 100644 --- a/api/src/paths/xlsx/validate.test.ts +++ b/api/src/paths/xlsx/validate.test.ts @@ -1,191 +1,240 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; +import OpenAPIRequestValidator, { OpenAPIRequestValidatorArgs } from 'openapi-request-validator'; +import OpenAPIResponseValidator, { OpenAPIResponseValidatorArgs } from 'openapi-response-validator'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import xlsx from 'xlsx'; -import { HTTPError } from '../../errors/custom-error'; -import survey_queries from '../../queries/survey'; -import { ArchiveFile, MediaFile } from '../../utils/media/media-file'; -import * as media_utils from '../../utils/media/media-utils'; -import { getMockDBConnection } from '../../__mocks__/db'; +import * as db from '../../database/db'; +import { HTTPError } from '../../errors/http-error'; +import { ErrorService } from '../../services/error-service'; +import { ValidationService } from '../../services/validation-service'; +import { getMockDBConnection, getRequestHandlerMocks } from '../../__mocks__/db'; import * as validate from './validate'; +import { POST } from './validate'; chai.use(sinonChai); -describe('prepXLSX', () => { - const sampleReq = { - keycloak_token: {}, - s3File: { - fieldname: 'media', - originalname: 'test.txt', - encoding: '7bit', - mimetype: 'text/plain', - size: 340 - } - } as any; - - afterEach(() => { - sinon.restore(); - }); - - it('should set parseError when failed to parse s3File', async () => { - const nextSpy = sinon.spy(); - - sinon.stub(media_utils, 'parseUnknownMedia').returns(null); - - const result = validate.prepXLSX(); - await result(sampleReq, (null as unknown) as any, nextSpy as any); - - expect(sampleReq.parseError).to.eql('Failed to parse submission, file was empty'); - expect(nextSpy).to.have.been.called; - }); - - it('should set parseError when not a valid xlsx csv file', async () => { - const nextSpy = sinon.spy(); - - sinon.stub(media_utils, 'parseUnknownMedia').returns(('not a csv file' as unknown) as ArchiveFile); - - const result = validate.prepXLSX(); - await result(sampleReq, (null as unknown) as any, nextSpy as any); - - expect(sampleReq.parseError).to.eql('Failed to parse submission, not a valid XLSX CSV file'); - expect(nextSpy).to.have.been.called; - }); - - it('should set parseError when no custom props set for the XLSX CSV file', async () => { - const nextSpy = sinon.spy(); - - const newWorkbook = xlsx.utils.book_new(); - - if (!newWorkbook.Custprops) { - newWorkbook.Custprops = {}; - } - - const ws_name = 'SheetJS'; - - /* make worksheet */ - const ws_data = [ - ['S', 'h', 'e', 'e', 't', 'J', 'S'], - [1, 2, 3, 4, 5] - ]; - const ws = xlsx.utils.aoa_to_sheet(ws_data); - - /* Add the worksheet to the workbook */ - xlsx.utils.book_append_sheet(newWorkbook, ws, ws_name); - - const buffer = xlsx.write(newWorkbook, { type: 'buffer' }); - - const mediaFile = new MediaFile('fileName', 'text/csv', buffer); - - sinon.stub(media_utils, 'parseUnknownMedia').returns(mediaFile); - - const requestHandler = validate.prepXLSX(); - await requestHandler(sampleReq, (null as unknown) as any, nextSpy as any); - - expect(sampleReq.parseError).to.eql('Failed to parse submission, template identification properties are missing'); - expect(nextSpy).to.have.been.called; - }); - - it('should call next when parameters are valid', async () => { - const nextSpy = sinon.spy(); - - const newWorkbook = xlsx.utils.book_new(); - - if (!newWorkbook.Custprops) { - newWorkbook.Custprops = {}; - } - newWorkbook.Custprops['sims_template_id'] = 1; - newWorkbook.Custprops['sims_csm_id'] = 1; - newWorkbook.Custprops['sims_species_id'] = 1234; - - const ws_name = 'SheetJS'; - - /* make worksheet */ - const ws_data = [ - ['S', 'h', 'e', 'e', 't', 'J', 'S'], - [1, 2, 3, 4, 5] - ]; - const ws = xlsx.utils.aoa_to_sheet(ws_data); - - /* Add the worksheet to the workbook */ - xlsx.utils.book_append_sheet(newWorkbook, ws, ws_name); +describe('xlsx/validate', () => { + describe('openApiSchema', () => { + describe('request validation', () => { + const requestValidator = new OpenAPIRequestValidator((POST.apiDoc as unknown) as OpenAPIRequestValidatorArgs); + + describe('should throw an error when', () => { + describe('request body', () => { + it('is null', async () => { + const request = { + headers: { + 'content-type': 'application/json' + }, + body: {} + }; + + const response = requestValidator.validateRequest(request); + + expect(response.status).to.equal(400); + expect(response.errors[0].path).to.equal('occurrence_submission_id'); + expect(response.errors[1].path).to.equal('survey_id'); + expect(response.errors[0].message).to.equal(`must have required property 'occurrence_submission_id'`); + expect(response.errors[1].message).to.equal(`must have required property 'survey_id'`); + expect(response.errors[2]).to.be.undefined; + }); + + it('is missing required fields', async () => { + const request = { + headers: { + 'content-type': 'application/json' + }, + + body: { survey_id: 1 } + }; + + const response = requestValidator.validateRequest(request); + + expect(response.status).to.equal(400); + expect(response.errors[0].path).to.equal('occurrence_submission_id'); + expect(response.errors[0].message).to.equal(`must have required property 'occurrence_submission_id'`); + }); + + it('fields are undefined', async () => { + const request = { + headers: { + 'content-type': 'application/json' + }, + + body: { survey_id: undefined, occurrence_submission_id: undefined } + }; + + const response = requestValidator.validateRequest(request); + + expect(response.status).to.equal(400); + expect(response.errors[0].path).to.equal('occurrence_submission_id'); + expect(response.errors[1].path).to.equal('survey_id'); + expect(response.errors[0].message).to.equal(`must have required property 'occurrence_submission_id'`); + expect(response.errors[1].message).to.equal(`must have required property 'survey_id'`); + expect(response.errors[2]).to.be.undefined; + }); + }); + + describe('survey_id and occurrence_submission_id', () => { + it('have invalid type ', async () => { + const request = { + headers: { 'content-type': 'application/json' }, + body: { survey_id: 'not a number', occurrence_submission_id: 'not a number' } + }; + + const response = requestValidator.validateRequest(request); + + expect(response.status).to.equal(400); + expect(response.errors[0].message).to.equal('must be number'); + expect(response.errors[1].message).to.equal('must be number'); + }); + }); + }); - const buffer = xlsx.write(newWorkbook, { type: 'buffer' }); + describe('should succeed when', () => { + it('required values are valid', async () => { + const request = { + headers: { 'content-type': 'application/json' }, + body: { survey_id: 1, occurrence_submission_id: 2 } + }; - const mediaFile = new MediaFile('fileName', 'text/csv', buffer); + const response = requestValidator.validateRequest(request); - sinon.stub(media_utils, 'parseUnknownMedia').returns(mediaFile); + expect(response).to.be.undefined; + }); + }); + }); - const requestHandler = validate.prepXLSX(); - await requestHandler(sampleReq, (null as unknown) as any, nextSpy as any); + describe('response validation', () => { + const responseValidator = new OpenAPIResponseValidator((POST.apiDoc as unknown) as OpenAPIResponseValidatorArgs); - expect(nextSpy).to.have.been.called; - }); -}); + describe('should succeed when', () => { + it('returns a null response', async () => { + const apiResponse = null; + const response = responseValidator.validateResponse(200, apiResponse); -describe('getTemplateMethodologySpeciesRecord', () => { - afterEach(() => { - sinon.restore(); - }); + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors[0].message).to.equal('must be object'); + }); - const dbConnectionObj = getMockDBConnection(); + it('optional values are valid', async () => { + const apiResponse = { status: 'my status', reason: 'my_reason' }; + const response = responseValidator.validateResponse(200, apiResponse); - it('should throw 400 error when failed to build getTemplateMethodologySpeciesRecordSQL statement', async () => { - sinon.stub(survey_queries, 'getTemplateMethodologySpeciesRecordSQL').returns(null); + expect(response).to.equal(undefined); + }); + }); - try { - await validate.getTemplateMethodologySpeciesRecord(1, 1, { ...dbConnectionObj, systemUserId: () => 20 }); + describe('should fail when', () => { + it('optional values are invalid', async () => { + const apiResponse = { status: 1, reason: 1 }; + const response = responseValidator.validateResponse(200, apiResponse); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal( - 'Failed to build SQL get template methodology species record sql statement' - ); - } + expect(response.message).to.equal('The response was not valid.'); + expect(response.errors[0].message).to.equal('must be string'); + }); + }); + }); }); - it('should return null when no rows', async () => { - const mockQuery = sinon.stub(); - - mockQuery.resolves({ - rows: [null] + describe('validate XLSX', () => { + afterEach(() => { + sinon.restore(); }); - sinon.stub(survey_queries, 'getTemplateMethodologySpeciesRecordSQL').returns(SQL`something`); + it('throws an error when req.body.occurrence_submission_id is empty', async () => { + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.body = {}; - try { - await validate.getTemplateMethodologySpeciesRecord(1, 1, { - ...dbConnectionObj, - systemUserId: () => 20 - }); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to query template methodology species table'); - } - }); - - it('should return first row on success', async () => { - const mockQuery = sinon.stub(); + const requestHandler = validate.validate(); - mockQuery.resolves({ - rows: [ - { - id: 1 - } - ] + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTPError).status).to.equal(400); + expect((actualError as HTTPError).message).to.equal('Missing required parameter `occurrence field`'); + } }); - sinon.stub(survey_queries, 'getTemplateMethodologySpeciesRecordSQL').returns(SQL`something`); + it('returns a 200 if req.body.occurrence_submission_id exists', async () => { + const dbConnectionObj = getMockDBConnection(); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq.body = { + occurrence_submission_id: '123-456-789' + }; + mockReq['keycloak_token'] = 'token'; + + const validateFileStub = sinon.stub(ValidationService.prototype, 'validateFile').resolves(); + + const requestHandler = validate.validate(); + await requestHandler(mockReq, mockRes, mockNext); + expect(mockRes.statusValue).to.equal(200); + expect(validateFileStub).to.have.been.calledOnceWith(mockReq.body.occurrence_submission_id); + expect(mockRes.jsonValue).to.eql({ status: 'success' }); + }); - const result = await validate.getTemplateMethodologySpeciesRecord(1, 1, { - ...dbConnectionObj, - query: mockQuery, - systemUserId: () => 20 + it('catches an error on validateFile', async () => { + const dbConnectionObj = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const validateFileStub = sinon + .stub(ValidationService.prototype, 'validateFile') + .throws(new Error('test validateFile error')); + const errorServiceStub = sinon.stub(ErrorService.prototype, 'insertSubmissionStatus').resolves(); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq['keycloak_token'] = 'token'; + + mockReq.body = { + occurrence_submission_id: '123-456-789' + }; + + const requestHandler = validate.validate(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(validateFileStub).to.have.been.calledOnce; + expect(errorServiceStub).to.have.been.calledOnce; + expect(dbConnectionObj.rollback).to.have.been.calledOnce; + expect(dbConnectionObj.release).to.have.been.calledOnce; + expect((actualError as Error).message).to.equal('test validateFile error'); + } }); - expect(result).to.eql({ id: 1 }); + it('catches an error on insertSubmissionStatus', async () => { + const dbConnectionObj = getMockDBConnection({ rollback: sinon.stub(), release: sinon.stub() }); + sinon.stub(db, 'getDBConnection').returns(dbConnectionObj); + + const validateFileStub = sinon + .stub(ValidationService.prototype, 'validateFile') + .throws(new Error('test validateFile error')); + const errorServiceStub = sinon + .stub(ErrorService.prototype, 'insertSubmissionStatus') + .throws(new Error('test insertSubmissionStatus error')); + + const { mockReq, mockRes, mockNext } = getRequestHandlerMocks(); + mockReq['keycloak_token'] = 'token'; + + mockReq.body = { + occurrence_submission_id: '123-456-789' + }; + + const requestHandler = validate.validate(); + + try { + await requestHandler(mockReq, mockRes, mockNext); + expect.fail(); + } catch (actualError) { + expect(validateFileStub).to.have.been.calledOnce; + expect(errorServiceStub).to.have.been.calledOnce; + expect(dbConnectionObj.rollback).to.have.been.calledOnce; + expect(dbConnectionObj.release).to.have.been.calledOnce; + expect((actualError as Error).message).to.equal('test insertSubmissionStatus error'); + } + }); }); }); diff --git a/api/src/paths/xlsx/validate.ts b/api/src/paths/xlsx/validate.ts index 35a0680768..463493478c 100644 --- a/api/src/paths/xlsx/validate.ts +++ b/api/src/paths/xlsx/validate.ts @@ -1,28 +1,13 @@ import { RequestHandler } from 'express'; import { Operation } from 'express-openapi'; import { PROJECT_ROLE } from '../../constants/roles'; -import { getDBConnection, IDBConnection } from '../../database/db'; -import { HTTP400 } from '../../errors/custom-error'; -import { queries } from '../../queries/queries'; +import { SUBMISSION_STATUS_TYPE } from '../../constants/status'; +import { getDBConnection } from '../../database/db'; +import { HTTP400 } from '../../errors/http-error'; import { authorizeRequestHandler } from '../../request-handlers/security/authorization'; +import { ErrorService } from '../../services/error-service'; +import { ValidationService } from '../../services/validation-service'; import { getLogger } from '../../utils/logger'; -import { ICsvState } from '../../utils/media/csv/csv-file'; -import { IMediaState, MediaFile } from '../../utils/media/media-file'; -import { parseUnknownMedia } from '../../utils/media/media-utils'; -import { ValidationSchemaParser } from '../../utils/media/validation/validation-schema-parser'; -import { XLSXCSV } from '../../utils/media/xlsx/xlsx-file'; -import { - getOccurrenceSubmission, - getOccurrenceSubmissionInputS3Key, - getS3File, - getValidateAPIDoc, - getValidationRules, - insertSubmissionMessage, - insertSubmissionStatus, - persistParseErrors, - persistValidationResults, - sendResponse -} from '../dwc/validate'; const defaultLog = getLogger('paths/xlsx/validate'); @@ -38,174 +23,108 @@ export const POST: Operation = [ ] }; }), - getOccurrenceSubmission(), - getOccurrenceSubmissionInputS3Key(), - getS3File(), - prepXLSX(), - persistParseErrors(), - getValidationSchema(), - getValidationRules(), - validateXLSX(), - persistValidationResults({ initialSubmissionStatusType: 'Template Validated' }), - sendResponse() + validate() ]; POST.apiDoc = { - ...getValidateAPIDoc( - 'Validates an XLSX survey observation submission.', - 'Validate XLSX survey observation submission OK', - ['survey', 'observation', 'xlsx'] - ) -}; - -export function prepXLSX(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'prepXLSX', message: 's3File' }); - - try { - const s3File = req['s3File']; - - const parsedMedia = parseUnknownMedia(s3File); - - if (!parsedMedia) { - req['parseError'] = 'Failed to parse submission, file was empty'; - - return next(); - } - - if (!(parsedMedia instanceof MediaFile)) { - req['parseError'] = 'Failed to parse submission, not a valid XLSX CSV file'; - - return next(); + description: 'Validates an XLSX survey observation submission.', + tags: ['survey', 'observation', 'xlsx'], + security: [ + { + Bearer: [] + } + ], + requestBody: { + description: 'Request body', + content: { + 'application/json': { + schema: { + type: 'object', + required: ['occurrence_submission_id', 'survey_id'], + properties: { + project_id: { + type: 'number' + }, + occurrence_submission_id: { + description: 'A survey occurrence submission ID', + type: 'number', + example: 1 + }, + survey_id: { + type: 'number' + } + } + } } - - const xlsxCsv = new XLSXCSV(parsedMedia); - - const template_id = xlsxCsv.workbook.rawWorkbook.Custprops.sims_template_id; - const species_id = xlsxCsv.workbook.rawWorkbook.Custprops.sims_species_id; - const csm_id = xlsxCsv.workbook.rawWorkbook.Custprops.sims_csm_id; - - if (!template_id || !species_id || !csm_id) { - req['parseError'] = 'Failed to parse submission, template identification properties are missing'; + } + }, + responses: { + 200: { + description: 'Validate XLSX survey observation submission OK', + content: { + 'application/json': { + schema: { + type: 'object', + properties: { + status: { + type: 'string' + }, + reason: { + type: 'string' + } + } + } + } } + }, + 400: { + $ref: '#/components/responses/400' + }, + 401: { + $ref: '#/components/responses/401' + }, + 403: { + $ref: '#/components/responses/403' + }, + 500: { + $ref: '#/components/responses/500' + }, + default: { + $ref: '#/components/responses/default' + } + } +}; - req['xlsx'] = xlsxCsv; - - next(); - } catch (error) { - defaultLog.error({ label: 'prepXLSX', message: 'error', error }); - throw error; +export function validate(): RequestHandler { + return async (req, res) => { + const submissionId = req.body.occurrence_submission_id; + const surveyId = req.body.survey_id; + if (!submissionId) { + throw new HTTP400('Missing required parameter `occurrence field`'); } - }; -} -export function getValidationSchema(): RequestHandler { - return async (req, res, next) => { - const connection = getDBConnection(req['keycloak_token']); + res.status(200).json({ status: 'success' }); + const connection = getDBConnection(req['keycloak_token']); try { await connection.open(); - const xlsxCsv = req['xlsx']; - const template_id = xlsxCsv.workbook.rawWorkbook.Custprops.sims_template_id; - const field_method_id = xlsxCsv.workbook.rawWorkbook.Custprops.sims_csm_id; - - const templateMethodologySpeciesRecord = await getTemplateMethodologySpeciesRecord( - Number(field_method_id), - Number(template_id), - connection - ); - - const validationSchema = templateMethodologySpeciesRecord?.validation; - - if (!validationSchema) { - // no schema to validate the template, generate error + const validationService = new ValidationService(connection); + await validationService.validateFile(submissionId, surveyId); - const submissionStatusId = await insertSubmissionStatus( - req.body.occurrence_submission_id, - 'System Error', - connection - ); - - await insertSubmissionMessage( - submissionStatusId, - 'Error', - `Unable to fetch an appropriate template validation schema for your submission`, - 'Missing Validation Schema', - connection - ); - - await connection.commit(); - - return res.status(200).json({ status: 'failed' }); - } - - req['validationSchema'] = validationSchema; - - next(); + await connection.commit(); } catch (error) { - defaultLog.error({ label: 'getValidationSchema', message: 'error', error }); + defaultLog.error({ label: 'validate xlsx', message: 'error', error }); + // Unexpected error occurred, rolling DB back to safe state await connection.rollback(); + + // We still want to track that the submission failed to present to the user + const errorService = new ErrorService(connection); + await errorService.insertSubmissionStatus(submissionId, SUBMISSION_STATUS_TYPE.SYSTEM_ERROR); + await connection.commit(); throw error; } finally { connection.release(); } }; } - -export function validateXLSX(): RequestHandler { - return async (req, res, next) => { - defaultLog.debug({ label: 'validateXLSX', message: 'xlsx' }); - - try { - const xlsxCsv: XLSXCSV = req['xlsx']; - - const validationSchemaParser: ValidationSchemaParser = req['validationSchemaParser']; - - const mediaState: IMediaState = xlsxCsv.isMediaValid(validationSchemaParser); - - req['mediaState'] = mediaState; - - if (!mediaState.isValid) { - // The file itself is invalid, skip remaining validation - return next(); - } - - const csvState: ICsvState[] = xlsxCsv.isContentValid(validationSchemaParser); - - req['csvState'] = csvState; - - next(); - } catch (error) { - defaultLog.error({ label: 'validateXLSX', message: 'error', error }); - throw error; - } - }; -} - -/** - * Get a template_methodology_species record from the template_methodologies_species table - * - * @param {number} fieldMethodId - * @param {number} templateId - * @param {IDBConnection} connection - * @return {*} {Promise} - */ -export const getTemplateMethodologySpeciesRecord = async ( - fieldMethodId: number, - templateId: number, - connection: IDBConnection -): Promise => { - const sqlStatement = queries.survey.getTemplateMethodologySpeciesRecordSQL(fieldMethodId, templateId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL get template methodology species record sql statement'); - } - const response = await connection.query(sqlStatement.text, sqlStatement.values); - - if (!response) { - throw new HTTP400('Failed to query template methodology species table'); - } - - return (response && response.rows && response.rows[0]) || null; -}; diff --git a/api/src/queries/codes/db-constant-queries.test.ts b/api/src/queries/codes/db-constant-queries.test.ts new file mode 100644 index 0000000000..3c1614da51 --- /dev/null +++ b/api/src/queries/codes/db-constant-queries.test.ts @@ -0,0 +1,36 @@ +import { expect } from 'chai'; +import { describe } from 'mocha'; +import { + getDbCharacterSystemConstantSQL, + getDbCharacterSystemMetaDataConstantSQL, + getDbNumericSystemConstantSQL, + getDbNumericSystemMetaDataConstantSQL +} from './db-constant-queries'; + +describe('getDbCharacterSystemConstantSQL', () => { + it('returns valid sql statement', () => { + const response = getDbCharacterSystemConstantSQL('string'); + expect(response).to.not.be.null; + }); +}); + +describe('getDbNumericSystemConstantSQL', () => { + it('returns valid sql statement', () => { + const response = getDbNumericSystemConstantSQL('string'); + expect(response).to.not.be.null; + }); +}); + +describe('getDbCharacterSystemMetaDataConstantSQL', () => { + it('returns valid sql statement', () => { + const response = getDbCharacterSystemMetaDataConstantSQL('string'); + expect(response).to.not.be.null; + }); +}); + +describe('getDbNumericSystemMetaDataConstantSQL', () => { + it('returns valid sql statement', () => { + const response = getDbNumericSystemMetaDataConstantSQL('string'); + expect(response).to.not.be.null; + }); +}); diff --git a/api/src/queries/database/index.ts b/api/src/queries/database/index.ts deleted file mode 100644 index b596c80ee2..0000000000 --- a/api/src/queries/database/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import * as userContext from './user-context-queries'; - -export default { ...userContext }; diff --git a/api/src/queries/database/user-context-queries.test.ts b/api/src/queries/database/user-context-queries.test.ts deleted file mode 100644 index d471a59498..0000000000 --- a/api/src/queries/database/user-context-queries.test.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { SYSTEM_IDENTITY_SOURCE } from '../../constants/database'; -import { setSystemUserContextSQL } from './user-context-queries'; - -describe('setSystemUserContextSQL', () => { - it('has empty userIdentifier', () => { - const response = setSystemUserContextSQL('', SYSTEM_IDENTITY_SOURCE.IDIR); - - expect(response).to.be.null; - }); - - it('identifies an IDIR user', () => { - const response = setSystemUserContextSQL('idir-user', SYSTEM_IDENTITY_SOURCE.IDIR); - - expect(response).not.to.be.null; - }); - - it('identifies a BCEID user', () => { - const response = setSystemUserContextSQL('bceid-user', SYSTEM_IDENTITY_SOURCE.BCEID); - - expect(response).not.to.be.null; - }); -}); diff --git a/api/src/queries/database/user-context-queries.ts b/api/src/queries/database/user-context-queries.ts deleted file mode 100644 index f4b81f48a9..0000000000 --- a/api/src/queries/database/user-context-queries.ts +++ /dev/null @@ -1,13 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; -import { SYSTEM_IDENTITY_SOURCE } from '../../constants/database'; - -export const setSystemUserContextSQL = ( - userIdentifier: string, - systemUserType: SYSTEM_IDENTITY_SOURCE -): SQLStatement | null => { - if (!userIdentifier) { - return null; - } - - return SQL`select api_set_context(${userIdentifier}, ${systemUserType});`; -}; diff --git a/api/src/queries/dwc/dwc-queries.ts b/api/src/queries/dwc/dwc-queries.ts deleted file mode 100644 index 7c9f7d24f3..0000000000 --- a/api/src/queries/dwc/dwc-queries.ts +++ /dev/null @@ -1,458 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; - -/** - * SQL query to get submission occurrence record given package ID for a particular survey. - * - * @param {number} dataPackageId - * @returns {SQLStatement} sql query object - */ -export const getSurveyOccurrenceSubmissionSQL = (dataPackageId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - SELECT - os.* - from - occurrence_submission os - , occurrence_submission_data_package osdp - where - osdp.data_package_id = ${dataPackageId} - and os.occurrence_submission_id = osdp.occurrence_submission_id; - `; - - return sqlStatement; -}; - -/** - * SQL query to get data package record given package ID. - * - * @param {number} dataPackageId - * @returns {SQLStatement} sql query object - */ -export const getDataPackageSQL = (dataPackageId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - SELECT - * - from - data_package - where - data_package_id = ${dataPackageId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to get occurrence submission publish date. - * - * @param {number} occurrenceSubmissionId - * @returns {SQLStatement} sql query object - */ -export const getPublishedSurveyStatusSQL = (occurrenceSubmissionId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - SELECT - * - from - survey_status - where - survey_status = api_get_character_system_constant('OCCURRENCE_SUBMISSION_STATE_PUBLISHED') - and occurrence_submission_id = ${occurrenceSubmissionId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to get survey data. - * - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const getSurveySQL = (surveyId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - SELECT - survey_id, - project_id, - field_method_id, - uuid, - name, - objectives, - start_date, - lead_first_name, - lead_last_name, - end_date, - location_description, - location_name, - publish_timestamp, - create_date, - create_user, - update_date, - update_user, - revision_count - from - survey - where survey_id = ${surveyId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to get project data. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getProjectSQL = (projectId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - SELECT - project_id, - project_type_id, - uuid, - name, - objectives, - location_description, - start_date, - end_date, - caveats, - comments, - coordinator_first_name, - coordinator_last_name, - coordinator_email_address, - coordinator_agency_name, - coordinator_public, - publish_timestamp, - create_date, - create_user, - update_date, - update_user, - revision_count - from - project - where project_id = ${projectId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to get survey funding source data. - * - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const getSurveyFundingSourceSQL = (surveyId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - select - a.*, - b.name investment_action_category_name, - c.name funding_source_name - from - project_funding_source a, - investment_action_category b, - funding_source c - where - project_funding_source_id in ( - select - project_funding_source_id - from - survey_funding_source - where - survey_id = ${surveyId}) - and b.investment_action_category_id = a.investment_action_category_id - and c.funding_source_id = b.funding_source_id; - `; - - return sqlStatement; -}; - -/** - * SQL query to get project funding source data. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getProjectFundingSourceSQL = (projectId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - select - a.*, - b.name investment_action_category_name, - c.name funding_source_name - from - project_funding_source a, - investment_action_category b, - funding_source c - where - project_id = ${projectId} - and b.investment_action_category_id = a.investment_action_category_id - and c.funding_source_id = b.funding_source_id; - `; - - return sqlStatement; -}; - -/** - * SQL query to get geometry bounding box. - * - * @param {number} primaryKey - * @param {string} primaryKeyName - * @param {string} targetTable - * @returns {SQLStatement} sql query object - */ -export const getGeometryBoundingBoxSQL = ( - primaryKey: number, - primaryKeyName: string, - targetTable: string -): SQLStatement => { - // TODO: this only provides us with the bounding box of the first polygon - const sqlStatement: SQLStatement = SQL` - with envelope as ( - select - ST_Envelope(geography::geometry) geom - from ` - .append(targetTable) - .append( - SQL` - where ` - ) - .append(primaryKeyName).append(SQL` = ${primaryKey}) - select - st_xmax(geom), - st_ymax(geom), - st_xmin(geom), - st_ymin(geom) - from - envelope; - `); - - return sqlStatement; -}; - -/** - * SQL query to get geometry polygons. - * - * @param {number} primaryKey - * @param {string} primaryKeyName - * @param {string} targetTable - * @returns {SQLStatement} sql query object - */ -export const getGeometryPolygonsSQL = ( - primaryKey: number, - primaryKeyName: string, - targetTable: string -): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - with polygons as ( - select - (st_dumppoints(g.geom)).* - from ( - select - geography::geometry as geom - from ` - .append(targetTable) - .append( - SQL` - where ` - ) - .append(primaryKeyName).append(SQL` = ${primaryKey}) as g), - points as ( - select - path[1] polygon, - path[2] point, - jsonb_build_array(st_y(p.geom), st_x(p.geom)) points - from - polygons p - order by - path[1], - path[2]) - select - json_agg(p.points) points - from - points p - group by - polygon; - `); - - return sqlStatement; -}; - -/** - * SQL query to get taxonomic coverage. - * - * @param {number} surveyId - * @param {boolean} isFocal - * @returns {SQLStatement} sql query object - */ -export const getTaxonomicCoverageSQL = (surveyId: number, isFocal: boolean): SQLStatement => { - let focalPredicate = 'and b.is_focal'; - if (!isFocal) { - focalPredicate = 'and not b.is_focal'; - } - - // TODO replace call to wldtaxonomic_units with a call to the taxonomy service - const sqlStatement: SQLStatement = SQL` - select - a.* - from - wldtaxonomic_units a, - study_species b - where - a.wldtaxonomic_units_id = b.wldtaxonomic_units_id - and b.survey_id = ${surveyId} - `.append(focalPredicate); - - return sqlStatement; -}; - -/** - * SQL query to get project IUCN conservation data. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getProjectIucnConservationSQL = (projectId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - select - a.name level_1_name, - b.name level_2_name, - c.name level_3_name - from - iucn_conservation_action_level_1_classification a, - iucn_conservation_action_level_2_subclassification b, - iucn_conservation_action_level_3_subclassification c, - project_iucn_action_classification d - where - d.project_id = ${projectId} - and c.iucn_conservation_action_level_3_subclassification_id = d.iucn_conservation_action_level_3_subclassification_id - and b.iucn_conservation_action_level_2_subclassification_id = c.iucn_conservation_action_level_2_subclassification_id - and a.iucn_conservation_action_level_1_classification_id = b.iucn_conservation_action_level_1_classification_id; - `; - - return sqlStatement; -}; - -/** - * SQL query to get project stakeholder partnership data. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getProjectStakeholderPartnershipSQL = (projectId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - select - a.name - from - stakeholder_partnership a - where - a.project_id = ${projectId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to get project activity data. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getProjectActivitySQL = (projectId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - select - a.name - from - activity a, - project_activity b - where - b.project_id = ${projectId} - and a.activity_id = b.activity_id; - `; - - return sqlStatement; -}; - -/** - * SQL query to get climate initiative data. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getProjectClimateInitiativeSQL = (projectId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - select - a.name - from - climate_change_initiative a, - project_climate_initiative b - where - b.project_id = ${projectId} - and a.climate_change_initiative_id = b.climate_change_initiative_id; - `; - - return sqlStatement; -}; - -/** - * SQL query to get project first nations data. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getProjectFirstNationsSQL = (projectId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - select - a.name - from - first_nations a, - project_first_nation b - where - b.project_id = ${projectId} - and a.first_nations_id = b.first_nations_id; - `; - - return sqlStatement; -}; - -/** - * SQL query to get project management actions data. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getProjectManagementActionsSQL = (projectId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - select - a.* - from - management_action_type a, - project_management_actions b - where - a.management_action_type_id = b.management_action_type_id - and b.project_id = ${projectId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to get survey proprietor data. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getSurveyProprietorSQL = (surveyId: number): SQLStatement => { - const sqlStatement: SQLStatement = SQL` - select - a.name proprietor_type_name, - b.name first_nations_name, - c.* - from - proprietor_type a, - first_nations b, - survey_proprietor c - where - c.survey_id = ${surveyId} - and b.first_nations_id = c.first_nations_id - and a.proprietor_type_id = c.proprietor_type_id; - `; - - return sqlStatement; -}; diff --git a/api/src/queries/dwc/index.ts b/api/src/queries/dwc/index.ts deleted file mode 100644 index 87a461dc6b..0000000000 --- a/api/src/queries/dwc/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import * as dwc from './dwc-queries'; - -export default { ...dwc }; diff --git a/api/src/queries/occurrence/index.ts b/api/src/queries/occurrence/index.ts deleted file mode 100644 index 27d7393a03..0000000000 --- a/api/src/queries/occurrence/index.ts +++ /dev/null @@ -1,4 +0,0 @@ -import * as occurrenceCreate from './occurrence-create-queries'; -import * as occurrenceView from './occurrence-view-queries'; - -export default { ...occurrenceCreate, ...occurrenceView }; diff --git a/api/src/queries/occurrence/occurrence-create-queries.test.ts b/api/src/queries/occurrence/occurrence-create-queries.test.ts deleted file mode 100644 index 0b69c5c8e5..0000000000 --- a/api/src/queries/occurrence/occurrence-create-queries.test.ts +++ /dev/null @@ -1,30 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { PostOccurrence } from '../../models/occurrence-create'; -import { postOccurrenceSQL } from './occurrence-create-queries'; - -describe('postOccurrenceSQL', () => { - it('returns null response when null occurrenceSubmissionId provided', () => { - const response = postOccurrenceSQL((null as unknown) as number, {} as PostOccurrence); - - expect(response).to.be.null; - }); - - it('returns null response when null occurrence provided', () => { - const response = postOccurrenceSQL(1, (null as unknown) as PostOccurrence); - - expect(response).to.be.null; - }); - - it('returns non null response when valid surveyId and occurrence provided', () => { - const response = postOccurrenceSQL(1, new PostOccurrence()); - - expect(response).to.not.be.null; - }); - - it('returns non null response when occurrence has verbatimCoordinates', () => { - const response = postOccurrenceSQL(1, new PostOccurrence({ verbatimCoordinates: '9N 300457 5884632' })); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/occurrence/occurrence-create-queries.ts b/api/src/queries/occurrence/occurrence-create-queries.ts deleted file mode 100644 index 9434791103..0000000000 --- a/api/src/queries/occurrence/occurrence-create-queries.ts +++ /dev/null @@ -1,71 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; -import { PostOccurrence } from '../../models/occurrence-create'; -import { parseLatLongString, parseUTMString } from '../../utils/spatial-utils'; - -export const postOccurrenceSQL = (occurrenceSubmissionId: number, occurrence: PostOccurrence): SQLStatement | null => { - if (!occurrenceSubmissionId || !occurrence) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO occurrence ( - occurrence_submission_id, - taxonid, - lifestage, - sex, - data, - vernacularname, - eventdate, - individualcount, - organismquantity, - organismquantitytype, - geography - ) VALUES ( - ${occurrenceSubmissionId}, - ${occurrence.associatedTaxa}, - ${occurrence.lifeStage}, - ${occurrence.sex}, - ${occurrence.data}, - ${occurrence.vernacularName}, - ${occurrence.eventDate}, - ${occurrence.individualCount}, - ${occurrence.organismQuantity}, - ${occurrence.organismQuantityType} - `; - - const utm = parseUTMString(occurrence.verbatimCoordinates); - const latLong = parseLatLongString(occurrence.verbatimCoordinates); - - if (utm) { - // transform utm string into point, if it is not null - sqlStatement.append(SQL` - ,public.ST_Transform( - public.ST_SetSRID( - public.ST_MakePoint(${utm.easting}, ${utm.northing}), - ${utm.zone_srid} - ), - 4326 - ) - `); - } else if (latLong) { - // transform latLong string into point, if it is not null - sqlStatement.append(SQL` - ,public.ST_Transform( - public.ST_SetSRID( - public.ST_MakePoint(${latLong.long}, ${latLong.lat}), - 4326 - ), - 4326 - ) - `); - } else { - // insert null geography - sqlStatement.append(SQL` - ,null - `); - } - - sqlStatement.append(');'); - - return sqlStatement; -}; diff --git a/api/src/queries/occurrence/occurrence-view-queries.test.ts b/api/src/queries/occurrence/occurrence-view-queries.test.ts deleted file mode 100644 index ec698bb0fc..0000000000 --- a/api/src/queries/occurrence/occurrence-view-queries.test.ts +++ /dev/null @@ -1,17 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { getOccurrencesForViewSQL } from './occurrence-view-queries'; - -describe('getOccurrencesForViewSQL', () => { - it('returns null response when null occurrenceSubmissionId provided', () => { - const response = getOccurrencesForViewSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid occurrenceSubmissionId provided', () => { - const response = getOccurrencesForViewSQL(1); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/occurrence/occurrence-view-queries.ts b/api/src/queries/occurrence/occurrence-view-queries.ts deleted file mode 100644 index 7c77743c2c..0000000000 --- a/api/src/queries/occurrence/occurrence-view-queries.ts +++ /dev/null @@ -1,33 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; - -export const getOccurrencesForViewSQL = (occurrenceSubmissionId: number): SQLStatement | null => { - if (!occurrenceSubmissionId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - public.ST_asGeoJSON(o.geography) as geometry, - o.taxonid, - o.occurrence_id, - o.lifestage, - o.sex, - o.vernacularname, - o.individualcount, - o.organismquantity, - o.organismquantitytype, - o.eventdate - FROM - occurrence as o - LEFT OUTER JOIN - occurrence_submission as os - ON - o.occurrence_submission_id = os.occurrence_submission_id - WHERE - o.occurrence_submission_id = ${occurrenceSubmissionId} - AND - os.delete_timestamp is null; - `; - - return sqlStatement; -}; diff --git a/api/src/queries/permit/index.ts b/api/src/queries/permit/index.ts deleted file mode 100644 index 639ee81258..0000000000 --- a/api/src/queries/permit/index.ts +++ /dev/null @@ -1,5 +0,0 @@ -import * as permitCreate from './permit-create-queries'; -import * as permitUpdate from './permit-update-queries'; -import * as permitView from './permit-view-queries'; - -export default { ...permitCreate, ...permitUpdate, ...permitView }; diff --git a/api/src/queries/permit/permit-create-queries.test.ts b/api/src/queries/permit/permit-create-queries.test.ts deleted file mode 100644 index 16594109fb..0000000000 --- a/api/src/queries/permit/permit-create-queries.test.ts +++ /dev/null @@ -1,76 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { IPostPermitNoSampling } from '../../models/permit-no-sampling'; -import { PostCoordinatorData } from '../../models/project-create'; -import { postPermitNoSamplingSQL, postProjectPermitSQL } from './permit-create-queries'; - -describe('postPermitNoSamplingSQL', () => { - const data = { - permit_number: '123', - permit_type: 'permit type', - first_name: 'first', - last_name: 'last', - email_address: 'email', - coordinator_agency: 'agency', - share_contact_details: false - }; - - it('returns null when no noSamplePermit provided', () => { - const response = postPermitNoSamplingSQL((null as unknown) as IPostPermitNoSampling & PostCoordinatorData, 1); - - expect(response).to.be.null; - }); - - it('returns null when no systemUserId provided', () => { - const response = postPermitNoSamplingSQL(data, null); - - expect(response).to.be.null; - }); - - it('returns a SQLStatement when all fields are passed in as expected', () => { - const response = postPermitNoSamplingSQL(data, 1); - - expect(response).to.not.be.null; - - expect(response?.values.length).to.equal(7); - - expect(response?.values).to.deep.include('123'); - expect(response?.values).to.deep.include('first'); - expect(response?.values).to.deep.include('last'); - expect(response?.values).to.deep.include('email'); - expect(response?.values).to.deep.include('agency'); - }); -}); - -describe('postProjectPermitSQL', () => { - it('returns null when no permit number', () => { - const response = postProjectPermitSQL((null as unknown) as string, 'type', 1, 1); - - expect(response).to.be.null; - }); - - it('returns null when no permit type', () => { - const response = postProjectPermitSQL('123', (null as unknown) as string, 1, 1); - - expect(response).to.be.null; - }); - - it('returns null when no project id', () => { - const response = postProjectPermitSQL('123', 'type', (null as unknown) as number, 1); - - expect(response).to.be.null; - }); - - it('returns null when no system user id', () => { - const response = postProjectPermitSQL('123', 'type', 1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns a SQLStatement when all fields are passed in as expected', () => { - const response = postProjectPermitSQL('123', 'type', 123, 2); - - expect(response).to.not.be.null; - expect(response?.values).to.deep.include('123'); - }); -}); diff --git a/api/src/queries/permit/permit-create-queries.ts b/api/src/queries/permit/permit-create-queries.ts deleted file mode 100644 index 21d8937e66..0000000000 --- a/api/src/queries/permit/permit-create-queries.ts +++ /dev/null @@ -1,81 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; -import { IPostPermitNoSampling } from '../../models/permit-no-sampling'; -import { PostCoordinatorData } from '../../models/project-create'; - -/** - * SQL query to insert a permit row for permit associated to a project. - * - * @param {string} permitNumber - * @param {string} permitType - * @param {number} projectId - * @param {number} systemUserId - * @returns {SQLStatement} sql query object - */ -export const postProjectPermitSQL = ( - permitNumber: string, - permitType: string, - projectId: number, - systemUserId: number -): SQLStatement | null => { - if (!permitNumber || !permitType || !projectId || !systemUserId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO permit ( - project_id, - number, - type, - system_user_id - ) VALUES ( - ${projectId}, - ${permitNumber}, - ${permitType}, - ${systemUserId} - ) - RETURNING - permit_id as id; - `; - - return sqlStatement; -}; - -/** - * SQL query to insert a no sample permit row. - * - * @param {(IPostPermit & PostCoordinatorData)} noSamplePermit - * @param {number | null} systemUserId - * @returns {SQLStatement} sql query object - */ -export const postPermitNoSamplingSQL = ( - noSamplePermit: IPostPermitNoSampling & PostCoordinatorData, - systemUserId: number | null -): SQLStatement | null => { - if (!noSamplePermit || !systemUserId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO permit ( - number, - type, - coordinator_first_name, - coordinator_last_name, - coordinator_email_address, - coordinator_agency_name, - system_user_id - ) VALUES ( - ${noSamplePermit.permit_number}, - ${noSamplePermit.permit_type}, - ${noSamplePermit.first_name}, - ${noSamplePermit.last_name}, - ${noSamplePermit.email_address}, - ${noSamplePermit.coordinator_agency}, - ${systemUserId} - ) - RETURNING - permit_id as id; - `; - - return sqlStatement; -}; diff --git a/api/src/queries/permit/permit-update-queries.test.ts b/api/src/queries/permit/permit-update-queries.test.ts deleted file mode 100644 index 435b39d993..0000000000 --- a/api/src/queries/permit/permit-update-queries.test.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { associatePermitToProjectSQL } from './permit-update-queries'; - -describe('associatePermitToProjectSQL', () => { - it('returns null when no permit id', () => { - const response = associatePermitToProjectSQL((null as unknown) as number, 1); - - expect(response).to.be.null; - }); - - it('returns null when no project id', () => { - const response = associatePermitToProjectSQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns a non null response when all fields are passed in as expected', () => { - const response = associatePermitToProjectSQL(123, 2); - - expect(response).to.not.be.null; - expect(response?.values).to.deep.include(123); - }); -}); diff --git a/api/src/queries/permit/permit-update-queries.ts b/api/src/queries/permit/permit-update-queries.ts deleted file mode 100644 index 5bb02d3aa5..0000000000 --- a/api/src/queries/permit/permit-update-queries.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; - -/** - * SQL query to associate existing non-sampling permits to a project - * - * @param {number} permitId - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const associatePermitToProjectSQL = (permitId: number, projectId: number): SQLStatement | null => { - if (!permitId || !projectId) { - return null; - } - - return SQL` - UPDATE permit - SET - project_id = ${projectId}, - coordinator_first_name = NULL, - coordinator_last_name = NULL, - coordinator_email_address = NULL, - coordinator_agency_name = NULL - WHERE - permit_id = ${permitId}; - `; -}; diff --git a/api/src/queries/permit/permit-view-queries.test.ts b/api/src/queries/permit/permit-view-queries.test.ts deleted file mode 100644 index f57a5d822f..0000000000 --- a/api/src/queries/permit/permit-view-queries.test.ts +++ /dev/null @@ -1,31 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { getAllPermitsSQL, getNonSamplingPermitsSQL } from './permit-view-queries'; - -describe('getNonSamplingPermitsSQL', () => { - it('returns null when no system user id', () => { - const response = getNonSamplingPermitsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns a non null response when all fields are passed in as expected', () => { - const response = getNonSamplingPermitsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getAllPermitsSQL', () => { - it('returns null when no system user id', () => { - const response = getAllPermitsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns a non null response when all fields are passed in as expected', () => { - const response = getAllPermitsSQL(1); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/permit/permit-view-queries.ts b/api/src/queries/permit/permit-view-queries.ts deleted file mode 100644 index c90d8a701d..0000000000 --- a/api/src/queries/permit/permit-view-queries.ts +++ /dev/null @@ -1,58 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; - -/** - * SQL query to get all non-sampling permits - * - * @param {number | null} systemUserId - * @returns {SQLStatement} sql query object - */ -export const getNonSamplingPermitsSQL = (systemUserId: number | null): SQLStatement | null => { - if (!systemUserId) { - return null; - } - - return SQL` - SELECT - permit_id, - number, - type - FROM - permit - WHERE - system_user_id = ${systemUserId} - AND - project_id IS NULL; - `; -}; - -/** - * SQL query to get all permit numbers by system user id - * - * @param {number | null} systemUserId - * @returns {SQLStatement} sql query object - */ -export const getAllPermitsSQL = (systemUserId: number | null): SQLStatement | null => { - if (!systemUserId) { - return null; - } - - return SQL` - SELECT - per.permit_id as id, - per.number, - per.type, - CASE - WHEN per.project_id IS NULL THEN per.coordinator_agency_name - WHEN per.project_id IS NOT NULL THEN p.coordinator_agency_name - END as coordinator_agency, - p.name as project_name - FROM - permit as per - LEFT OUTER JOIN - project as p - ON - p.project_id = per.project_id - WHERE - system_user_id = ${systemUserId}; - `; -}; diff --git a/api/src/queries/project-participation/project-participation-queries.test.ts b/api/src/queries/project-participation/project-participation-queries.test.ts index ab9cdb82e3..e5854e4d2c 100644 --- a/api/src/queries/project-participation/project-participation-queries.test.ts +++ b/api/src/queries/project-participation/project-participation-queries.test.ts @@ -1,12 +1,6 @@ import { expect } from 'chai'; import { describe } from 'mocha'; -import { - addProjectRoleByRoleNameSQL, - deleteProjectParticipationSQL, - getAllProjectParticipantsSQL, - getAllUserProjectsSQL, - getProjectParticipationBySystemUserSQL -} from './project-participation-queries'; +import { getAllUserProjectsSQL } from './project-participation-queries'; describe('getAllUserProjectsSQL', () => { it('returns null response when null userId provided', () => { @@ -21,77 +15,3 @@ describe('getAllUserProjectsSQL', () => { expect(response).to.not.be.null; }); }); - -describe('getProjectParticipationBySystemUserSQL', () => { - it('returns null response when null projectId provided', () => { - const response = getProjectParticipationBySystemUserSQL((null as unknown) as number, 2); - - expect(response).to.be.null; - }); - - it('returns null response when null systemUserId provided', () => { - const response = getProjectParticipationBySystemUserSQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when null valid params provided', () => { - const response = getProjectParticipationBySystemUserSQL(1, 2); - - expect(response).to.not.be.null; - }); -}); - -describe('getAllProjectParticipantsSQL', () => { - it('returns null response when null projectId provided', () => { - const response = getAllProjectParticipantsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns null response when valid params provided', () => { - const response = getAllProjectParticipantsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('addProjectRoleByRoleNameSQL', () => { - it('returns null response when null projectId provided', () => { - const response = addProjectRoleByRoleNameSQL((null as unknown) as number, 2, 'role'); - - expect(response).to.be.null; - }); - - it('returns null response when null systemUserId provided', () => { - const response = addProjectRoleByRoleNameSQL(1, (null as unknown) as number, 'role'); - - expect(response).to.be.null; - }); - - it('returns null response when null/empty projectParticipantRole provided', () => { - const response = addProjectRoleByRoleNameSQL(1, 2, ''); - - expect(response).to.be.null; - }); - - it('returns non null response when valid parameters provided', () => { - const response = addProjectRoleByRoleNameSQL(1, 2, 'role'); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteProjectParticipationSQL', () => { - it('returns null response when null projectParticipationId provided', () => { - const response = deleteProjectParticipationSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid parameters provided', () => { - const response = deleteProjectParticipationSQL(1); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/project-participation/project-participation-queries.ts b/api/src/queries/project-participation/project-participation-queries.ts index aa4b9b3b20..b1aa6da416 100644 --- a/api/src/queries/project-participation/project-participation-queries.ts +++ b/api/src/queries/project-participation/project-participation-queries.ts @@ -72,175 +72,3 @@ export const getAllUserProjectsSQL = (userId: number): SQLStatement | null => { pp.system_user_id = ${userId}; `; }; - -/** - * SQL query to add a single project role to a user. - * - * @param {number} projectId - * @param {number} systemUserId - * @param {string} projectParticipantRole - * @return {*} {(SQLStatement | null)} - */ -export const getProjectParticipationBySystemUserSQL = ( - projectId: number, - systemUserId: number -): SQLStatement | null => { - if (!projectId || !systemUserId) { - return null; - } - - return SQL` - SELECT - pp.project_id, - pp.system_user_id, - su.record_end_date, - array_remove(array_agg(pr.project_role_id), NULL) AS project_role_ids, - array_remove(array_agg(pr.name), NULL) AS project_role_names - FROM - project_participation pp - LEFT JOIN - project_role pr - ON - pp.project_role_id = pr.project_role_id - LEFT JOIN - system_user su - ON - pp.system_user_id = su.system_user_id - WHERE - pp.project_id = ${projectId} - AND - pp.system_user_id = ${systemUserId} - AND - su.record_end_date is NULL - GROUP BY - pp.project_id, - pp.system_user_id, - su.record_end_date ; - `; -}; - -/** - * SQL query to get all project participants. - * - * @param {projectId} projectId - * @returns {SQLStatement} sql query object - */ -export const getAllProjectParticipantsSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - pp.project_participation_id, - pp.project_id, - pp.system_user_id, - pp.project_role_id, - pr.name project_role_name, - su.user_identifier, - su.user_identity_source_id - FROM - project_participation pp - LEFT JOIN - system_user su - ON - pp.system_user_id = su.system_user_id - LEFT JOIN - project_role pr - ON - pr.project_role_id = pp.project_role_id - WHERE - pp.project_id = ${projectId}; - `; -}; - -/** - * SQL query to add a single project role to a user. - * - * @param {number} projectId - * @param {number} systemUserId - * @param {string} projectParticipantRole - * @return {*} {(SQLStatement | null)} - */ -export const addProjectRoleByRoleNameSQL = ( - projectId: number, - systemUserId: number, - projectParticipantRole: string -): SQLStatement | null => { - if (!projectId || !systemUserId || !projectParticipantRole) { - return null; - } - - return SQL` - INSERT INTO project_participation ( - project_id, - system_user_id, - project_role_id - ) - ( - SELECT - ${projectId}, - ${systemUserId}, - project_role_id - FROM - project_role - WHERE - name = ${projectParticipantRole} - ) - RETURNING - *; - `; -}; - -/** - * SQL query to add a single project role to a user. - * - * @param {number} projectId - * @param {number} systemUserId - * @param {string} projectParticipantRole - * @return {*} {(SQLStatement | null)} - */ -export const addProjectRoleByRoleIdSQL = ( - projectId: number, - systemUserId: number, - projectParticipantRoleId: number -): SQLStatement | null => { - if (!projectId || !systemUserId || !projectParticipantRoleId) { - return null; - } - - return SQL` - INSERT INTO project_participation ( - project_id, - system_user_id, - project_role_id - ) VALUES ( - ${projectId}, - ${systemUserId}, - ${projectParticipantRoleId} - ) - RETURNING - *; - `; -}; - -/** - * SQL query to delete a single project participation record. - * - * @param {number} projectParticipationId - * @return {*} {(SQLStatement | null)} - */ -export const deleteProjectParticipationSQL = (projectParticipationId: number): SQLStatement | null => { - if (!projectParticipationId) { - return null; - } - - return SQL` - DELETE FROM - project_participation - WHERE - project_participation_id = ${projectParticipationId} - RETURNING - *; - `; -}; diff --git a/api/src/queries/project/draft/draft-queries.test.ts b/api/src/queries/project/draft/draft-queries.test.ts index a9aac98daf..bb98c38290 100644 --- a/api/src/queries/project/draft/draft-queries.test.ts +++ b/api/src/queries/project/draft/draft-queries.test.ts @@ -1,6 +1,6 @@ import { expect } from 'chai'; import { describe } from 'mocha'; -import { deleteDraftSQL, getDraftSQL, getDraftsSQL, postDraftSQL, putDraftSQL } from './draft-queries'; +import { getDraftsSQL, postDraftSQL, putDraftSQL } from './draft-queries'; describe('postDraftSQL', () => { it('Null systemUserId', () => { @@ -71,27 +71,3 @@ describe('getDraftsSQL', () => { expect(response).to.not.be.null; }); }); - -describe('getDraftSQL', () => { - it('Null draftId', () => { - const response = getDraftSQL((null as unknown) as number); - expect(response).to.be.null; - }); - - it('Valid parameters', () => { - const response = getDraftSQL(1); - expect(response).to.not.be.null; - }); -}); - -describe('deleteDraftSQL', () => { - it('Null draftId', () => { - const response = deleteDraftSQL((null as unknown) as number); - expect(response).to.be.null; - }); - - it('Valid parameters', () => { - const response = deleteDraftSQL(1); - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/project/draft/draft-queries.ts b/api/src/queries/project/draft/draft-queries.ts index da2b9b45f7..757cb60cee 100644 --- a/api/src/queries/project/draft/draft-queries.ts +++ b/api/src/queries/project/draft/draft-queries.ts @@ -89,47 +89,3 @@ export const getDraftsSQL = (systemUserId: number): SQLStatement | null => { return sqlStatement; }; - -/** - * SQL query to get a single draft from the webform_draft table. - * - * @param {number} draftId - * @return {SQLStatement} {(SQLStatement | null)} - */ -export const getDraftSQL = (draftId: number): SQLStatement | null => { - if (!draftId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - webform_draft_id as id, - name, - data - FROM - webform_draft - WHERE - webform_draft_id = ${draftId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to delete a single draft from the webform_draft table. - * - * @param {number} draftId - * @return {SQLStatement} {(SQLStatement) | null} - */ -export const deleteDraftSQL = (draftId: number): SQLStatement | null => { - if (!draftId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE from webform_draft - WHERE webform_draft_id = ${draftId}; - `; - - return sqlStatement; -}; diff --git a/api/src/queries/project/index.ts b/api/src/queries/project/index.ts index f34215dc0d..7f83424ec1 100644 --- a/api/src/queries/project/index.ts +++ b/api/src/queries/project/index.ts @@ -1,15 +1,5 @@ import draft from './draft'; -import * as projectAttachments from './project-attachments-queries'; -import * as projectCreate from './project-create-queries'; -import * as projectDelete from './project-delete-queries'; -import * as projectUpdate from './project-update-queries'; -import * as projectView from './project-view-queries'; export default { - ...projectAttachments, - ...projectCreate, - ...projectDelete, - ...projectUpdate, - ...projectView, draft }; diff --git a/api/src/queries/project/project-attachments-queries.test.ts b/api/src/queries/project/project-attachments-queries.test.ts deleted file mode 100644 index 84dfa76edd..0000000000 --- a/api/src/queries/project/project-attachments-queries.test.ts +++ /dev/null @@ -1,436 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { IReportAttachmentAuthor, PutReportAttachmentMetadata } from '../../models/project-survey-attachments'; -import { - deleteProjectAttachmentSQL, - deleteProjectReportAttachmentAuthorsSQL, - deleteProjectReportAttachmentSQL, - getProjectAttachmentByFileNameSQL, - getProjectAttachmentS3KeySQL, - getProjectAttachmentsSQL, - getProjectReportAttachmentByFileNameSQL, - getProjectReportAttachmentS3KeySQL, - getProjectReportAttachmentSQL, - getProjectReportAttachmentsSQL, - getProjectReportAuthorsSQL, - insertProjectReportAttachmentAuthorSQL, - postProjectAttachmentSQL, - postProjectReportAttachmentSQL, - putProjectAttachmentSQL, - putProjectReportAttachmentSQL, - updateProjectReportAttachmentMetadataSQL -} from './project-attachments-queries'; - -const post_sample_attachment_meta = { - title: 'title', - year_published: 2000, - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ], - description: 'description' -}; - -const put_sample_attachment_meta = { - title: 'title', - year_published: 2000, - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ], - description: 'description', - revision_count: 0 -}; - -describe('getProjectAttachmentsSQL', () => { - it('returns null response when null projectId provided', () => { - const response = getProjectAttachmentsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId provided', () => { - const response = getProjectAttachmentsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getProjectReportAttachmentsSQL', () => { - it('returns null response when null projectId provided', () => { - const response = getProjectReportAttachmentsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId provided', () => { - const response = getProjectReportAttachmentsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteProjectAttachmentSQL', () => { - it('returns null response when null attachmentId provided', () => { - const response = deleteProjectAttachmentSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid attachmentId provided', () => { - const response = deleteProjectAttachmentSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteProjectReportAttachmentSQL', () => { - it('returns null response when null attachmentId provided', () => { - const response = deleteProjectReportAttachmentSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid attachmentId provided', () => { - const response = deleteProjectReportAttachmentSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getProjectAttachmentS3KeySQL', () => { - it('returns null response when null projectId provided', () => { - const response = getProjectAttachmentS3KeySQL((null as unknown) as number, 1); - - expect(response).to.be.null; - }); - - it('returns null response when null attachmentId provided', () => { - const response = getProjectAttachmentS3KeySQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId and attachmentId provided', () => { - const response = getProjectAttachmentS3KeySQL(1, 2); - - expect(response).to.not.be.null; - }); -}); - -describe('getProjectReportAttachmentS3KeySQL', () => { - it('returns null response when null projectId provided', () => { - const response = getProjectReportAttachmentS3KeySQL((null as unknown) as number, 1); - - expect(response).to.be.null; - }); - - it('returns null response when null attachmentId provided', () => { - const response = getProjectReportAttachmentS3KeySQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId and attachmentId provided', () => { - const response = getProjectReportAttachmentS3KeySQL(1, 2); - - expect(response).to.not.be.null; - }); -}); - -describe('postProjectAttachmentSQL', () => { - it('returns null response when null projectId provided', () => { - const response = postProjectAttachmentSQL('name', 20, 'type', (null as unknown) as number, 'key'); - - expect(response).to.be.null; - }); - - it('returns null response when null fileName provided', () => { - const response = postProjectAttachmentSQL((null as unknown) as string, 20, 'type', 1, 'key'); - - expect(response).to.be.null; - }); - - it('returns null response when null fileSize provided', () => { - const response = postProjectAttachmentSQL('name', (null as unknown) as number, 'type', 1, 'key'); - - expect(response).to.be.null; - }); - - it('returns null response when null key provided', () => { - const response = postProjectAttachmentSQL('name', 2, 'type', 1, (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns null response when null fileType provided', () => { - const response = postProjectAttachmentSQL('name', 2, (null as unknown) as string, 1, 'key'); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId and fileName and fileSize and key and fileType provided', () => { - const response = postProjectAttachmentSQL('name', 20, 'type', 1, 'key'); - - expect(response).to.not.be.null; - }); -}); - -describe('postProjectReportAttachmentSQL', () => { - it('returns null response when null projectId provided', () => { - const response = postProjectReportAttachmentSQL( - 'name', - 20, - (null as unknown) as number, - 'key', - post_sample_attachment_meta - ); - - expect(response).to.be.null; - }); - - it('returns null response when null fileName provided', () => { - const response = postProjectReportAttachmentSQL( - (null as unknown) as string, - 20, - 1, - 'key', - post_sample_attachment_meta - ); - - expect(response).to.be.null; - }); - - it('returns null response when null fileSize provided', () => { - const response = postProjectReportAttachmentSQL( - 'name', - (null as unknown) as number, - 1, - 'key', - post_sample_attachment_meta - ); - - expect(response).to.be.null; - }); - - it('returns null response when null key provided', () => { - const response = postProjectReportAttachmentSQL( - 'name', - 2, - 1, - (null as unknown) as string, - post_sample_attachment_meta - ); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId and fileName and fileSize and key provided', () => { - const response = postProjectReportAttachmentSQL('name', 20, 1, 'key', post_sample_attachment_meta); - - expect(response).to.not.be.null; - }); -}); - -describe('getProjectAttachmentByFileNameSQL', () => { - it('returns null response when null projectId provided', () => { - const response = getProjectAttachmentByFileNameSQL((null as unknown) as number, 'name'); - - expect(response).to.be.null; - }); - - it('returns null response when null fileName provided', () => { - const response = getProjectAttachmentByFileNameSQL(1, (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId and fileName provided', () => { - const response = getProjectAttachmentByFileNameSQL(1, 'name'); - - expect(response).to.not.be.null; - }); -}); -describe('getProjectReportAttachmentByFileNameSQL', () => { - it('returns null response when null projectId provided', () => { - const response = getProjectReportAttachmentByFileNameSQL((null as unknown) as number, 'name'); - - expect(response).to.be.null; - }); - - it('returns null response when null fileName provided', () => { - const response = getProjectReportAttachmentByFileNameSQL(1, (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId and fileName provided', () => { - const response = getProjectReportAttachmentByFileNameSQL(1, 'name'); - - expect(response).to.not.be.null; - }); -}); - -describe('putProjectAttachmentSQL', () => { - it('returns null response when null projectId provided', () => { - const response = putProjectAttachmentSQL((null as unknown) as number, 'name', 'type'); - - expect(response).to.be.null; - }); - - it('returns null response when null fileName provided', () => { - const response = putProjectAttachmentSQL(1, (null as unknown) as string, 'type'); - - expect(response).to.be.null; - }); - - it('returns null response when null fileType provided', () => { - const response = putProjectAttachmentSQL(1, 'name', (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId and fileName and fileType provided', () => { - const response = putProjectAttachmentSQL(1, 'name', 'type'); - - expect(response).to.not.be.null; - }); -}); - -describe('putProjectReportAttachmentSQL', () => { - it('returns null response when null projectId provided', () => { - const response = putProjectReportAttachmentSQL((null as unknown) as number, 'name', put_sample_attachment_meta); - - expect(response).to.be.null; - }); - - it('returns null response when null fileName provided', () => { - const response = putProjectReportAttachmentSQL(1, (null as unknown) as string, put_sample_attachment_meta); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId and fileName provided', () => { - const response = putProjectReportAttachmentSQL(1, 'name', put_sample_attachment_meta); - - expect(response).to.not.be.null; - }); -}); - -describe('updateProjectReportAttachmentMetadataSQL', () => { - it('returns null response when null projectId provided', () => { - const response = updateProjectReportAttachmentMetadataSQL( - (null as unknown) as number, - 1, - put_sample_attachment_meta - ); - - expect(response).to.be.null; - }); - - it('returns null response when null attachmentId provided', () => { - const response = updateProjectReportAttachmentMetadataSQL( - 1, - (null as unknown) as number, - put_sample_attachment_meta - ); - - expect(response).to.be.null; - }); - - it('returns null response when null metadata provided', () => { - const response = updateProjectReportAttachmentMetadataSQL(1, 1, (null as unknown) as PutReportAttachmentMetadata); - - expect(response).to.be.null; - }); - - it('returns not null response when valid parameters are provided', () => { - const response = updateProjectReportAttachmentMetadataSQL(1, 1, put_sample_attachment_meta); - - expect(response).to.not.be.null; - }); -}); - -describe('insertProjectReportAttachmentAuthorSQL', () => { - const report_attachment_author: IReportAttachmentAuthor = { - first_name: 'John', - last_name: 'Smith' - }; - it('returns null response when null attachmentId provided', () => { - const response = insertProjectReportAttachmentAuthorSQL((null as unknown) as number, report_attachment_author); - - expect(response).to.be.null; - }); - - it('returns null response when null report author provided', () => { - const response = insertProjectReportAttachmentAuthorSQL(1, (null as unknown) as IReportAttachmentAuthor); - - expect(response).to.be.null; - }); - - it('returns null response when null attachmmentId and null report author are provided', () => { - const response = insertProjectReportAttachmentAuthorSQL( - (null as unknown) as number, - (null as unknown) as IReportAttachmentAuthor - ); - expect(response).to.be.null; - }); - - it('returns not null response when valid parameters are provided', () => { - const response = insertProjectReportAttachmentAuthorSQL(1, report_attachment_author); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteProjectReportAttachmentAuthorsSQL', () => { - it('returns null response when null attachmentId provided', () => { - const response = deleteProjectReportAttachmentAuthorsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns not null response when valid params are provided', () => { - const response = deleteProjectReportAttachmentAuthorsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getProjectReportAttachmentSQL', () => { - it('returns null response when null projectId provided', () => { - const response = getProjectReportAttachmentSQL((null as unknown) as number, 1); - - expect(response).to.be.null; - }); - - it('returns null response when null attachmentId provided', () => { - const response = getProjectReportAttachmentSQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId and attachmentId provided', () => { - const response = getProjectReportAttachmentSQL(1, 2); - - expect(response).to.not.be.null; - }); -}); - -describe('getProjectReportAuthorSQL', () => { - it('returns null response when null projectReportAttachmentId provided', () => { - const response = getProjectReportAuthorsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectReportAttachmentId provided', () => { - const response = getProjectReportAuthorsSQL(1); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/project/project-attachments-queries.ts b/api/src/queries/project/project-attachments-queries.ts deleted file mode 100644 index 8392690d76..0000000000 --- a/api/src/queries/project/project-attachments-queries.ts +++ /dev/null @@ -1,547 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; -import { - IReportAttachmentAuthor, - PostReportAttachmentMetadata, - PutReportAttachmentMetadata -} from '../../models/project-survey-attachments'; - -/** - * SQL query to get attachments for a single project. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getProjectAttachmentsSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - project_attachment_id as id, - file_name, - file_type, - update_date, - create_date, - file_size, - key, - security_token - from - project_attachment - where - project_id = ${projectId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to get report attachments for a single project. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getProjectReportAttachmentsSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - project_report_attachment_id as id, - file_name, - update_date, - create_date, - file_size, - key, - security_token - from - project_report_attachment - where - project_id = ${projectId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to delete an attachment for a single project. - * - * @param {number} attachmentId - * @returns {SQLStatement} sql query object - */ -export const deleteProjectAttachmentSQL = (attachmentId: number): SQLStatement | null => { - if (!attachmentId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE - from project_attachment - WHERE - project_attachment_id = ${attachmentId} - RETURNING - key; - `; - - return sqlStatement; -}; - -/** - * SQL query to delete a report attachment for a single project. - * - * @param {number} attachmentId - * @returns {SQLStatement} sql query object - */ -export const deleteProjectReportAttachmentSQL = (attachmentId: number): SQLStatement | null => { - if (!attachmentId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE - from project_report_attachment - WHERE - project_report_attachment_id = ${attachmentId} - RETURNING - key; - `; - - return sqlStatement; -}; - -/** - * SQL query to get S3 key of an attachment for a single project. - * - * @param {number} projectId - * @param {number} attachmentId - * @returns {SQLStatement} sql query object - */ -export const getProjectAttachmentS3KeySQL = (projectId: number, attachmentId: number): SQLStatement | null => { - if (!projectId || !attachmentId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - key - FROM - project_attachment - WHERE - project_id = ${projectId} - AND - project_attachment_id = ${attachmentId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to get S3 key of a report attachment for a single project. - * - * @param {number} projectId - * @param {number} attachmentId - * @returns {SQLStatement} sql query object - */ -export const getProjectReportAttachmentS3KeySQL = (projectId: number, attachmentId: number): SQLStatement | null => { - if (!projectId || !attachmentId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - key - FROM - project_report_attachment - WHERE - project_id = ${projectId} - AND - project_report_attachment_id = ${attachmentId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to insert a project attachment row. - * - * @param fileName - * @param fileSize - * @param fileType - * @param projectId - * @param {string} key to use in s3 - * @returns {SQLStatement} sql query object - */ -export const postProjectAttachmentSQL = ( - fileName: string, - fileSize: number, - fileType: string, - projectId: number, - key: string -): SQLStatement | null => { - if (!fileName || !fileSize || !fileType || !projectId || !key) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO project_attachment ( - project_id, - file_name, - file_size, - file_type, - key - ) VALUES ( - ${projectId}, - ${fileName}, - ${fileSize}, - ${fileType}, - ${key} - ) - RETURNING - project_attachment_id as id, - revision_count; - `; - - return sqlStatement; -}; - -/** - * SQL query to insert a project report attachment row. - * - * @param fileName - * @param fileSize - * @param projectId - * @param {string} key to use in s3 - * @returns {SQLStatement} sql query object - */ -export const postProjectReportAttachmentSQL = ( - fileName: string, - fileSize: number, - projectId: number, - key: string, - attachmentMeta: PostReportAttachmentMetadata -): SQLStatement | null => { - if ( - !fileName || - !fileSize || - !projectId || - !key || - !attachmentMeta?.title || - !attachmentMeta?.year_published || - !attachmentMeta?.description - ) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO project_report_attachment ( - project_id, - file_name, - title, - year, - description, - file_size, - key - ) VALUES ( - ${projectId}, - ${fileName}, - ${attachmentMeta.title}, - ${attachmentMeta.year_published}, - ${attachmentMeta.description}, - ${fileSize}, - ${key} - ) - RETURNING - project_report_attachment_id as id, - revision_count; - `; - - return sqlStatement; -}; - -/** - * SQL query to get an attachment for a single project by project id and filename. - * - * @param {number} projectId - * @param {string} fileName - * @returns {SQLStatement} sql query object - */ -export const getProjectAttachmentByFileNameSQL = (projectId: number, fileName: string): SQLStatement | null => { - if (!projectId || !fileName) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - project_attachment_id as id, - file_name, - update_date, - create_date, - file_size - from - project_attachment - where - project_id = ${projectId} - and - file_name = ${fileName}; - `; - - return sqlStatement; -}; - -/** - * SQL query to get an attachment for a single project by project id and filename. - * - * @param {number} projectId - * @param {string} fileName - * @returns {SQLStatement} sql query object - */ -export const getProjectReportAttachmentByFileNameSQL = (projectId: number, fileName: string): SQLStatement | null => { - if (!projectId || !fileName) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - project_report_attachment_id as id, - file_name, - update_date, - create_date, - file_size - from - project_report_attachment - where - project_id = ${projectId} - and - file_name = ${fileName}; - `; - - return sqlStatement; -}; - -/** - * SQL query to update an attachment for a single project by project id and filename and filetype. - * - * @param {number} projectId - * @param {string} fileName - * @param {string} fileType - * @returns {SQLStatement} sql query object - */ -export const putProjectAttachmentSQL = (projectId: number, fileName: string, fileType: string): SQLStatement | null => { - if (!projectId || !fileName || !fileType) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - UPDATE - project_attachment - SET - file_name = ${fileName}, - file_type = ${fileType} - WHERE - file_name = ${fileName} - AND - project_id = ${projectId} - RETURNING - project_attachment_id as id, - revision_count; - `; - - return sqlStatement; -}; - -/** - * SQL query to update a report attachment for a single project by project id and filename. - * - * @param {number} projectId - * @param {string} fileName - * @returns {SQLStatement} sql query object - */ -export const putProjectReportAttachmentSQL = ( - projectId: number, - fileName: string, - attachmentMeta: PutReportAttachmentMetadata -): SQLStatement | null => { - if ( - !projectId || - !fileName || - !attachmentMeta?.title || - !attachmentMeta?.year_published || - !attachmentMeta?.description - ) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - UPDATE - project_report_attachment - SET - file_name = ${fileName}, - title = ${attachmentMeta.title}, - year = ${attachmentMeta.year_published}, - description = ${attachmentMeta.description} - WHERE - file_name = ${fileName} - AND - project_id = ${projectId} - RETURNING - project_report_attachment_id as id, - revision_count; - `; - - return sqlStatement; -}; - -export interface ReportAttachmentMeta { - title: string; - description: string; - yearPublished: string; -} - -/** - * Update the metadata fields of project report attachment, for tjhe specified `projectId` and `attachmentId`. - * - * @param {number} projectId - * @param {number} attachmentId - * @param {PutReportAttachmentMetadata} metadata - * @return {*} {(SQLStatement | null)} - */ -export const updateProjectReportAttachmentMetadataSQL = ( - projectId: number, - attachmentId: number, - metadata: PutReportAttachmentMetadata -): SQLStatement | null => { - if (!projectId || !attachmentId || !metadata) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - UPDATE - project_report_attachment - SET - title = ${metadata.title}, - year = ${metadata.year_published}, - description = ${metadata.description} - WHERE - project_id = ${projectId} - AND - project_report_attachment_id = ${attachmentId} - AND - revision_count = ${metadata.revision_count}; - `; - - return sqlStatement; -}; - -/** - * Insert a new project report attachment author record, for the specified `attachmentId` - * - * @param {number} attachmentId - * @param {IReportAttachmentAuthor} author - * @return {*} {(SQLStatement | null)} - */ -export const insertProjectReportAttachmentAuthorSQL = ( - attachmentId: number, - author: IReportAttachmentAuthor -): SQLStatement | null => { - if (!attachmentId || !author) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO project_report_author ( - project_report_attachment_id, - first_name, - last_name - ) VALUES ( - ${attachmentId}, - ${author.first_name}, - ${author.last_name} - ); - `; - - return sqlStatement; -}; - -/** - * Delete all project report attachment author records, for the specified `attachmentId`. - * - * @param {number} attachmentId - * @return {*} {(SQLStatement | null)} - */ -export const deleteProjectReportAttachmentAuthorsSQL = (attachmentId: number): SQLStatement | null => { - if (!attachmentId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE - FROM project_report_author - WHERE - project_report_attachment_id = ${attachmentId}; - `; - - return sqlStatement; -}; - -/** - * Get the metadata fields of project report attachment, for the specified `projectId` and `attachmentId`. - * - * @param {number} projectId - * @param {number} attachmentId - * @param {PutReportAttachmentMetadata} metadata - * @return {*} {(SQLStatement | null)} - */ -export const getProjectReportAttachmentSQL = (projectId: number, attachmentId: number): SQLStatement | null => { - if (!projectId || !attachmentId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - project_report_attachment_id as attachment_id, - file_name, - title, - description, - year as year_published, - update_date, - create_date, - file_size, - key, - security_token, - revision_count - FROM - project_report_attachment - where - project_report_attachment_id = ${attachmentId} - and - project_id = ${projectId} - `; - - return sqlStatement; -}; - -/** - * Get the metadata fields of project report attachment, for the specified `projectId` and `attachmentId`. - * - * @param {number} projectId - * @param {number} attachmentId - * @param {PutReportAttachmentMetadata} metadata - * @return {*} {(SQLStatement | null)} - */ -export const getProjectReportAuthorsSQL = (projectReportAttachmentId: number): SQLStatement | null => { - if (!projectReportAttachmentId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - project_report_author.* - FROM - project_report_author - where - project_report_attachment_id = ${projectReportAttachmentId} - `; - - return sqlStatement; -}; diff --git a/api/src/queries/project/project-create-queries.test.ts b/api/src/queries/project/project-create-queries.test.ts deleted file mode 100644 index b21faa9835..0000000000 --- a/api/src/queries/project/project-create-queries.test.ts +++ /dev/null @@ -1,297 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { - PostCoordinatorData, - PostFundingSource, - PostLocationData, - PostObjectivesData, - PostProjectData -} from '../../models/project-create'; -import { - postProjectActivitySQL, - postProjectFundingSourceSQL, - postProjectIndigenousNationSQL, - postProjectIUCNSQL, - postProjectSQL, - postProjectStakeholderPartnershipSQL -} from './project-create-queries'; - -describe('postProjectSQL', () => { - describe('Null project param provided', () => { - it('returns null', () => { - // force the function to accept a null value - const response = postProjectSQL( - (null as unknown) as PostProjectData & PostLocationData & PostCoordinatorData & PostObjectivesData - ); - - expect(response).to.be.null; - }); - }); - - describe('Valid project param provided', () => { - const projectData = { - name: 'name_test_data', - objectives: 'objectives_test_data', - start_date: 'start_date_test_data', - end_date: 'end_date_test_data', - caveats: 'caveats_test_data', - comments: 'comments_test_data' - }; - - const coordinatorData = { - first_name: 'coordinator_first_name', - last_name: 'coordinator_last_name', - email_address: 'coordinator_email_address@email.com', - coordinator_agency: 'coordinator_agency_name', - share_contact_details: false - }; - - const locationData = { - location_description: 'a location description' - }; - - const objectivesData = { - objectives: 'an objective', - caveats: 'a caveat maybe' - }; - - const postProjectData = new PostProjectData(projectData); - const postCoordinatorData = new PostCoordinatorData(coordinatorData); - const postObjectivesData = new PostObjectivesData(objectivesData); - - it('returns a SQLStatement', () => { - const postLocationData = new PostLocationData(locationData); - const response = postProjectSQL({ - ...postProjectData, - ...postCoordinatorData, - ...postLocationData, - ...postObjectivesData - }); - - expect(response).to.not.be.null; - }); - - it('returns a SQLStatement with a single geometry inserted correctly', () => { - const locationDataWithGeo = { - ...locationData, - geometry: [ - { - type: 'Feature', - id: 'myGeo', - geometry: { - type: 'Polygon', - coordinates: [ - [ - [-128, 55], - [-128, 55.5], - [-128, 56], - [-126, 58], - [-128, 55] - ] - ] - }, - properties: { - name: 'Biohub Islands' - } - } - ] - }; - - const postLocationData = new PostLocationData(locationDataWithGeo); - const response = postProjectSQL({ - ...postProjectData, - ...postCoordinatorData, - ...postLocationData, - ...postObjectivesData - }); - - expect(response).to.not.be.null; - expect(response?.values).to.deep.include( - '{"type":"Polygon","coordinates":[[[-128,55],[-128,55.5],[-128,56],[-126,58],[-128,55]]]}' - ); - }); - - it('returns a SQLStatement with multiple geometries inserted correctly', () => { - const locationDataWithGeos = { - ...locationData, - geometry: [ - { - type: 'Feature', - id: 'myGeo1', - geometry: { - type: 'Polygon', - coordinates: [ - [ - [-128, 55], - [-128, 55.5], - [-128, 56], - [-126, 58], - [-128, 55] - ] - ] - }, - properties: { - name: 'Biohub Islands 1' - } - }, - { - type: 'Feature', - id: 'myGeo2', - geometry: { - type: 'Point', - coordinates: [-128, 55] - }, - properties: { - name: 'Biohub Islands 2' - } - } - ] - }; - - const postLocationData = new PostLocationData(locationDataWithGeos); - const response = postProjectSQL({ - ...postProjectData, - ...postCoordinatorData, - ...postLocationData, - ...postObjectivesData - }); - - expect(response).to.not.be.null; - expect(response?.values).to.deep.include( - '{"type":"Polygon","coordinates":[[[-128,55],[-128,55.5],[-128,56],[-126,58],[-128,55]]]}' - ); - expect(response?.values).to.deep.include('{"type":"Point","coordinates":[-128,55]}'); - }); - }); -}); - -describe('postProjectFundingSourceSQL', () => { - describe('with invalid parameters', () => { - it('returns null when funding source is null', () => { - const response = postProjectFundingSourceSQL((null as unknown) as PostFundingSource, 1); - - expect(response).to.be.null; - }); - - it('returns null when project id is null', () => { - const response = postProjectFundingSourceSQL(new PostFundingSource({}), (null as unknown) as number); - - expect(response).to.be.null; - }); - }); - - describe('with valid parameters', () => { - it('returns a SQLStatement when all fields are passed in as expected', () => { - const response = postProjectFundingSourceSQL( - new PostFundingSource({ - agency_id: 111, - investment_action_category: 222, - agency_project_id: '123123123', - funding_amount: 10000, - start_date: '2020-02-02', - end_date: '2020-03-02' - }), - 333 - ); - - expect(response).to.not.be.null; - expect(response?.values).to.deep.include(333); - expect(response?.values).to.deep.include(222); - expect(response?.values).to.deep.include('123123123'); - expect(response?.values).to.deep.include(10000); - expect(response?.values).to.deep.include('2020-02-02'); - expect(response?.values).to.deep.include('2020-03-02'); - }); - }); -}); - -describe('postProjectStakeholderPartnershipSQL', () => { - it('Null activityId', () => { - const response = postProjectStakeholderPartnershipSQL((null as unknown) as string, 1); - expect(response).to.be.null; - }); - - it('Null projectId', () => { - const response = postProjectStakeholderPartnershipSQL('123', (null as unknown) as number); - expect(response).to.be.null; - }); - - it('null activityId and null projectId', () => { - const response = postProjectStakeholderPartnershipSQL((null as unknown) as string, (null as unknown) as number); - expect(response).to.be.null; - }); - - it('Valid parameters', () => { - const response = postProjectStakeholderPartnershipSQL('123', 1); - expect(response).to.not.be.null; - }); -}); - -describe('postProjectIndigenousNationSQL', () => { - it('Null activityId', () => { - const response = postProjectIndigenousNationSQL((null as unknown) as number, 1); - expect(response).to.be.null; - }); - - it('Null projectId', () => { - const response = postProjectIndigenousNationSQL(1, (null as unknown) as number); - expect(response).to.be.null; - }); - - it('null activityId and null projectId', () => { - const response = postProjectIndigenousNationSQL((null as unknown) as number, (null as unknown) as number); - expect(response).to.be.null; - }); - - it('Valid parameters', () => { - const response = postProjectIndigenousNationSQL(1, 1); - expect(response).to.not.be.null; - }); -}); - -describe('postProjectIUCNSQL', () => { - describe('with invalid parameters', () => { - it('returns null when no iucn id', () => { - const response = postProjectIUCNSQL((null as unknown) as number, 1); - - expect(response).to.be.null; - }); - - it('returns null when no project id', () => { - const response = postProjectIUCNSQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - }); - - describe('with valid parameters', () => { - it('returns a SQLStatement when all fields are passed in as expected', () => { - const response = postProjectIUCNSQL(1, 123); - - expect(response).to.not.be.null; - expect(response?.values).to.deep.include(123); - }); - }); -}); - -describe('postProjectActivitySQL', () => { - it('Null activityId', () => { - const response = postProjectActivitySQL((null as unknown) as number, 1); - expect(response).to.be.null; - }); - - it('Null projectId', () => { - const response = postProjectActivitySQL(1, (null as unknown) as number); - expect(response).to.be.null; - }); - - it('null activityId and null projectId', () => { - const response = postProjectActivitySQL((null as unknown) as number, (null as unknown) as number); - expect(response).to.be.null; - }); - - it('Valid parameters', () => { - const response = postProjectActivitySQL(1, 1); - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/project/project-create-queries.ts b/api/src/queries/project/project-create-queries.ts deleted file mode 100644 index de27d71546..0000000000 --- a/api/src/queries/project/project-create-queries.ts +++ /dev/null @@ -1,233 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; -import { - PostCoordinatorData, - PostFundingSource, - PostLocationData, - PostObjectivesData, - PostProjectData -} from '../../models/project-create'; -import { queries } from '../queries'; - -/** - * SQL query to insert a project row. - * - * @param {(PostProjectData & PostLocationData & PostCoordinatorData & PostObjectivesData)} project - * @returns {SQLStatement} sql query object - */ -export const postProjectSQL = ( - project: PostProjectData & PostLocationData & PostCoordinatorData & PostObjectivesData -): SQLStatement | null => { - if (!project) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO project ( - project_type_id, - name, - objectives, - location_description, - start_date, - end_date, - caveats, - comments, - coordinator_first_name, - coordinator_last_name, - coordinator_email_address, - coordinator_agency_name, - coordinator_public, - geojson, - geography - ) VALUES ( - ${project.type}, - ${project.name}, - ${project.objectives}, - ${project.location_description}, - ${project.start_date}, - ${project.end_date}, - ${project.caveats}, - ${project.comments}, - ${project.first_name}, - ${project.last_name}, - ${project.email_address}, - ${project.coordinator_agency}, - ${project.share_contact_details}, - ${JSON.stringify(project.geometry)} - `; - - if (project.geometry && project.geometry.length) { - const geometryCollectionSQL = queries.spatial.generateGeometryCollectionSQL(project.geometry); - - sqlStatement.append(SQL` - ,public.geography( - public.ST_Force2D( - public.ST_SetSRID( - `); - - sqlStatement.append(geometryCollectionSQL); - - sqlStatement.append(SQL` - , 4326))) - `); - } else { - sqlStatement.append(SQL` - ,null - `); - } - - sqlStatement.append(SQL` - ) - RETURNING - project_id as id; - `); - - return sqlStatement; -}; - -/** - * SQL query to insert a project funding source row. - * - * @param {PostFundingSource} fundingSource - * @returns {SQLStatement} sql query object - */ -export const postProjectFundingSourceSQL = ( - fundingSource: PostFundingSource, - projectId: number -): SQLStatement | null => { - if (!fundingSource || !projectId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO project_funding_source ( - project_id, - investment_action_category_id, - funding_source_project_id, - funding_amount, - funding_start_date, - funding_end_date - ) VALUES ( - ${projectId}, - ${fundingSource.investment_action_category}, - ${fundingSource.agency_project_id}, - ${fundingSource.funding_amount}, - ${fundingSource.start_date}, - ${fundingSource.end_date} - ) - RETURNING - project_funding_source_id as id; - `; - - return sqlStatement; -}; - -/** - * SQL query to insert a project stakeholder partnership row. - * - * @param {string} stakeholderPartnership - * @returns {SQLStatement} sql query object - */ -export const postProjectStakeholderPartnershipSQL = ( - stakeholderPartnership: string, - projectId: number -): SQLStatement | null => { - if (!stakeholderPartnership || !projectId) { - return null; - } - - // TODO model is missing agency name - const sqlStatement: SQLStatement = SQL` - INSERT INTO stakeholder_partnership ( - project_id, - name - ) VALUES ( - ${projectId}, - ${stakeholderPartnership} - ) - RETURNING - stakeholder_partnership_id as id; - `; - - return sqlStatement; -}; - -/** - * SQL query to insert a project indigenous nation row. - * - * @param {string} indigenousNationId - * @returns {SQLStatement} sql query object - */ -export const postProjectIndigenousNationSQL = (indigenousNationId: number, projectId: number): SQLStatement | null => { - if (!indigenousNationId || !projectId) { - return null; - } - - // TODO model is missing agency name - const sqlStatement: SQLStatement = SQL` - INSERT INTO project_first_nation ( - project_id, - first_nations_id - ) VALUES ( - ${projectId}, - ${indigenousNationId} - ) - RETURNING - first_nations_id as id; - `; - - return sqlStatement; -}; - -/** - * SQL query to insert a project IUCN row. - * - * @param iucn3_id - * @param project_id - * @returns {SQLStatement} sql query object - */ -export const postProjectIUCNSQL = (iucn3_id: number, project_id: number): SQLStatement | null => { - if (!iucn3_id || !project_id) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO project_iucn_action_classification ( - iucn_conservation_action_level_3_subclassification_id, - project_id - ) VALUES ( - ${iucn3_id}, - ${project_id} - ) - RETURNING - project_iucn_action_classification_id as id; - `; - - return sqlStatement; -}; - -/** - * SQL query to insert a project activity row. - * - * @param activityId - * @param projectId - * @returns {SQLStatement} sql query object - */ -export const postProjectActivitySQL = (activityId: number, projectId: number): SQLStatement | null => { - if (!activityId || !projectId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO project_activity ( - activity_id, - project_id - ) VALUES ( - ${activityId}, - ${projectId} - ) - RETURNING - project_activity_id as id; - `; - - return sqlStatement; -}; diff --git a/api/src/queries/project/project-delete-queries.test.ts b/api/src/queries/project/project-delete-queries.test.ts deleted file mode 100644 index 72a8c590ed..0000000000 --- a/api/src/queries/project/project-delete-queries.test.ts +++ /dev/null @@ -1,109 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { - deleteActivitiesSQL, - deleteIndigenousPartnershipsSQL, - deleteIUCNSQL, - deletePermitSQL, - deleteProjectFundingSourceSQL, - deleteProjectSQL, - deleteStakeholderPartnershipsSQL -} from './project-delete-queries'; - -describe('deleteIUCNSQL', () => { - it('returns null response when null projectId provided', () => { - const response = deleteIUCNSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId provided', () => { - const response = deleteIUCNSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deletePermitSQL', () => { - it('returns null response when null projectId provided', () => { - const response = deletePermitSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId provided', () => { - const response = deletePermitSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteIndigenousPartnershipsSQL', () => { - it('returns null response when null projectId provided', () => { - const response = deleteIndigenousPartnershipsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId provided', () => { - const response = deleteIndigenousPartnershipsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteStakeholderPartnershipsSQL', () => { - it('returns null response when null projectId provided', () => { - const response = deleteStakeholderPartnershipsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId provided', () => { - const response = deleteStakeholderPartnershipsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteActivitiesSQL', () => { - it('returns null response when null projectId provided', () => { - const response = deleteActivitiesSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId provided', () => { - const response = deleteActivitiesSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteProjectSQL', () => { - it('returns null response when null projectId provided', () => { - const response = deleteProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId provided', () => { - const response = deleteProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteProjectFundingSourceSQL', () => { - it('returns null response when null pfsId (project funding source) provided', () => { - const response = deleteProjectFundingSourceSQL((null as unknown) as number, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId provided', () => { - const response = deleteProjectFundingSourceSQL(1, 1); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/project/project-delete-queries.ts b/api/src/queries/project/project-delete-queries.ts deleted file mode 100644 index 2cbffdc999..0000000000 --- a/api/src/queries/project/project-delete-queries.ts +++ /dev/null @@ -1,149 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; - -/** - * SQL query to delete project indigenous partnership rows (project_first_nations) - * - * @param {projectId} projectId - * @returns {SQLStatement} sql query object - */ -export const deleteIndigenousPartnershipsSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE - from project_first_nation - WHERE - project_id = ${projectId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to delete permit rows associated to a project - * - * @param {projectId} projectId - * @returns {SQLStatement} sql query object - */ -export const deletePermitSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE - from permit - WHERE - project_id = ${projectId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to delete project stakeholder partnership rows - * - * @param {projectId} projectId - * @returns {SQLStatement} sql query object - */ -export const deleteStakeholderPartnershipsSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE - from stakeholder_partnership - WHERE - project_id = ${projectId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to delete project IUCN rows. - * - * @param {projectId} projectId - * @returns {SQLStatement} sql query object - */ -export const deleteIUCNSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE - from project_iucn_action_classification - WHERE - project_id = ${projectId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to delete project activity rows. - * - * @param {projectId} projectId - * @returns {SQLStatement} sql query object - */ -export const deleteActivitiesSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE FROM - project_activity - WHERE - project_id = ${projectId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to delete the specific project funding source record. - * - * @param {projectId} projectId - * @param {pfsId} pfsId - * @returns {SQLStatement} sql query object - */ -export const deleteProjectFundingSourceSQL = ( - projectId: number | undefined, - pfsId: number | undefined -): SQLStatement | null => { - if (!projectId || !pfsId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE - from project_funding_source - WHERE - project_id = ${projectId} - AND - project_funding_source_id = ${pfsId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to delete a project row (and associated data) based on project ID. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const deleteProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - const sqlStatement: SQLStatement = SQL`call api_delete_project(${projectId})`; - - return sqlStatement; -}; diff --git a/api/src/queries/project/project-update-queries.test.ts b/api/src/queries/project/project-update-queries.test.ts deleted file mode 100644 index f7cf2df7df..0000000000 --- a/api/src/queries/project/project-update-queries.test.ts +++ /dev/null @@ -1,300 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { - PutCoordinatorData, - PutFundingSource, - PutLocationData, - PutObjectivesData, - PutProjectData -} from '../../models/project-update'; -import { - getCoordinatorByProjectSQL, - getIndigenousPartnershipsByProjectSQL, - getIUCNActionClassificationByProjectSQL, - getObjectivesByProjectSQL, - getPermitsByProjectSQL, - getProjectByProjectSQL, - putProjectFundingSourceSQL, - putProjectSQL, - updateProjectPublishStatusSQL -} from './project-update-queries'; - -describe('getIndigenousPartnershipsByProjectSQL', () => { - it('Null projectId', () => { - const response = getIndigenousPartnershipsByProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('valid projectId', () => { - const response = getIndigenousPartnershipsByProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getPermitsByProjectSQL', () => { - it('Null projectId', () => { - const response = getPermitsByProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('valid projectId', () => { - const response = getPermitsByProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getIUCNActionClassificationByProjectSQL', () => { - it('returns null response when null projectId provided', () => { - const response = getIUCNActionClassificationByProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId provided', () => { - const response = getIUCNActionClassificationByProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getCoordinatorByProjectSQL', () => { - it('Null projectId', () => { - const response = getCoordinatorByProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('valid projectId', () => { - const response = getCoordinatorByProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getProjectByProjectSQL', () => { - it('Null projectId', () => { - const response = getProjectByProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('valid projectId', () => { - const response = getProjectByProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('putProjectSQL', () => { - it('returns null when an invalid projectId is provided', () => { - const response = putProjectSQL((null as unknown) as number, null, null, null, null, 1); - - expect(response).to.be.null; - }); - - it('returns null when a valid projectId but no data to update is provided', () => { - const response = putProjectSQL(1, null, null, null, null, 1); - - expect(response).to.be.null; - }); - - it('returns valid sql when only project data is provided', () => { - const response = putProjectSQL( - 1, - new PutProjectData({ - name: 'project name', - type: 1, - start_date: '2020-04-20T07:00:00.000Z', - end_date: '2020-05-20T07:00:00.000Z' - }), - null, - null, - null, - 1 - ); - - expect(response).to.not.be.null; - }); - - it('returns valid sql when only location data is provided', () => { - const response = putProjectSQL( - 1, - null, - new PutLocationData({ - location_description: 'description', - geometry: [ - { - type: 'Feature', - geometry: { - type: 'Point', - coordinates: [125.6, 10.1] - }, - properties: { - name: 'Dinagat Islands' - } - } - ] - }), - null, - null, - 1 - ); - - expect(response).to.not.be.null; - }); - - it('returns valid sql when only objectives data is provided', () => { - const response = putProjectSQL( - 1, - null, - null, - new PutObjectivesData({ - objectives: 'objectives', - caveats: 'caveats', - revision_count: 1 - }), - null, - 1 - ); - - expect(response).to.not.be.null; - }); - - it('returns valid sql when only coordinator data is provided', () => { - const response = putProjectSQL( - 1, - null, - null, - null, - new PutCoordinatorData({ - first_name: 'first name', - last_name: 'last name', - email_address: 'email@email.com', - coordinator_agency: 'agency', - share_contact_details: 'true', - revision_count: 1 - }), - 1 - ); - - expect(response).to.not.be.null; - }); - - it('returns valid sql when all data is provided', () => { - const response = putProjectSQL( - 1, - new PutProjectData({ - name: 'project name', - type: 1, - start_date: '2020-04-20T07:00:00.000Z', - end_date: '2020-05-20T07:00:00.000Z' - }), - new PutLocationData({ - location_description: 'description' - }), - new PutObjectivesData({ - objectives: 'objectives', - caveats: 'caveats', - revision_count: 1 - }), - new PutCoordinatorData({ - first_name: 'first name', - last_name: 'last name', - email_address: 'email@email.com', - coordinator_agency: 'agency', - share_contact_details: 'true', - revision_count: 1 - }), - 1 - ); - - expect(response).to.not.be.null; - }); -}); - -describe('getObjectivesByProjectSQL', () => { - it('Null projectId', () => { - const response = getObjectivesByProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('valid projectId', () => { - const response = getObjectivesByProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('putProjectFundingSourceSQL', () => { - describe('with invalid parameters', () => { - it('returns null when funding source is null', () => { - const response = putProjectFundingSourceSQL((null as unknown) as PutFundingSource, 1); - - expect(response).to.be.null; - }); - - it('returns null when project id is null', () => { - const response = putProjectFundingSourceSQL(new PutFundingSource({}), (null as unknown) as number); - - expect(response).to.be.null; - }); - }); - - describe('with valid parameters', () => { - it('returns a SQLStatement when all fields are passed in as expected', () => { - const response = putProjectFundingSourceSQL( - new PutFundingSource({ - fundingSources: [ - { - investment_action_category: 222, - agency_project_id: 'funding source name', - funding_amount: 10000, - start_date: '2020-02-02', - end_date: '2020-03-02', - revision_count: 11 - } - ] - }), - 1 - ); - - expect(response).to.not.be.null; - expect(response?.values).to.deep.include(222); - expect(response?.values).to.deep.include('funding source name'); - expect(response?.values).to.deep.include(10000); - expect(response?.values).to.deep.include('2020-02-02'); - expect(response?.values).to.deep.include('2020-03-02'); - }); - }); -}); - -describe('updateProjectPublishStatusSQL', () => { - describe('with invalid parameters', () => { - it('returns null when project is null', () => { - const response = updateProjectPublishStatusSQL((null as unknown) as number, true); - - expect(response).to.be.null; - }); - }); - - describe('with valid parameters', () => { - it('returns a SQLStatement when there is a real date value', () => { - const response = updateProjectPublishStatusSQL(1, true); - - expect(response).to.not.be.null; - expect(response?.values).to.deep.include(1); - }); - - it('returns a SQLStatement when the date value is null', () => { - const response = updateProjectPublishStatusSQL(1, false); - - expect(response).to.not.be.null; - expect(response?.values).to.deep.include(1); - }); - }); -}); diff --git a/api/src/queries/project/project-update-queries.ts b/api/src/queries/project/project-update-queries.ts deleted file mode 100644 index 913a219027..0000000000 --- a/api/src/queries/project/project-update-queries.ts +++ /dev/null @@ -1,319 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; -import { - PutCoordinatorData, - PutFundingSource, - PutLocationData, - PutObjectivesData, - PutProjectData -} from '../../models/project-update'; -import { queries } from '../queries'; - -/** - * SQL query to get IUCN action classifications. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getIUCNActionClassificationByProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - ical1c.iucn_conservation_action_level_1_classification_id as classification, - ical2s.iucn_conservation_action_level_2_subclassification_id as subClassification1, - ical3s.iucn_conservation_action_level_3_subclassification_id as subClassification2 - FROM - project_iucn_action_classification as piac - LEFT OUTER JOIN - iucn_conservation_action_level_3_subclassification as ical3s - ON - piac.iucn_conservation_action_level_3_subclassification_id = ical3s.iucn_conservation_action_level_3_subclassification_id - LEFT OUTER JOIN - iucn_conservation_action_level_2_subclassification as ical2s - ON - ical3s.iucn_conservation_action_level_2_subclassification_id = ical2s.iucn_conservation_action_level_2_subclassification_id - LEFT OUTER JOIN - iucn_conservation_action_level_1_classification as ical1c - ON - ical2s.iucn_conservation_action_level_1_classification_id = ical1c.iucn_conservation_action_level_1_classification_id - WHERE - piac.project_id = ${projectId} - GROUP BY - ical1c.iucn_conservation_action_level_1_classification_id, - ical2s.iucn_conservation_action_level_2_subclassification_id, - ical3s.iucn_conservation_action_level_3_subclassification_id; - `; -}; - -/** - * SQL query to get project indigenous partnerships. - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getIndigenousPartnershipsByProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - project_first_nation_id as id - FROM - project_first_nation pfn - WHERE - pfn.project_id = ${projectId} - GROUP BY - project_first_nation_id; - `; -}; - -/** - * SQL query to get permits associated to a project. - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getPermitsByProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - number, - type - FROM - permit - WHERE - project_id = ${projectId}; - `; -}; - -/** - * SQL query to get coordinator information, for update purposes. - * - * @param {number} projectId - * @return {*} {(SQLStatement | null)} - */ -export const getCoordinatorByProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - coordinator_first_name, - coordinator_last_name, - coordinator_email_address, - coordinator_agency_name, - coordinator_public, - revision_count - FROM - project - WHERE - project_id = ${projectId}; - `; -}; - -/** - * SQL query to get project information, for update purposes. - * - * @param {number} projectId - * @return {*} {(SQLStatement | null)} - */ -export const getProjectByProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - name, - project_type_id as pt_id, - start_date, - end_date, - revision_count - FROM - project - WHERE - project_id = ${projectId}; - `; -}; - -/** - * SQL query to update a project row. - * - * @param {(PutProjectData & PutLocationData & PutCoordinatorData & PutObjectivesData)} project - * @returns {SQLStatement} sql query object - */ -export const putProjectSQL = ( - projectId: number, - project: PutProjectData | null, - location: PutLocationData | null, - objectives: PutObjectivesData | null, - coordinator: PutCoordinatorData | null, - revision_count: number -): SQLStatement | null => { - if (!projectId) { - return null; - } - - if (!project && !location && !objectives && !coordinator) { - // Nothing to update - return null; - } - - const sqlStatement: SQLStatement = SQL`UPDATE project SET `; - - const sqlSetStatements: SQLStatement[] = []; - - if (project) { - sqlSetStatements.push(SQL`project_type_id = ${project.type}`); - sqlSetStatements.push(SQL`name = ${project.name}`); - sqlSetStatements.push(SQL`start_date = ${project.start_date}`); - sqlSetStatements.push(SQL`end_date = ${project.end_date}`); - } - - if (location) { - sqlSetStatements.push(SQL`location_description = ${location.location_description}`); - sqlSetStatements.push(SQL`geojson = ${JSON.stringify(location.geometry)}`); - - const geometrySQLStatement = SQL`geography = `; - - if (location.geometry && location.geometry.length) { - const geometryCollectionSQL = queries.spatial.generateGeometryCollectionSQL(location.geometry); - - geometrySQLStatement.append(SQL` - public.geography( - public.ST_Force2D( - public.ST_SetSRID( - `); - - geometrySQLStatement.append(geometryCollectionSQL); - - geometrySQLStatement.append(SQL` - , 4326))) - `); - } else { - geometrySQLStatement.append(SQL`null`); - } - - sqlSetStatements.push(geometrySQLStatement); - } - - if (objectives) { - sqlSetStatements.push(SQL`objectives = ${objectives.objectives}`); - sqlSetStatements.push(SQL`caveats = ${objectives.caveats}`); - } - - if (coordinator) { - sqlSetStatements.push(SQL`coordinator_first_name = ${coordinator.first_name}`); - sqlSetStatements.push(SQL`coordinator_last_name = ${coordinator.last_name}`); - sqlSetStatements.push(SQL`coordinator_email_address = ${coordinator.email_address}`); - sqlSetStatements.push(SQL`coordinator_agency_name = ${coordinator.coordinator_agency}`); - sqlSetStatements.push(SQL`coordinator_public = ${coordinator.share_contact_details}`); - } - - sqlSetStatements.forEach((item, index) => { - sqlStatement.append(item); - if (index < sqlSetStatements.length - 1) { - sqlStatement.append(','); - } - }); - - sqlStatement.append(SQL` - WHERE - project_id = ${projectId} - AND - revision_count = ${revision_count}; - `); - - return sqlStatement; -}; - -/** - * SQL query to get objectives information, for update purposes. - * - * @param {number} projectId - * @return {*} {(SQLStatement | null)} - */ -export const getObjectivesByProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - objectives, - caveats, - revision_count - FROM - project - WHERE - project_id = ${projectId}; - `; -}; - -/** - * SQL query to put (insert) a project funding source row. - * - * @param {PutFundingSource} fundingSource - * @returns {SQLStatement} sql query object - */ -export const putProjectFundingSourceSQL = ( - fundingSource: PutFundingSource | null, - projectId: number -): SQLStatement | null => { - if (!fundingSource || !projectId) { - return null; - } - - return SQL` - INSERT INTO project_funding_source ( - project_id, - investment_action_category_id, - funding_source_project_id, - funding_amount, - funding_start_date, - funding_end_date - ) VALUES ( - ${projectId}, - ${fundingSource.investment_action_category}, - ${fundingSource.agency_project_id}, - ${fundingSource.funding_amount}, - ${fundingSource.start_date}, - ${fundingSource.end_date} - ) - RETURNING - project_funding_source_id as id; - `; -}; - -/** - * SQL query to update the publish status of a project. - * - * @param {number} projectId - * @param {boolean} publish - * @returns {SQLStatement} sql query object - */ -export const updateProjectPublishStatusSQL = (projectId: number, publish: boolean): SQLStatement | null => { - if (!projectId) { - return null; - } - - const sqlStatement: SQLStatement = SQL`UPDATE project SET publish_timestamp = `; - - if (publish === true) { - sqlStatement.append(SQL` - now() WHERE publish_timestamp IS NULL AND project_id = ${projectId} - `); - } else { - sqlStatement.append(SQL` - null WHERE project_id = ${projectId} - `); - } - sqlStatement.append(SQL` RETURNING project_id as id;`); - - return sqlStatement; -}; diff --git a/api/src/queries/project/project-view-queries.test.ts b/api/src/queries/project/project-view-queries.test.ts deleted file mode 100644 index f81896f1f9..0000000000 --- a/api/src/queries/project/project-view-queries.test.ts +++ /dev/null @@ -1,216 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { - getActivitiesByProjectSQL, - getFundingSourceByProjectSQL, - getIndigenousPartnershipsByProjectSQL, - getIUCNActionClassificationByProjectSQL, - getLocationByProjectSQL, - getProjectListSQL, - getProjectPermitsSQL, - getProjectSQL, - getStakeholderPartnershipsByProjectSQL -} from './project-view-queries'; - -describe('getProjectSQL', () => { - describe('Null project id param provided', () => { - it('returns null', () => { - // force the function to accept a null value - const response = getProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - }); - - describe('Valid project id param provided', () => { - it('returns a SQLStatement', () => { - const response = getProjectSQL(1); - - expect(response).to.not.be.null; - }); - }); -}); - -describe('getProjectListSQL', () => { - it('returns null when no systemUserId provided', () => { - const response = getProjectListSQL(true, null); - - expect(response).to.be.null; - }); - - it('returns a SQLStatement when isUserAdmin and systemUserId but no filter fields provided', () => { - const response = getProjectListSQL(true, 3); - - expect(response).to.not.be.null; - }); - - it('returns a SQLStatement when not isUserAdmin and systemUserId but no filter fields provided', () => { - const response = getProjectListSQL(false, 3); - - expect(response).to.not.be.null; - }); - - it('returns a SQLStatement when filter fields provided (only contact agency)', () => { - const response = getProjectListSQL(true, 1, { coordinator_agency: 'agency' }); - - expect(response).to.not.be.null; - }); - - it('returns a SQLStatement when filter fields provided (only permit number)', () => { - const response = getProjectListSQL(true, 1, { permit_number: '123' }); - - expect(response).to.not.be.null; - }); - - it('returns a SQLStatement when filter fields provided (only project type)', () => { - const response = getProjectListSQL(true, 1, { project_type: 'type' }); - - expect(response).to.not.be.null; - }); - - it('returns a SQLStatement when filter fields provided (only project name)', () => { - const response = getProjectListSQL(true, 1, { project_name: 'name' }); - - expect(response).to.not.be.null; - }); - - it('returns a SQLStatement when filter fields provided (only agency project id)', () => { - const response = getProjectListSQL(true, 1, { agency_project_id: 'agency_project_id' }); - - expect(response).to.not.be.null; - }); - - it('returns a SQLStatement when filter fields provided (only agency id)', () => { - const response = getProjectListSQL(true, 1, { agency_id: 'agency_id' }); - - expect(response).to.not.be.null; - }); - - it('returns a SQLStatement when filter fields provided (only keyword)', () => { - const response = getProjectListSQL(true, 1, { keyword: 'agency' }); - - expect(response).to.not.be.null; - }); - - it('returns a SQLStatement when filter fields provided (only species)', () => { - const response = getProjectListSQL(true, 1, { species: ['species 1', 'species 2'] }); - - expect(response).to.not.be.null; - }); - - it('returns a SQLStatement when filter fields provided (only start date)', () => { - const response = getProjectListSQL(true, 1, { start_date: '2020/04/04' }); - - expect(response).to.not.be.null; - }); - - it('returns a SQLStatement when filter fields provided (only end date)', () => { - const response = getProjectListSQL(true, 1, { end_date: '2020/04/04' }); - - expect(response).to.not.be.null; - }); - - it('returns a SQLStatement when filter fields provided (both start and end dates)', () => { - const response = getProjectListSQL(true, 1, { start_date: '2020/04/04', end_date: '2020/05/05' }); - - expect(response).to.not.be.null; - }); -}); - -describe('getIUCNActionClassificationByProjectSQL', () => { - it('returns null response when null projectId provided', () => { - const response = getIUCNActionClassificationByProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectId provided', () => { - const response = getIUCNActionClassificationByProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getIndigenousPartnershipsByProjectSQL', () => { - it('Null projectId', () => { - const response = getIndigenousPartnershipsByProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('valid projectId', () => { - const response = getIndigenousPartnershipsByProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getStakeholderPartnershipsByProjectSQL', () => { - it('Null projectId', () => { - const response = getStakeholderPartnershipsByProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('valid projectId', () => { - const response = getStakeholderPartnershipsByProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getProjectPermitsSQL', () => { - it('Null projectId', () => { - const response = getProjectPermitsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('valid projectId', () => { - const response = getProjectPermitsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getLocationByProjectSQL', () => { - it('Null projectId', () => { - const response = getLocationByProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('valid projectId', () => { - const response = getLocationByProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getActivitiesByProjectSQL', () => { - it('Null projectId', () => { - const response = getActivitiesByProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('valid projectId', () => { - const response = getActivitiesByProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getFundingSourceByProjectSQL', () => { - it('Null projectId', () => { - const response = getFundingSourceByProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('valid projectId', () => { - const response = getFundingSourceByProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/project/project-view-queries.ts b/api/src/queries/project/project-view-queries.ts deleted file mode 100644 index 616bb26eef..0000000000 --- a/api/src/queries/project/project-view-queries.ts +++ /dev/null @@ -1,380 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; - -/** - * SQL query to get a single project. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - project.project_id as id, - project.uuid, - project.project_type_id as pt_id, - project_type.name as type, - project.name, - project.objectives, - project.location_description, - project.start_date, - project.end_date, - project.caveats, - project.comments, - project.coordinator_first_name, - project.coordinator_last_name, - project.coordinator_email_address, - project.coordinator_agency_name, - project.coordinator_public, - project.geojson as geometry, - project.create_date, - project.create_user, - project.update_date, - project.update_user, - project.revision_count, - project.publish_timestamp as publish_date - from - project - left outer join - project_type - on project.project_type_id = project_type.project_type_id - where - project.project_id = ${projectId}; - `; -}; - -/** - * SQL query to get all projects. - * - * @param {boolean} isUserAdmin - * @param {number | null} systemUserId - * @param {any} filterFields - * @returns {SQLStatement} sql query object - */ -export const getProjectListSQL = ( - isUserAdmin: boolean, - systemUserId: number | null, - filterFields?: any -): SQLStatement | null => { - if (!systemUserId) { - return null; - } - - const sqlStatement = SQL` - SELECT - p.project_id as id, - p.name, - p.start_date, - p.end_date, - p.coordinator_agency_name as coordinator_agency, - p.publish_timestamp, - pt.name as project_type, - string_agg(DISTINCT pp.number, ', ') as permits_list - from - project as p - left outer join project_type as pt - on p.project_type_id = pt.project_type_id - left outer join permit as pp - on p.project_id = pp.project_id - left outer join project_funding_source as pfs - on pfs.project_id = p.project_id - left outer join investment_action_category as iac - on pfs.investment_action_category_id = iac.investment_action_category_id - left outer join funding_source as fs - on iac.funding_source_id = fs.funding_source_id - left outer join survey as s - on s.project_id = p.project_id - left outer join study_species as sp - on sp.survey_id = s.survey_id - where 1 = 1 - `; - - if (!isUserAdmin) { - sqlStatement.append(SQL` - AND p.project_id IN ( - SELECT - project_id - FROM - project_participation - where - system_user_id = ${systemUserId} - ) - `); - } - - if (filterFields && Object.keys(filterFields).length !== 0 && filterFields.constructor === Object) { - if (filterFields.coordinator_agency) { - sqlStatement.append(SQL` AND p.coordinator_agency_name = ${filterFields.coordinator_agency}`); - } - - if (filterFields.start_date && !filterFields.end_date) { - sqlStatement.append(SQL` AND p.start_date >= ${filterFields.start_date}`); - } - - if (!filterFields.start_date && filterFields.end_date) { - sqlStatement.append(SQL` AND p.end_date <= ${filterFields.end_date}`); - } - - if (filterFields.start_date && filterFields.end_date) { - sqlStatement.append( - SQL` AND p.start_date >= ${filterFields.start_date} AND p.end_date <= ${filterFields.end_date}` - ); - } - - if (filterFields.permit_number) { - sqlStatement.append(SQL` AND pp.number = ${filterFields.permit_number}`); - } - - if (filterFields.project_type) { - sqlStatement.append(SQL` AND pt.name = ${filterFields.project_type}`); - } - - if (filterFields.project_name) { - sqlStatement.append(SQL` AND p.name = ${filterFields.project_name}`); - } - - if (filterFields.agency_project_id) { - sqlStatement.append(SQL` AND pfs.funding_source_project_id = ${filterFields.agency_project_id}`); - } - - if (filterFields.agency_id) { - sqlStatement.append(SQL` AND fs.funding_source_id = ${filterFields.agency_id}`); - } - - if (filterFields.species && filterFields.species.length) { - sqlStatement.append(SQL` AND sp.wldtaxonomic_units_id =${filterFields.species[0]}`); - } - - if (filterFields.keyword) { - const keyword_string = '%'.concat(filterFields.keyword).concat('%'); - sqlStatement.append(SQL` AND p.name ilike ${keyword_string}`); - sqlStatement.append(SQL` OR p.coordinator_agency_name ilike ${keyword_string}`); - sqlStatement.append(SQL` OR fs.name ilike ${keyword_string}`); - sqlStatement.append(SQL` OR s.name ilike ${keyword_string}`); - } - } - - sqlStatement.append(SQL` - group by - p.project_id, - p.name, - p.start_date, - p.end_date, - p.coordinator_agency_name, - p.publish_timestamp, - pt.name; - `); - - return sqlStatement; -}; - -/** - * SQL query to get IUCN action classifications. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getIUCNActionClassificationByProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - ical1c.iucn_conservation_action_level_1_classification_id as classification, - ical2s.iucn_conservation_action_level_2_subclassification_id as subClassification1, - ical3s.iucn_conservation_action_level_3_subclassification_id as subClassification2 - FROM - project_iucn_action_classification as piac - LEFT OUTER JOIN - iucn_conservation_action_level_3_subclassification as ical3s - ON - piac.iucn_conservation_action_level_3_subclassification_id = ical3s.iucn_conservation_action_level_3_subclassification_id - LEFT OUTER JOIN - iucn_conservation_action_level_2_subclassification as ical2s - ON - ical3s.iucn_conservation_action_level_2_subclassification_id = ical2s.iucn_conservation_action_level_2_subclassification_id - LEFT OUTER JOIN - iucn_conservation_action_level_1_classification as ical1c - ON - ical2s.iucn_conservation_action_level_1_classification_id = ical1c.iucn_conservation_action_level_1_classification_id - WHERE - piac.project_id = ${projectId} - GROUP BY - ical1c.iucn_conservation_action_level_1_classification_id, - ical2s.iucn_conservation_action_level_2_subclassification_id, - ical3s.iucn_conservation_action_level_3_subclassification_id; - `; -}; - -/** - * SQL query to get project indigenous partnerships. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getIndigenousPartnershipsByProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - fn.first_nations_id as id, - fn.name as first_nations_name - FROM - project_first_nation pfn - LEFT OUTER JOIN - first_nations fn - ON - pfn.first_nations_id = fn.first_nations_id - WHERE - pfn.project_id = ${projectId} - GROUP BY - fn.first_nations_id, - fn.name; - `; -}; - -/** - * SQL query to get project stakeholder partnerships. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getStakeholderPartnershipsByProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - name as partnership_name - FROM - stakeholder_partnership - WHERE - project_id = ${projectId}; - `; -}; - -/** - * SQL query to get permits associated to a project. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getProjectPermitsSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - number, - type - FROM - permit - WHERE - project_id = ${projectId} - `; -}; - -/** - * SQL query to get project location. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getLocationByProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - p.location_description, - p.geojson as geometry, - p.revision_count - FROM - project p - WHERE - p.project_id = ${projectId} - GROUP BY - p.location_description, - p.geojson, - p.revision_count; - `; -}; - -/** - * SQL query to get project activities. - * - * @param {string} projectId - * @returns {SQLStatement} sql query object - */ - -export const getActivitiesByProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - activity_id - from - project_activity - where project_id = ${projectId}; - `; -}; - -/** - * SQL query to get funding source data - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getFundingSourceByProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - pfs.project_funding_source_id as id, - fs.funding_source_id as agency_id, - pfs.funding_amount::numeric::int, - pfs.funding_start_date as start_date, - pfs.funding_end_date as end_date, - iac.investment_action_category_id as investment_action_category, - iac.name as investment_action_category_name, - fs.name as agency_name, - pfs.funding_source_project_id as agency_project_id, - pfs.revision_count as revision_count - FROM - project_funding_source as pfs - LEFT OUTER JOIN - investment_action_category as iac - ON - pfs.investment_action_category_id = iac.investment_action_category_id - LEFT OUTER JOIN - funding_source as fs - ON - iac.funding_source_id = fs.funding_source_id - WHERE - pfs.project_id = ${projectId} - GROUP BY - pfs.project_funding_source_id, - fs.funding_source_id, - pfs.funding_source_project_id, - pfs.funding_amount, - pfs.funding_start_date, - pfs.funding_end_date, - iac.investment_action_category_id, - iac.name, - fs.name, - pfs.revision_count - `; -}; diff --git a/api/src/queries/public/index.ts b/api/src/queries/public/index.ts deleted file mode 100644 index 44f5fb2f5d..0000000000 --- a/api/src/queries/public/index.ts +++ /dev/null @@ -1,4 +0,0 @@ -import * as project from './project-queries'; -import * as search from './search-queries'; - -export default { ...project, ...search }; diff --git a/api/src/queries/public/project-queries.test.ts b/api/src/queries/public/project-queries.test.ts deleted file mode 100644 index 4e95239b51..0000000000 --- a/api/src/queries/public/project-queries.test.ts +++ /dev/null @@ -1,151 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { - getActivitiesByPublicProjectSQL, - getProjectReportAuthorsSQL, - getPublicProjectAttachmentS3KeySQL, - getPublicProjectAttachmentsSQL, - getPublicProjectListSQL, - getPublicProjectReportAttachmentS3KeySQL, - getPublicProjectReportAttachmentSQL, - getPublicProjectReportAttachmentsSQL, - getPublicProjectSQL -} from './project-queries'; - -describe('getPublicProjectSQL', () => { - it('returns null when null project id param provided', () => { - const response = getPublicProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid project id param provided', () => { - const response = getPublicProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getActivitiesByPublicProjectSQL', () => { - it('returns null when null project id param provided', () => { - const response = getActivitiesByPublicProjectSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid project id param provided', () => { - const response = getActivitiesByPublicProjectSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getPublicProjectListSQL', () => { - it('returns non null response when called', () => { - const response = getPublicProjectListSQL(); - - expect(response).to.not.be.null; - }); -}); - -describe('getPublicProjectAttachmentsSQL', () => { - it('returns null when null project id param provided', () => { - const response = getPublicProjectAttachmentsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid project id param provided', () => { - const response = getPublicProjectAttachmentsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getPublicProjectReportAttachmentsSQL', () => { - it('returns null when null project id param provided', () => { - const response = getPublicProjectReportAttachmentsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid project id param provided', () => { - const response = getPublicProjectReportAttachmentsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getPublicProjectReportAttachmentS3KeySQL', () => { - it('returns null when null project id param provided', () => { - const response = getPublicProjectReportAttachmentS3KeySQL((null as unknown) as number, 2); - - expect(response).to.be.null; - }); - - it('returns null when null attachment id param provided', () => { - const response = getPublicProjectReportAttachmentS3KeySQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid attachment id param provided', () => { - const response = getPublicProjectReportAttachmentS3KeySQL(1, 2); - - expect(response).to.not.be.null; - }); -}); - -describe('getPublicProjectAttachmentS3KeySQL', () => { - it('returns null when null project id param provided', () => { - const response = getPublicProjectAttachmentS3KeySQL((null as unknown) as number, 2); - - expect(response).to.be.null; - }); - - it('returns null when null attachment id param provided', () => { - const response = getPublicProjectAttachmentS3KeySQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = getPublicProjectAttachmentS3KeySQL(1, 2); - - expect(response).to.not.be.null; - }); -}); - -describe('getPublicProjectReportAttachmentSQL', () => { - it('returns null when null project id param provided', () => { - const response = getPublicProjectReportAttachmentSQL((null as unknown) as number, 2); - - expect(response).to.be.null; - }); - - it('returns null when null attachment id param provided', () => { - const response = getPublicProjectReportAttachmentSQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = getPublicProjectReportAttachmentSQL(1, 2); - - expect(response).to.not.be.null; - }); -}); - -describe('getProjectReportAuthorSQL', () => { - it('returns null response when null projectReportAttachmentId provided', () => { - const response = getProjectReportAuthorsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectReportAttachmentId provided', () => { - const response = getProjectReportAuthorsSQL(1); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/public/project-queries.ts b/api/src/queries/public/project-queries.ts deleted file mode 100644 index f07fddc990..0000000000 --- a/api/src/queries/public/project-queries.ts +++ /dev/null @@ -1,274 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; - -/** - * SQL query to get a single public (published) project. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getPublicProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - project.project_id as id, - project.project_type_id as pt_id, - project_type.name as type, - project.name, - project.location_description, - project.start_date, - project.end_date, - project.caveats, - project.comments, - project.geojson as geometry, - project.publish_timestamp as publish_date - from - project - left outer join - project_type - on project.project_type_id = project_type.project_type_id - where - project.project_id = ${projectId} - and project.publish_timestamp is not null; - `; -}; - -/** - * SQL query to get public (published) project activities. - * - * @param {string} projectId - * @returns {SQLStatement} sql query object - */ - -export const getActivitiesByPublicProjectSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - pa.activity_id - from - project_activity as pa - LEFT OUTER JOIN - project as p - ON - p.project_id = pa.project_id - WHERE - pa.project_id = ${projectId} - AND - p.publish_timestamp is not null; - `; -}; - -/** - * SQL query to get all public facing (published) projects. - * - * @returns {SQLStatement} sql query object - */ -export const getPublicProjectListSQL = (): SQLStatement | null => { - return SQL` - SELECT - p.project_id as id, - p.name, - p.start_date, - p.end_date, - p.coordinator_agency_name as coordinator_agency, - pt.name as project_type, - string_agg(DISTINCT pp.number, ', ') as permits_list - from - project as p - left outer join project_type as pt - on p.project_type_id = pt.project_type_id - left outer join permit as pp - on p.project_id = pp.project_id - where - p.publish_timestamp is not null - group by - p.project_id, - p.name, - p.start_date, - p.end_date, - p.coordinator_agency_name, - pt.name; - `; -}; - -/** - * SQL query to get attachments for a single public (published) project. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getPublicProjectAttachmentsSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - pa.project_attachment_id as id, - pa.file_name, - pa.update_date, - pa.create_date, - pa.file_size, - pa.file_type, - CASE WHEN api_security_check(pa.security_token,pa.create_user) THEN false ELSE true END as is_secured - from - project_attachment as pa - left outer join - project as p - on - p.project_id = pa.project_id - where - pa.project_id = ${projectId} - and - p.publish_timestamp is not null; - `; -}; - -/** - * SQL query to get report attachments for a single public (published) project. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getPublicProjectReportAttachmentsSQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - pa.project_report_attachment_id as id, - pa.file_name, - pa.update_date, - pa.create_date, - pa.file_size, - CASE WHEN api_security_check(pa.security_token,pa.create_user) THEN false ELSE true END as is_secured - from - project_report_attachment as pa - left outer join - project as p - on - p.project_id = pa.project_id - where - pa.project_id = ${projectId} - and - p.publish_timestamp is not null; - `; -}; - -/** - * SQL query to get S3 key of an attachment for a single public (published) project. - * - * @param {number} projectId - * @param {number} attachmentId - * @returns {SQLStatement} sql query object - */ -export const getPublicProjectAttachmentS3KeySQL = (projectId: number, attachmentId: number): SQLStatement | null => { - if (!projectId || !attachmentId) { - return null; - } - - return SQL` - SELECT - CASE WHEN api_security_check(security_token,create_user) THEN key ELSE null - END as key - FROM - project_attachment - WHERE - project_id = ${projectId} - AND - project_attachment_id = ${attachmentId}; - `; -}; - -/** - * SQL query to get S3 key of a report attachment for a single public (published) project. - * - * @param {number} projectId - * @param {number} attachmentId - * @returns {SQLStatement} sql query object - */ -export const getPublicProjectReportAttachmentS3KeySQL = ( - projectId: number, - attachmentId: number -): SQLStatement | null => { - if (!projectId || !attachmentId) { - return null; - } - - return SQL` - SELECT - CASE WHEN api_security_check(security_token,create_user) THEN key ELSE null - END as key - FROM - project_report_attachment - WHERE - project_id = ${projectId} - AND - project_report_attachment_id = ${attachmentId}; - `; -}; - -/** - * Get the metadata fields of an unsecured project report attachment, for the specified `projectId` and `attachmentId`. - * - * @param {number} projectId - * @param {number} attachmentId - * @param {PutReportAttachmentMetadata} metadata - * @return {*} {(SQLStatement | null)} - */ -export const getPublicProjectReportAttachmentSQL = (projectId: number, attachmentId: number): SQLStatement | null => { - if (!projectId || !attachmentId) { - return null; - } - - return SQL` - SELECT - project_report_attachment_id as attachment_id, - file_name, - title, - description, - year as year_published, - update_date, - create_date, - file_size, - CASE WHEN api_security_check(security_token,create_user) THEN key ELSE null - END as key, - security_token, - revision_count - FROM - project_report_attachment - where - project_report_attachment_id = ${attachmentId} - and - project_id = ${projectId} - `; -}; - -/** - * Get the metadata fields of project report attachment, for the specified `projectId` and `attachmentId`. - * - * @param {number} projectId - * @param {number} attachmentId - * @param {PutReportAttachmentMetadata} metadata - * @return {*} {(SQLStatement | null)} - */ -export const getProjectReportAuthorsSQL = (projectReportAttachmentId: number): SQLStatement | null => { - if (!projectReportAttachmentId) { - return null; - } - - return SQL` - SELECT - project_report_author.* - FROM - project_report_author - where - project_report_attachment_id = ${projectReportAttachmentId} - `; -}; diff --git a/api/src/queries/public/search-queries.test.ts b/api/src/queries/public/search-queries.test.ts deleted file mode 100644 index de5937a3b9..0000000000 --- a/api/src/queries/public/search-queries.test.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { getPublicSpatialSearchResultsSQL } from './search-queries'; - -describe('getPublicSpatialSearchResultsSQL', () => { - it('returns a non null result', () => { - const response = getPublicSpatialSearchResultsSQL(); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/public/search-queries.ts b/api/src/queries/public/search-queries.ts deleted file mode 100644 index d70adb3e11..0000000000 --- a/api/src/queries/public/search-queries.ts +++ /dev/null @@ -1,19 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; - -/** - * SQL query to get public project geometries - * - * @returns {SQLStatement} sql query object - */ -export const getPublicSpatialSearchResultsSQL = (): SQLStatement | null => { - return SQL` - SELECT - p.project_id as id, - p.name, - public.ST_asGeoJSON(p.geography) as geometry - from - project as p - where - p.publish_timestamp is not null; - `; -}; diff --git a/api/src/queries/queries.ts b/api/src/queries/queries.ts index ef069829d4..eaac7f1533 100644 --- a/api/src/queries/queries.ts +++ b/api/src/queries/queries.ts @@ -1,31 +1,17 @@ import administrativeActivity from './administrative-activity'; import codes from './codes'; -import database from './database'; -import dwc from './dwc'; -import occurrence from './occurrence'; -import permit from './permit'; import project from './project'; import projectParticipation from './project-participation'; -import publicQueries from './public'; import search from './search'; -import security from './security'; import spatial from './spatial'; -import survey from './survey'; import users from './users'; export const queries = { administrativeActivity, codes, - database, - dwc, - occurrence, - permit, project, projectParticipation, - public: publicQueries, search, - security, spatial, - survey, users }; diff --git a/api/src/queries/search/search-queries.ts b/api/src/queries/search/search-queries.ts index 81e438b1f1..2d032660c5 100644 --- a/api/src/queries/search/search-queries.ts +++ b/api/src/queries/search/search-queries.ts @@ -19,15 +19,12 @@ export const getSpatialSearchResultsSQL = (isUserAdmin: boolean, systemUserId: n public.ST_asGeoJSON(p.geography) as geometry from project as p - where - p.publish_timestamp is not null `; if (!isUserAdmin) { - sqlStatement.append(SQL` and p.create_user = ${systemUserId};`); - } else { - sqlStatement.append(SQL`;`); + sqlStatement.append(SQL`WHERE p.create_user = ${systemUserId};`); } + sqlStatement.append(SQL`;`); return sqlStatement; }; diff --git a/api/src/queries/security/index.ts b/api/src/queries/security/index.ts deleted file mode 100644 index 69055b0d84..0000000000 --- a/api/src/queries/security/index.ts +++ /dev/null @@ -1,3 +0,0 @@ -import * as security from './security-queries'; - -export default { ...security }; diff --git a/api/src/queries/security/security-queries.test.ts b/api/src/queries/security/security-queries.test.ts deleted file mode 100644 index 6e6f11efd7..0000000000 --- a/api/src/queries/security/security-queries.test.ts +++ /dev/null @@ -1,49 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { secureAttachmentRecordSQL, unsecureAttachmentRecordSQL } from './security-queries'; - -describe('unsecureAttachmentRecordSQL', () => { - it('returns null when no tableName provided', () => { - const response = unsecureAttachmentRecordSQL((null as unknown) as string, 'token'); - - expect(response).to.be.null; - }); - - it('returns null when no securityToken provided', () => { - const response = unsecureAttachmentRecordSQL('table', (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns a SQLStatement', () => { - const response = unsecureAttachmentRecordSQL('table', 'token'); - - expect(response).to.not.be.null; - }); -}); - -describe('secureAttachmentRecordSQL', () => { - it('returns null when no attachmentId provided', () => { - const response = secureAttachmentRecordSQL((null as unknown) as number, 'table', 1); - - expect(response).to.be.null; - }); - - it('returns null when no tableName provided', () => { - const response = secureAttachmentRecordSQL(1, (null as unknown) as string, 1); - - expect(response).to.be.null; - }); - - it('returns null when no projectId provided', () => { - const response = secureAttachmentRecordSQL(1, 'table', (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns a SQLStatement', () => { - const response = secureAttachmentRecordSQL(1, 'table', 3); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/security/security-queries.ts b/api/src/queries/security/security-queries.ts deleted file mode 100644 index 7e2d89034b..0000000000 --- a/api/src/queries/security/security-queries.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; - -/** - * SQL query to unsecure an attachment record. - * - * @param {string} tableName - * @param {any} securityToken - * @returns {SQLStatement} sql query object - */ -export const unsecureAttachmentRecordSQL = (tableName: string, securityToken: any): SQLStatement | null => { - if (!securityToken || !tableName) { - return null; - } - - const sqlStatement: SQLStatement = SQL`select api_unsecure_attachment_record(${tableName}, ${securityToken})`; - - return sqlStatement; -}; - -/** - * SQL query to secure an attachment record. - * - * @param {number} attachmentId - * @param {string} tableName - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const secureAttachmentRecordSQL = ( - attachmentId: number, - tableName: string, - projectId: number -): SQLStatement | null => { - if (!attachmentId || !tableName || !projectId) { - return null; - } - - const sqlStatement: SQLStatement = SQL`select api_secure_attachment_record(${attachmentId}, ${tableName}, ${projectId})`; - - return sqlStatement; -}; diff --git a/api/src/queries/survey/index.ts b/api/src/queries/survey/index.ts deleted file mode 100644 index c7ff747f23..0000000000 --- a/api/src/queries/survey/index.ts +++ /dev/null @@ -1,19 +0,0 @@ -import * as surveyAttachments from './survey-attachments-queries'; -import * as surveyCreate from './survey-create-queries'; -import * as surveyDelete from './survey-delete-queries'; -import * as surveyOccurrence from './survey-occurrence-queries'; -import * as surveySummary from './survey-summary-queries'; -import * as surveyUpdate from './survey-update-queries'; -import * as surveyView from './survey-view-queries'; -import * as surveyViewUpdate from './survey-view-update-queries'; - -export default { - ...surveyAttachments, - ...surveyCreate, - ...surveyDelete, - ...surveyOccurrence, - ...surveySummary, - ...surveyUpdate, - ...surveyView, - ...surveyViewUpdate -}; diff --git a/api/src/queries/survey/survey-attachments-queries.test.ts b/api/src/queries/survey/survey-attachments-queries.test.ts deleted file mode 100644 index c0f7341bfe..0000000000 --- a/api/src/queries/survey/survey-attachments-queries.test.ts +++ /dev/null @@ -1,431 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { IReportAttachmentAuthor, PutReportAttachmentMetadata } from '../../models/project-survey-attachments'; -import { - deleteSurveyAttachmentSQL, - deleteSurveyReportAttachmentAuthorsSQL, - deleteSurveyReportAttachmentSQL, - getSurveyAttachmentByFileNameSQL, - getSurveyAttachmentS3KeySQL, - getSurveyAttachmentsSQL, - getSurveyReportAttachmentByFileNameSQL, - getSurveyReportAttachmentS3KeySQL, - getSurveyReportAttachmentSQL, - getSurveyReportAttachmentsSQL, - getSurveyReportAuthorsSQL, - insertSurveyReportAttachmentAuthorSQL, - postSurveyAttachmentSQL, - postSurveyReportAttachmentSQL, - putSurveyAttachmentSQL, - putSurveyReportAttachmentSQL, - updateSurveyReportAttachmentMetadataSQL -} from './survey-attachments-queries'; - -const post_sample_attachment_meta = { - title: 'title', - year_published: 2000, - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ], - description: 'description' -}; - -const put_sample_attachment_meta = { - title: 'title', - year_published: 2000, - authors: [ - { - first_name: 'John', - last_name: 'Smith' - } - ], - description: 'description', - revision_count: 0 -}; - -describe('getSurveyAttachmentsSQL', () => { - it('returns null response when null surveyId provided', () => { - const response = getSurveyAttachmentsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid surveyId provided', () => { - const response = getSurveyAttachmentsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteSurveyAttachmentSQL', () => { - it('returns null response when null attachmentId provided', () => { - const response = deleteSurveyAttachmentSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid attachmentId provided', () => { - const response = deleteSurveyAttachmentSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('putSurveyReportAttachmentSQL', () => { - it('returns null response when null fileName provided', () => { - const response = putSurveyReportAttachmentSQL(1, (null as unknown) as string, put_sample_attachment_meta); - - expect(response).to.be.null; - }); - - it('returns null response when null surveyId provided', () => { - const response = putSurveyReportAttachmentSQL((null as unknown) as number, 'name', put_sample_attachment_meta); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = putSurveyReportAttachmentSQL(1, 'name', put_sample_attachment_meta); - - expect(response).to.not.be.null; - }); -}); - -describe('updateSurveyReportAttachmentMetadataSQL', () => { - it('returns null response when null surveyId provided', () => { - const response = updateSurveyReportAttachmentMetadataSQL( - (null as unknown) as number, - 1, - put_sample_attachment_meta - ); - - expect(response).to.be.null; - }); - - it('returns null response when null attachmentId provided', () => { - const response = updateSurveyReportAttachmentMetadataSQL( - 1, - (null as unknown) as number, - put_sample_attachment_meta - ); - - expect(response).to.be.null; - }); - - it('returns null response when null metadata provided', () => { - const response = updateSurveyReportAttachmentMetadataSQL(1, 1, (null as unknown) as PutReportAttachmentMetadata); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = updateSurveyReportAttachmentMetadataSQL(1, 2, put_sample_attachment_meta); - - expect(response).to.not.be.null; - }); -}); - -describe('postSurveyReportAttachmentSQL', () => { - it('returns null response when null fileName provided', () => { - const response = postSurveyReportAttachmentSQL( - (null as unknown) as string, - 30, - 1, - 'key', - post_sample_attachment_meta - ); - - expect(response).to.be.null; - }); - - it('returns null response when null fileSize provided', () => { - const response = postSurveyReportAttachmentSQL( - 'name', - (null as unknown) as number, - 1, - 'key', - post_sample_attachment_meta - ); - - expect(response).to.be.null; - }); - - it('returns null response when null projectId provided', () => { - const response = postSurveyReportAttachmentSQL( - 'name', - 30, - (null as unknown) as number, - 'key', - post_sample_attachment_meta - ); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = postSurveyReportAttachmentSQL('name', 30, 1, 'key', post_sample_attachment_meta); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteSurveyReportAttachmentSQL', () => { - it('returns null response when null attachmentId provided', () => { - const response = deleteSurveyReportAttachmentSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid attachmentId provided', () => { - const response = deleteSurveyReportAttachmentSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveyReportAttachmentsSQL', () => { - it('returns null response when null surveyId provided', () => { - const response = getSurveyReportAttachmentsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid surveyId provided', () => { - const response = getSurveyReportAttachmentsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveyAttachmentS3KeySQL', () => { - it('returns null response when null surveyId provided', () => { - const response = getSurveyAttachmentS3KeySQL((null as unknown) as number, 1); - - expect(response).to.be.null; - }); - - it('returns null response when null attachmentId provided', () => { - const response = getSurveyAttachmentS3KeySQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid surveyId and attachmentId provided', () => { - const response = getSurveyAttachmentS3KeySQL(1, 2); - - expect(response).to.not.be.null; - }); -}); - -describe('postSurveyAttachmentSQL', () => { - it('returns null response when null surveyId provided', () => { - const response = postSurveyAttachmentSQL('name', 20, 'type', (null as unknown) as number, 'key'); - - expect(response).to.be.null; - }); - - it('returns null response when null fileName provided', () => { - const response = postSurveyAttachmentSQL((null as unknown) as string, 20, 'type', 1, 'key'); - - expect(response).to.be.null; - }); - - it('returns null response when null fileSize provided', () => { - const response = postSurveyAttachmentSQL('name', (null as unknown) as number, 'type', 1, 'key'); - - expect(response).to.be.null; - }); - - it('returns null response when null surveyId provided', () => { - const response = postSurveyAttachmentSQL('name', 20, 'type', 1, (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns null response when null key provided', () => { - const response = postSurveyAttachmentSQL('name', 20, 'type', 1, (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns null response when null fileType provided', () => { - const response = postSurveyAttachmentSQL('name', 20, (null as unknown) as string, 1, 'key'); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = postSurveyAttachmentSQL('name', 20, 'type', 1, 'key'); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveyAttachmentByFileNameSQL', () => { - it('returns null response when null surveyId provided', () => { - const response = getSurveyAttachmentByFileNameSQL((null as unknown) as number, 'name'); - - expect(response).to.be.null; - }); - - it('returns null response when null fileName provided', () => { - const response = getSurveyAttachmentByFileNameSQL(1, (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns non null response when valid surveyId and fileName provided', () => { - const response = getSurveyAttachmentByFileNameSQL(1, 'name'); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveyReportAttachmentByFileNameSQL', () => { - it('returns null response when null surveyId provided', () => { - const response = getSurveyReportAttachmentByFileNameSQL((null as unknown) as number, 'name'); - - expect(response).to.be.null; - }); - - it('returns null response when null fileName provided', () => { - const response = getSurveyReportAttachmentByFileNameSQL(1, (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns non null response when valid surveyId and fileName provided', () => { - const response = getSurveyReportAttachmentByFileNameSQL(1, 'name'); - - expect(response).to.not.be.null; - }); -}); - -describe('putSurveyAttachmentSQL', () => { - it('returns null response when null surveyId provided', () => { - const response = putSurveyAttachmentSQL((null as unknown) as number, 'name', 'type'); - - expect(response).to.be.null; - }); - - it('returns null response when null fileName provided', () => { - const response = putSurveyAttachmentSQL(1, (null as unknown) as string, 'type'); - - expect(response).to.be.null; - }); - - it('returns null response when null fileType provided', () => { - const response = putSurveyAttachmentSQL(1, 'name', (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = putSurveyAttachmentSQL(1, 'name', 'type'); - - expect(response).to.not.be.null; - }); -}); - -describe('insertSurveyReportAttachmentAuthorSQL', () => { - const report_attachment_author: IReportAttachmentAuthor = { - first_name: 'John', - last_name: 'Smith' - }; - it('returns null response when null attachmentId provided', () => { - const response = insertSurveyReportAttachmentAuthorSQL((null as unknown) as number, report_attachment_author); - - expect(response).to.be.null; - }); - - it('returns null response when null report author provided', () => { - const response = insertSurveyReportAttachmentAuthorSQL(1, (null as unknown) as IReportAttachmentAuthor); - - expect(response).to.be.null; - }); - - it('returns null response when null attachmmentId and null report author are provided', () => { - const response = insertSurveyReportAttachmentAuthorSQL( - (null as unknown) as number, - (null as unknown) as IReportAttachmentAuthor - ); - expect(response).to.be.null; - }); - - it('returns not null response when valid parameters are provided', () => { - const response = insertSurveyReportAttachmentAuthorSQL(1, report_attachment_author); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteSurveyReportAttachmentAuthorsSQL', () => { - it('returns null response when null attachmentId provided', () => { - const response = deleteSurveyReportAttachmentAuthorsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns not null response when valid params are provided', () => { - const response = deleteSurveyReportAttachmentAuthorsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveyReportAuthorSQL', () => { - it('returns null response when null projectReportAttachmentId provided', () => { - const response = getSurveyReportAuthorsSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectReportAttachmentId provided', () => { - const response = getSurveyReportAuthorsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveyReportAttachmentSQL', () => { - it('returns null response when null surveyId provided', () => { - const response = getSurveyReportAttachmentSQL((null as unknown) as number, 1); - - expect(response).to.be.null; - }); - - it('returns null response when null attachmentId provided', () => { - const response = getSurveyReportAttachmentSQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectReportAttachmentId provided', () => { - const response = getSurveyReportAttachmentSQL(1, 2); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveyReportAttachmentS3KeySQL', () => { - it('returns null response when null surveyId provided', () => { - const response = getSurveyReportAttachmentS3KeySQL((null as unknown) as number, 1); - - expect(response).to.be.null; - }); - - it('returns null response when null attachmentId provided', () => { - const response = getSurveyReportAttachmentS3KeySQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid projectReportAttachmentId provided', () => { - const response = getSurveyReportAttachmentS3KeySQL(1, 2); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/survey/survey-attachments-queries.ts b/api/src/queries/survey/survey-attachments-queries.ts deleted file mode 100644 index c934a7aef6..0000000000 --- a/api/src/queries/survey/survey-attachments-queries.ts +++ /dev/null @@ -1,532 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; -import { - IReportAttachmentAuthor, - PostReportAttachmentMetadata, - PutReportAttachmentMetadata -} from '../../models/project-survey-attachments'; - -/** - * SQL query to get attachments for a single survey. - * - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const getSurveyAttachmentsSQL = (surveyId: number): SQLStatement | null => { - if (!surveyId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - survey_attachment_id as id, - file_name, - update_date, - create_date, - file_size, - file_type, - key, - security_token - from - survey_attachment - where - survey_id = ${surveyId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to get the list of report attachments for a single survey. - * - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const getSurveyReportAttachmentsSQL = (surveyId: number): SQLStatement | null => { - if (!surveyId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - survey_report_attachment_id as id, - file_name, - update_date, - create_date, - file_size, - key, - security_token - from - survey_report_attachment - where - survey_id = ${surveyId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to get report attachments for a single survey. - * - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const getSurveyReportAttachmentSQL = (surveyId: number, attachmentId: number): SQLStatement | null => { - if (!surveyId || !attachmentId) { - return null; - } - const sqlStatement: SQLStatement = SQL` - SELECT - survey_report_attachment_id as attachment_id, - file_name, - title, - description, - year as year_published, - update_date, - create_date, - file_size, - key, - security_token, - revision_count - FROM - survey_report_attachment - where - survey_report_attachment_id = ${attachmentId} - and - survey_id = ${surveyId} - `; - - return sqlStatement; -}; - -/** - * SQL query to delete an attachment for a single survey. - * - * @param {number} attachmentId - * @returns {SQLStatement} sql query object - */ -export const deleteSurveyAttachmentSQL = (attachmentId: number): SQLStatement | null => { - if (!attachmentId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE - from survey_attachment - WHERE - survey_attachment_id = ${attachmentId} - RETURNING - key; - `; - - return sqlStatement; -}; - -/** - * SQL query to delete a report attachment for a single survey. - * - * @param {number} attachmentId - * @returns {SQLStatement} sql query object - */ -export const deleteSurveyReportAttachmentSQL = (attachmentId: number): SQLStatement | null => { - if (!attachmentId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE - from survey_report_attachment - WHERE - survey_report_attachment_id = ${attachmentId} - RETURNING - key; - `; - - return sqlStatement; -}; - -/** - * SQL query to get S3 key of an attachment for a single survey. - * - * @param {number} surveyId - * @param {number} attachmentId - * @returns {SQLStatement} sql query object - */ -export const getSurveyAttachmentS3KeySQL = (surveyId: number, attachmentId: number): SQLStatement | null => { - if (!surveyId || !attachmentId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - key - FROM - survey_attachment - WHERE - survey_id = ${surveyId} - AND - survey_attachment_id = ${attachmentId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to get S3 key of a report attachment for a single survey. - * - * @param {number} surveyId - * @param {number} attachmentId - * @returns {SQLStatement} sql query object - */ -export const getSurveyReportAttachmentS3KeySQL = (surveyId: number, attachmentId: number): SQLStatement | null => { - if (!surveyId || !attachmentId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - key - FROM - survey_report_attachment - WHERE - survey_id = ${surveyId} - AND - survey_report_attachment_id = ${attachmentId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to insert a survey attachment row. - * - * @param {string} fileName - * @param {number} fileSize - * @param {string} fileType - * @param {number} surveyId - * @param {string} key to use in s3 - * @returns {SQLStatement} sql query object - */ -export const postSurveyAttachmentSQL = ( - fileName: string, - fileSize: number, - fileType: string, - surveyId: number, - key: string -): SQLStatement | null => { - if (!fileName || !fileSize || !fileType || !surveyId || !key) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO survey_attachment ( - survey_id, - file_name, - file_size, - file_type, - key - ) VALUES ( - ${surveyId}, - ${fileName}, - ${fileSize}, - ${fileType}, - ${key} - ) - RETURNING - survey_attachment_id as id, - revision_count; - `; - - return sqlStatement; -}; - -/** - * SQL query to insert a survey report attachment row. - * - * @param {string} fileName - * @param {number} fileSize - * @param {number} projectId - * @param {number} surveyId - * @param {string} key to use in s3 - * @returns {SQLStatement} sql query object - */ -export const postSurveyReportAttachmentSQL = ( - fileName: string, - fileSize: number, - surveyId: number, - key: string, - attachmentMeta: PostReportAttachmentMetadata -): SQLStatement | null => { - if (!fileName || !fileSize || !surveyId || !key) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO survey_report_attachment ( - survey_id, - file_name, - title, - year, - description, - file_size, - key - ) VALUES ( - ${surveyId}, - ${fileName}, - ${attachmentMeta.title}, - ${attachmentMeta.year_published}, - ${attachmentMeta.description}, - ${fileSize}, - ${key} - ) - RETURNING - survey_report_attachment_id as id, - revision_count; - `; - - return sqlStatement; -}; - -/** - * SQL query to get an attachment for a single survey by survey id and filename. - * - * @param {number} surveyId - * @param {string} fileName - * @returns {SQLStatement} sql query object - */ -export const getSurveyAttachmentByFileNameSQL = (surveyId: number, fileName: string): SQLStatement | null => { - if (!surveyId || !fileName) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - survey_attachment_id as id, - file_name, - update_date, - create_date, - file_size - from - survey_attachment - where - survey_id = ${surveyId} - and - file_name = ${fileName}; - `; - - return sqlStatement; -}; - -/** - * SQL query to get an attachment for a single survey by survey id and filename. - * - * @param {number} surveyId - * @param {string} fileName - * @returns {SQLStatement} sql query object - */ -export const getSurveyReportAttachmentByFileNameSQL = (surveyId: number, fileName: string): SQLStatement | null => { - if (!surveyId || !fileName) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - survey_report_attachment_id as id, - file_name, - update_date, - create_date, - file_size - from - survey_report_attachment - where - survey_id = ${surveyId} - and - file_name = ${fileName}; - `; - - return sqlStatement; -}; - -/** - * SQL query to update an attachment for a single survey by survey id and filename. - * - * @param {number} surveyId - * @param {string} fileName - * @param {string} fileType - * @returns {SQLStatement} sql query object - */ -export const putSurveyAttachmentSQL = (surveyId: number, fileName: string, fileType: string): SQLStatement | null => { - if (!surveyId || !fileName || !fileType) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - UPDATE - survey_attachment - SET - file_name = ${fileName}, - file_type = ${fileType} - WHERE - file_name = ${fileName} - AND - survey_id = ${surveyId} - RETURNING - survey_attachment_id as id, - revision_count; - - `; - - return sqlStatement; -}; - -/** - * SQL query to update a report attachment for a single survey by survey id and filename. - * - * @param {number} surveyId - * @param {string} fileName - * @returns {SQLStatement} sql query object - */ -export const putSurveyReportAttachmentSQL = ( - surveyId: number, - fileName: string, - attachmentMeta: PutReportAttachmentMetadata -): SQLStatement | null => { - if (!surveyId || !fileName) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - UPDATE - survey_report_attachment - SET - file_name = ${fileName}, - title = ${attachmentMeta.title}, - year = ${attachmentMeta.year_published}, - description = ${attachmentMeta.description} - WHERE - file_name = ${fileName} - AND - survey_id = ${surveyId} - RETURNING - survey_report_attachment_id as id, - revision_count; - `; - - return sqlStatement; -}; - -export interface ReportAttachmentMeta { - title: string; - description: string; - yearPublished: string; -} - -/** - * Update the metadata fields of survey report attachment, for the specified `surveyId` and `attachmentId`. - * - * @param {number} surveyId - * @param {number} attachmentId - * @param {PutReportAttachmentMetadata} metadata - * @return {*} {(SQLStatement | null)} - */ -export const updateSurveyReportAttachmentMetadataSQL = ( - surveyId: number, - attachmentId: number, - metadata: PutReportAttachmentMetadata -): SQLStatement | null => { - if (!surveyId || !attachmentId || !metadata) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - UPDATE - survey_report_attachment - SET - title = ${metadata.title}, - year = ${metadata.year_published}, - description = ${metadata.description} - WHERE - survey_id = ${surveyId} - AND - survey_report_attachment_id = ${attachmentId} - AND - revision_count = ${metadata.revision_count}; - `; - - return sqlStatement; -}; - -/** - * Insert a new survey report attachment author record, for the specified `attachmentId` - * - * @param {number} attachmentId - * @param {IReportAttachmentAuthor} author - * @return {*} {(SQLStatement | null)} - */ -export const insertSurveyReportAttachmentAuthorSQL = ( - attachmentId: number, - author: IReportAttachmentAuthor -): SQLStatement | null => { - if (!attachmentId || !author) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO survey_report_author ( - survey_report_attachment_id, - first_name, - last_name - ) VALUES ( - ${attachmentId}, - ${author.first_name}, - ${author.last_name} - ); - `; - - return sqlStatement; -}; - -/** - * Delete all project report attachment author records, for the specified `attachmentId`. - * - * @param {number} attachmentId - * @return {*} {(SQLStatement | null)} - */ -export const deleteSurveyReportAttachmentAuthorsSQL = (attachmentId: number): SQLStatement | null => { - if (!attachmentId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE FROM - survey_report_author - WHERE - survey_report_attachment_id = ${attachmentId}; - `; - - return sqlStatement; -}; - -/** - * Get the metadata fields of survey report attachment, for the specified `surveyId` and `attachmentId`. - * - * @param {number} surveyId - * @param {number} attachmentId - * @param {PutReportAttachmentMetadata} metadata - * @return {*} {(SQLStatement | null)} - */ -export const getSurveyReportAuthorsSQL = (surveyReportAttachmentId: number): SQLStatement | null => { - if (!surveyReportAttachmentId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - SELECT - survey_report_author.* - FROM - survey_report_author - where - survey_report_attachment_id = ${surveyReportAttachmentId} - `; - - return sqlStatement; -}; diff --git a/api/src/queries/survey/survey-create-queries.test.ts b/api/src/queries/survey/survey-create-queries.test.ts deleted file mode 100644 index d9290f9200..0000000000 --- a/api/src/queries/survey/survey-create-queries.test.ts +++ /dev/null @@ -1,211 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { PostProprietorData, PostSurveyObject } from '../../models/survey-create'; -import { - insertSurveyFundingSourceSQL, - postAncillarySpeciesSQL, - postFocalSpeciesSQL, - postNewSurveyPermitSQL, - postSurveyProprietorSQL, - postSurveySQL -} from './survey-create-queries'; - -describe('postSurveySQL', () => { - it('returns null when null projectId param provided', () => { - const survey = new PostSurveyObject(); - const response = postSurveySQL((null as unknown) as number, survey); - - expect(response).to.be.null; - }); - - it('returns null when null survey data param provided', () => { - const response = postSurveySQL(1, (null as unknown) as PostSurveyObject); - - expect(response).to.be.null; - }); - - it('returns a sql statement when geometry array is empty', () => { - const surveyData = { - survey_details: { - survey_name: 'survey_name', - start_date: '2020/04/03', - end_date: '2020/05/05', - biologist_first_name: 'John', - biologist_last_name: 'Smith' - }, - purpose_and_methodology: { - field_method_id: 1, - additional_details: 'details', - ecological_season_id: 2, - intended_outcome_id: 3, - surveyed_all_areas: true - }, - location: { - survey_area_name: 'some place', - geometry: [] - } - }; - const postSurveyObject = new PostSurveyObject(surveyData); - const response = postSurveySQL(1, postSurveyObject); - - expect(response).to.not.be.null; - }); - - it('returns a sql statement when all values provided', () => { - const surveyData = { - survey_details: { - survey_name: 'survey_name', - start_date: '2020/04/03', - end_date: '2020/05/05', - biologist_first_name: 'John', - biologist_last_name: 'Smith' - }, - purpose_and_methodology: { - field_method_id: 1, - additional_details: 'details', - ecological_season_id: 2, - intended_outcome_id: 3, - surveyed_all_areas: true - }, - location: { - survey_area_name: 'some place', - geometry: [ - { - type: 'Feature', - id: 'myGeo', - geometry: { - type: 'Polygon', - coordinates: [ - [ - [-128, 55], - [-128, 55.5], - [-128, 56], - [-126, 58], - [-128, 55] - ] - ] - }, - properties: { - name: 'Biohub Islands' - } - } - ] - } - }; - - const postSurveyObject = new PostSurveyObject(surveyData); - const response = postSurveySQL(1, postSurveyObject); - - expect(response).to.not.be.null; - expect(response?.values).to.deep.include( - '{"type":"Polygon","coordinates":[[[-128,55],[-128,55.5],[-128,56],[-126,58],[-128,55]]]}' - ); - }); -}); - -describe('postSurveyProprietorSQL', () => { - it('returns a sql statement', () => { - const postSurveyProprietorData = new PostProprietorData(null); - const response = postSurveyProprietorSQL(1, postSurveyProprietorData); - - expect(response).to.not.be.null; - }); -}); - -describe('postFocalSpeciesSQL', () => { - it('returns null when null speciesId provided', () => { - const response = postFocalSpeciesSQL((null as unknown) as number, 1); - - expect(response).to.be.null; - }); - - it('returns null when null surveyId provided', () => { - const response = postFocalSpeciesSQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns sql statement when valid params provided', () => { - const response = postFocalSpeciesSQL(1, 2); - - expect(response).to.not.be.null; - }); -}); - -describe('postAncillarySpeciesSQL', () => { - it('returns null when null speciesId provided', () => { - const response = postAncillarySpeciesSQL((null as unknown) as number, 1); - - expect(response).to.be.null; - }); - - it('returns null when null surveyId provided', () => { - const response = postAncillarySpeciesSQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns sql statement when valid params provided', () => { - const response = postAncillarySpeciesSQL(1, 2); - - expect(response).to.not.be.null; - }); -}); - -describe('postNewSurveyPermitSQL', () => { - it('returns null when null projectId provided', () => { - const response = postNewSurveyPermitSQL(1, (null as unknown) as number, 1, '123', 'scientific'); - - expect(response).to.be.null; - }); - - it('returns null when null surveyId provided', () => { - const response = postNewSurveyPermitSQL(1, 1, (null as unknown) as number, '123', 'scientific'); - - expect(response).to.be.null; - }); - - it('returns null when null permitNumber provided', () => { - const response = postNewSurveyPermitSQL(1, 1, 2, (null as unknown) as string, 'scientific'); - - expect(response).to.be.null; - }); - - it('returns null when null permitType provided', () => { - const response = postNewSurveyPermitSQL(1, 1, 2, '123', (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns null when null systemUserId provided', () => { - const response = postNewSurveyPermitSQL(null, 1, 2, '123', 'scientific'); - - expect(response).to.be.null; - }); - - it('returns sql statement when valid params provided', () => { - const response = postNewSurveyPermitSQL(1, 1, 2, '123', 'scientific'); - - expect(response).to.not.be.null; - }); -}); - -describe('insertSurveyFundingSourceSQL', () => { - it('returns null when null surveyId provided', () => { - const response = insertSurveyFundingSourceSQL((null as unknown) as number, 1); - - expect(response).to.be.null; - }); - - it('returns null when null fundingSourceId provided', () => { - const response = insertSurveyFundingSourceSQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns sql statement when valid params provided', () => { - const response = insertSurveyFundingSourceSQL(1, 2); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/survey/survey-create-queries.ts b/api/src/queries/survey/survey-create-queries.ts deleted file mode 100644 index 833bed1103..0000000000 --- a/api/src/queries/survey/survey-create-queries.ts +++ /dev/null @@ -1,249 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; -import { PostProprietorData, PostSurveyObject } from '../../models/survey-create'; -import { queries } from '../queries'; - -/** - * SQL query to insert a survey row. - * - * @param {number} projectId - * @param {PostSurveyObject} survey - * @returns {SQLStatement} sql query object - */ -export const postSurveySQL = (projectId: number, survey: PostSurveyObject): SQLStatement | null => { - if (!projectId || !survey) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO survey ( - project_id, - name, - start_date, - end_date, - lead_first_name, - lead_last_name, - field_method_id, - additional_details, - ecological_season_id, - intended_outcome_id, - surveyed_all_areas, - location_name, - geojson, - geography - ) VALUES ( - ${projectId}, - ${survey.survey_details.survey_name}, - ${survey.survey_details.start_date}, - ${survey.survey_details.end_date}, - ${survey.survey_details.biologist_first_name}, - ${survey.survey_details.biologist_last_name}, - ${survey.purpose_and_methodology.field_method_id}, - ${survey.purpose_and_methodology.additional_details}, - ${survey.purpose_and_methodology.ecological_season_id}, - ${survey.purpose_and_methodology.intended_outcome_id}, - ${survey.purpose_and_methodology.surveyed_all_areas}, - ${survey.location.survey_area_name}, - ${JSON.stringify(survey.location.geometry)} - `; - - if (survey.location.geometry && survey.location.geometry.length) { - const geometryCollectionSQL = queries.spatial.generateGeometryCollectionSQL(survey.location.geometry); - - sqlStatement.append(SQL` - ,public.geography( - public.ST_Force2D( - public.ST_SetSRID( - `); - - sqlStatement.append(geometryCollectionSQL); - - sqlStatement.append(SQL` - , 4326))) - `); - } else { - sqlStatement.append(SQL` - ,null - `); - } - - sqlStatement.append(SQL` - ) - RETURNING - survey_id as id; - `); - - return sqlStatement; -}; - -/** - * SQL query to insert a survey_proprietor row. - * - * @param {number} surveyId - * @param {PostProprietorData} surveyProprietor - * @returns {SQLStatement} sql query object - */ -export const postSurveyProprietorSQL = (surveyId: number, survey_proprietor: PostProprietorData): SQLStatement => { - return SQL` - INSERT INTO survey_proprietor ( - survey_id, - proprietor_type_id, - first_nations_id, - rationale, - proprietor_name, - disa_required - ) VALUES ( - ${surveyId}, - ${survey_proprietor.prt_id}, - ${survey_proprietor.fn_id}, - ${survey_proprietor.rationale}, - ${survey_proprietor.proprietor_name}, - ${survey_proprietor.disa_required} - ) - RETURNING - survey_proprietor_id as id; - `; -}; - -/** - * SQL query to insert a survey funding source row into the survey_funding_source table. - * - * @param {number} surveyId - * @param {number} fundingSourceId - * @returns {SQLStatement} sql query object - */ -export const insertSurveyFundingSourceSQL = (surveyId: number, fundingSourceId: number): SQLStatement | null => { - if (!surveyId || !fundingSourceId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO survey_funding_source ( - survey_id, - project_funding_source_id - ) VALUES ( - ${surveyId}, - ${fundingSourceId} - ); - `; - - return sqlStatement; -}; - -/** - * SQL query to insert a survey permit row into the permit table. - * - * @param {number | null} systemUserId - * @param {number} projectId - * @param {number} surveyId - * @param {string} permitNumber - * @param {string} permitType - * @returns {SQLStatement} sql query object - */ -export const postNewSurveyPermitSQL = ( - systemUserId: number | null, - projectId: number, - surveyId: number, - permitNumber: string, - permitType: string -): SQLStatement | null => { - if (!systemUserId || !projectId || !surveyId || !permitNumber || !permitType) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO permit ( - system_user_id, - project_id, - survey_id, - number, - type - ) VALUES ( - ${systemUserId}, - ${projectId}, - ${surveyId}, - ${permitNumber}, - ${permitType} - ); - `; - - return sqlStatement; -}; - -/** - * SQL query to insert a focal species row into the study_species table. - * - * @param {number} speciesId - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const postFocalSpeciesSQL = (speciesId: number, surveyId: number): SQLStatement | null => { - if (!speciesId || !surveyId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO study_species ( - wldtaxonomic_units_id, - is_focal, - survey_id - ) VALUES ( - ${speciesId}, - TRUE, - ${surveyId} - ) RETURNING study_species_id as id; - `; - - return sqlStatement; -}; - -/** - * SQL query to insert a ancillary species row into the study_species table. - * - * @param {number} speciesId - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const postAncillarySpeciesSQL = (speciesId: number, surveyId: number): SQLStatement | null => { - if (!speciesId || !surveyId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO study_species ( - wldtaxonomic_units_id, - is_focal, - survey_id - ) VALUES ( - ${speciesId}, - FALSE, - ${surveyId} - ) RETURNING study_species_id as id; - `; - - return sqlStatement; -}; - -/** - * SQL query to insert a ancillary species row into the study_species table. - * - * @param {number} speciesId - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const postVantageCodesSQL = (vantageCodeId: number, surveyId: number): SQLStatement | null => { - if (!vantageCodeId || !surveyId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO survey_vantage ( - vantage_id, - survey_id - ) VALUES ( - ${vantageCodeId}, - ${surveyId} - ) RETURNING survey_vantage_id as id; - `; - - return sqlStatement; -}; diff --git a/api/src/queries/survey/survey-delete-queries.test.ts b/api/src/queries/survey/survey-delete-queries.test.ts deleted file mode 100644 index 883415adcd..0000000000 --- a/api/src/queries/survey/survey-delete-queries.test.ts +++ /dev/null @@ -1,64 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { - deleteAllSurveySpeciesSQL, - deleteSurveyFundingSourceByProjectFundingSourceIdSQL, - deleteSurveyFundingSourcesBySurveyIdSQL, - deleteSurveyProprietorSQL, - deleteSurveySQL, - deleteSurveyVantageCodesSQL -} from './survey-delete-queries'; - -describe('deleteAllSurveySpeciesSQL', () => { - it('returns a sql statement', () => { - const response = deleteAllSurveySpeciesSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteSurveyProprietorSQL', () => { - it('returns a sql statement', () => { - const response = deleteSurveyProprietorSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteSurveySQL', () => { - it('returns a sql statement', () => { - const response = deleteSurveySQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteSurveyFundingSourcesBySurveyIdSQL', () => { - it('returns a sql statement', () => { - const response = deleteSurveyFundingSourcesBySurveyIdSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteSurveyVantageCodesSQL', () => { - it('returns a sql statement', () => { - const response = deleteSurveyVantageCodesSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteSurveyFundingSourceByProjectFundingSourceIdSQL', () => { - it('returns null when project funding source id is null', () => { - const response = deleteSurveyFundingSourceByProjectFundingSourceIdSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns a non null response when valid params passed in', () => { - const response = deleteSurveyFundingSourceByProjectFundingSourceIdSQL(1); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/survey/survey-delete-queries.ts b/api/src/queries/survey/survey-delete-queries.ts deleted file mode 100644 index e7389b6c03..0000000000 --- a/api/src/queries/survey/survey-delete-queries.ts +++ /dev/null @@ -1,95 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; - -/** - * SQL query to delete survey funding sources rows based on survey id. - * - * @param {number} surveyIdF - * @returns {SQLStatement} sql query object - */ -export const deleteSurveyFundingSourcesBySurveyIdSQL = (surveyId: number): SQLStatement => { - return SQL` - DELETE - from survey_funding_source - WHERE - survey_id = ${surveyId}; - `; -}; - -/** - * SQL query to delete survey funding sources rows based on project funding source id. - * - * @param {number | undefined} projectFundingSourceId - * @returns {SQLStatement} sql query object - */ -export const deleteSurveyFundingSourceByProjectFundingSourceIdSQL = ( - projectFundingSourceId: number | undefined -): SQLStatement | null => { - if (!projectFundingSourceId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - DELETE - from survey_funding_source - WHERE - project_funding_source_id = ${projectFundingSourceId}; - `; - - return sqlStatement; -}; - -/** - * SQL query to delete all survey species rows. - * - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const deleteAllSurveySpeciesSQL = (surveyId: number): SQLStatement => { - return SQL` - DELETE - from study_species - WHERE - survey_id = ${surveyId}; - `; -}; - -/** - * SQL query to delete survey proprietor rows. - * - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const deleteSurveyProprietorSQL = (surveyId: number): SQLStatement => { - return SQL` - DELETE - from survey_proprietor - WHERE - survey_id = ${surveyId}; - `; -}; - -/** - * SQL query to delete a survey row (and associated data) based on survey ID. - * - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const deleteSurveySQL = (surveyId: number): SQLStatement => { - return SQL`call api_delete_survey(${surveyId})`; -}; - -/** - * SQL query to delete survey proprietor rows. - * - * @param {number} surveyId - * @param {number} surveyProprietorId - * @returns {SQLStatement} sql query object - */ -export const deleteSurveyVantageCodesSQL = (surveyId: number): SQLStatement => { - return SQL` - DELETE - from survey_vantage - WHERE - survey_id = ${surveyId}; - `; -}; diff --git a/api/src/queries/survey/survey-occurrence-queries.test.ts b/api/src/queries/survey/survey-occurrence-queries.test.ts deleted file mode 100644 index dd3efd5a0d..0000000000 --- a/api/src/queries/survey/survey-occurrence-queries.test.ts +++ /dev/null @@ -1,265 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { - deleteOccurrenceSubmissionSQL, - deleteSurveyOccurrencesSQL, - getLatestSurveyOccurrenceSubmissionSQL, - getOccurrenceSubmissionMessagesSQL, - getSurveyOccurrenceSubmissionSQL, - getTemplateMethodologySpeciesRecordSQL, - insertOccurrenceSubmissionMessageSQL, - insertOccurrenceSubmissionStatusSQL, - insertSurveyOccurrenceSubmissionSQL, - updateSurveyOccurrenceSubmissionSQL -} from './survey-occurrence-queries'; - -describe('insertSurveyOccurrenceSubmissionSQL', () => { - it('returns null response when null surveyId provided', () => { - const response = insertSurveyOccurrenceSubmissionSQL({ - surveyId: (null as unknown) as number, - source: 'fileSource', - inputKey: 'fileKey' - }); - - expect(response).to.be.null; - }); - - it('returns null response when null source provided', () => { - const response = insertSurveyOccurrenceSubmissionSQL({ - surveyId: 1, - source: (null as unknown) as string, - inputKey: 'fileKey' - }); - - expect(response).to.be.null; - }); - - it('returns non null response when all valid params provided without inputKey', () => { - const response = insertSurveyOccurrenceSubmissionSQL({ - surveyId: 1, - source: 'fileSource', - inputFileName: 'inputFileName', - outputFileName: 'outputFileName', - outputKey: 'outputfileKey' - }); - - expect(response).to.not.be.null; - }); - - it('returns non null response when all valid params provided with inputKey', () => { - const response = insertSurveyOccurrenceSubmissionSQL({ - surveyId: 1, - source: 'fileSource', - inputFileName: 'inputFileName', - inputKey: 'inputfileKey', - outputFileName: 'outputFileName', - outputKey: 'outputfileKey' - }); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteOccurrenceSubmissionSQL', () => { - it('returns null response when null submissionId provided', () => { - const response = deleteOccurrenceSubmissionSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = deleteOccurrenceSubmissionSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getLatestSurveyOccurrenceSubmission', () => { - it('returns null response when null surveyId provided', () => { - const response = getLatestSurveyOccurrenceSubmissionSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = getLatestSurveyOccurrenceSubmissionSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('deleteSurveyOccurrencesSQL', () => { - it('returns null response when null occurrenceSubmissionId provided', () => { - const response = deleteSurveyOccurrencesSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = deleteSurveyOccurrencesSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('updateSurveyOccurrenceSubmissionSQL', () => { - it('returns null response when null surveyId provided', () => { - const response = updateSurveyOccurrenceSubmissionSQL({ - submissionId: (null as unknown) as number, - inputKey: 'fileKey' - }); - - expect(response).to.be.null; - }); - - it('returns null response when null key provided', () => { - const response = updateSurveyOccurrenceSubmissionSQL({ - submissionId: 1, - inputKey: (null as unknown) as string - }); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = updateSurveyOccurrenceSubmissionSQL({ - submissionId: 1, - inputKey: 'fileKey', - inputFileName: 'fileName', - outputFileName: 'outputFileName', - outputKey: 'outputKey' - }); - - expect(response).to.not.be.null; - }); - - it('returns non null response when valid params provided without inputKey', () => { - const response = updateSurveyOccurrenceSubmissionSQL({ - submissionId: 1, - inputKey: 'fileKey' - }); - - expect(response).to.not.be.null; - }); - - it('returns non null response when valid params provided without inputFileName', () => { - const response = updateSurveyOccurrenceSubmissionSQL({ - submissionId: 1, - inputFileName: 'fileName' - }); - - expect(response).to.not.be.null; - }); - - it('returns non null response when valid params provided without outputFileName', () => { - const response = updateSurveyOccurrenceSubmissionSQL({ - submissionId: 1, - outputFileName: 'outputFileName' - }); - - expect(response).to.not.be.null; - }); - - it('returns non null response when valid params provided without outputKey', () => { - const response = updateSurveyOccurrenceSubmissionSQL({ - submissionId: 1, - outputKey: 'outputKey' - }); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveyOccurrenceSubmissionSQL', () => { - it('returns null response when null occurrenceSubmissionId provided', () => { - const response = getSurveyOccurrenceSubmissionSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = getSurveyOccurrenceSubmissionSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('insertSurveySubmissionStatusSQL', () => { - it('returns null response when null occurrenceSubmissionId provided', () => { - const response = insertOccurrenceSubmissionStatusSQL((null as unknown) as number, 'type'); - - expect(response).to.be.null; - }); - - it('returns null response when null submissionStatusType provided', () => { - const response = insertOccurrenceSubmissionStatusSQL(1, (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = insertOccurrenceSubmissionStatusSQL(1, 'type'); - - expect(response).to.not.be.null; - }); -}); - -describe('insertSurveySubmissionMessageSQL', () => { - it('returns null response when null occurrenceSubmissionId provided', () => { - const response = insertOccurrenceSubmissionMessageSQL((null as unknown) as number, 'type', 'message', 'errorcode'); - - expect(response).to.be.null; - }); - - it('returns null response when null submissionStatusType provided', () => { - const response = insertOccurrenceSubmissionMessageSQL(1, (null as unknown) as string, 'message', 'errorcode'); - - expect(response).to.be.null; - }); - - it('returns null response when null submissionMessage provided', () => { - const response = insertOccurrenceSubmissionMessageSQL(1, 'type', (null as unknown) as string, 'errorcode'); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = insertOccurrenceSubmissionMessageSQL(1, 'type', 'message', 'errorcode'); - - expect(response).to.not.be.null; - }); -}); - -describe('getOccurrenceSubmissionMessagesSQL', () => { - it('returns null response when null occurrenceSubmissionId provided', () => { - const response = getOccurrenceSubmissionMessagesSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = getOccurrenceSubmissionMessagesSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getTemplateMethodologySpeciesRecordSQL', () => { - it('returns null response when null methodologyId provided', () => { - const response = getTemplateMethodologySpeciesRecordSQL((null as unknown) as number, 1); - - expect(response).to.be.null; - }); - - it('returns null response when null templateId provided', () => { - const response = getTemplateMethodologySpeciesRecordSQL(1, (null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = getTemplateMethodologySpeciesRecordSQL(1, 1); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/survey/survey-occurrence-queries.ts b/api/src/queries/survey/survey-occurrence-queries.ts deleted file mode 100644 index 1c765557e5..0000000000 --- a/api/src/queries/survey/survey-occurrence-queries.ts +++ /dev/null @@ -1,404 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; -import { - AppendSQLColumn, - appendSQLColumns, - AppendSQLColumnsEqualValues, - appendSQLColumnsEqualValues, - AppendSQLValue, - appendSQLValues -} from '../../utils/sql-utils'; - -/** - * SQL query to insert a survey occurrence submission row. - * - * @param {number} surveyId - * @param {string} source - * @param {string} inputFileName - * @param {(number | null)} templateMethodologyId - * @return {*} {(SQLStatement | null)} - */ -export const insertSurveyOccurrenceSubmissionSQL = (data: { - surveyId: number; - source: string; - inputFileName?: string; - inputKey?: string; - outputFileName?: string; - outputKey?: string; -}): SQLStatement | null => { - if (!data || !data.surveyId || !data.source) { - return null; - } - - const dataKeys = Object.keys(data); - - const columnItems: AppendSQLColumn[] = []; - const valueItems: AppendSQLValue[] = []; - - if (dataKeys.includes('inputFileName')) { - columnItems.push({ columnName: 'input_file_name' }); - valueItems.push({ columnValue: data.inputFileName }); - } - - if (dataKeys.includes('inputKey')) { - columnItems.push({ columnName: 'input_key' }); - valueItems.push({ columnValue: data.inputKey }); - } - - if (dataKeys.includes('outputFileName')) { - columnItems.push({ columnName: 'output_file_name' }); - valueItems.push({ columnValue: data.outputFileName }); - } - - if (dataKeys.includes('outputKey')) { - columnItems.push({ columnName: 'output_key' }); - valueItems.push({ columnValue: data.outputKey }); - } - - const sqlStatement: SQLStatement = SQL` - INSERT INTO occurrence_submission ( - survey_id, - source, - event_timestamp, - `; - - appendSQLColumns(sqlStatement, columnItems); - - sqlStatement.append(SQL` - ) VALUES ( - ${data.surveyId}, - ${data.source}, - now(), - `); - - appendSQLValues(sqlStatement, valueItems); - - sqlStatement.append(SQL` - ) - RETURNING - occurrence_submission_id as id; - `); - - return sqlStatement; -}; - -/** - * SQL query to update a survey occurrence submission row. - * - * @param {{ - * submissionId: number; - * inputKey?: string; - * outputFileName?: string; - * outputKey?: string; - * }} data - * @return {*} {(SQLStatement | null)} - */ -export const updateSurveyOccurrenceSubmissionSQL = (data: { - submissionId: number; - inputFileName?: string; - inputKey?: string; - outputFileName?: string; - outputKey?: string; -}): SQLStatement | null => { - if (!data.submissionId || (!data.inputFileName && !data.inputKey && !data.outputFileName && !data.outputKey)) { - return null; - } - - const dataKeys = Object.keys(data); - - const items: AppendSQLColumnsEqualValues[] = []; - - if (dataKeys.includes('inputFileName')) { - items.push({ columnName: 'input_file_name', columnValue: data.inputFileName }); - } - - if (dataKeys.includes('inputKey')) { - items.push({ columnName: 'input_key', columnValue: data.inputKey }); - } - - if (dataKeys.includes('outputFileName')) { - items.push({ columnName: 'output_file_name', columnValue: data.outputFileName }); - } - - if (dataKeys.includes('outputKey')) { - items.push({ columnName: 'output_key', columnValue: data.outputKey }); - } - - const sqlStatement: SQLStatement = SQL` - UPDATE occurrence_submission - SET - `; - - appendSQLColumnsEqualValues(sqlStatement, items); - - sqlStatement.append(SQL` - WHERE - occurrence_submission_id = ${data.submissionId} - RETURNING occurrence_submission_id as id; - `); - - return sqlStatement; -}; - -/** - * SQL query to get latest occurrence submission for a survey. - * - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const getLatestSurveyOccurrenceSubmissionSQL = (surveyId: number): SQLStatement | null => { - if (!surveyId) { - return null; - } - - return SQL` - SELECT - os.occurrence_submission_id as id, - os.survey_id, - os.source, - os.delete_timestamp, - os.event_timestamp, - os.input_key, - os.input_file_name, - ss.submission_status_id, - ss.submission_status_type_id, - sst.name as submission_status_type_name, - sm.submission_message_id, - sm.submission_message_type_id, - sm.message, - smt.name as submission_message_type_name - FROM - occurrence_submission as os - LEFT OUTER JOIN - submission_status as ss - ON - os.occurrence_submission_id = ss.occurrence_submission_id - LEFT OUTER JOIN - submission_status_type as sst - ON - sst.submission_status_type_id = ss.submission_status_type_id - LEFT OUTER JOIN - submission_message as sm - ON - sm.submission_status_id = ss.submission_status_id - LEFT OUTER JOIN - submission_message_type as smt - ON - smt.submission_message_type_id = sm.submission_message_type_id - WHERE - os.survey_id = ${surveyId} - ORDER BY - os.event_timestamp DESC - LIMIT 1 - ; - `; -}; - -/** - * SQL query to delete occurrence records by occurrence submission id. - * - * @param {number} occurrenceSubmissionId - * @return {*} {(SQLStatement | null)} - */ -export const deleteSurveyOccurrencesSQL = (occurrenceSubmissionId: number): SQLStatement | null => { - if (!occurrenceSubmissionId) { - return null; - } - - return SQL` - DELETE FROM - occurrence - WHERE - occurrence_submission_id = ${occurrenceSubmissionId}; - `; -}; - -/** - * SQL query to get the record for a single occurrence submission. - * - * @param {number} submissionId - * @returns {SQLStatement} sql query object - */ -export const getSurveyOccurrenceSubmissionSQL = (occurrenceSubmissionId: number): SQLStatement | null => { - if (!occurrenceSubmissionId) { - return null; - } - - return SQL` - SELECT - * - FROM - occurrence_submission - WHERE - occurrence_submission_id = ${occurrenceSubmissionId}; - `; -}; - -/** - * SQL query to soft delete the occurrence submission entry by ID - * - * @param {number} occurrenceSubmissionId - * @returns {SQLStatement} sql query object - */ -export const deleteOccurrenceSubmissionSQL = (occurrenceSubmissionId: number): SQLStatement | null => { - if (!occurrenceSubmissionId) { - return null; - } - - return SQL` - UPDATE occurrence_submission - SET delete_timestamp = now() - WHERE occurrence_submission_id = ${occurrenceSubmissionId}; - `; -}; - -/** - * SQL query to insert the occurrence submission status. - * - * @param {number} occurrenceSubmissionId - * @param {string} submissionStatusType - * @returns {SQLStatement} sql query object - */ -export const insertOccurrenceSubmissionStatusSQL = ( - occurrenceSubmissionId: number, - submissionStatusType: string -): SQLStatement | null => { - if (!occurrenceSubmissionId || !submissionStatusType) { - return null; - } - - return SQL` - INSERT INTO submission_status ( - occurrence_submission_id, - submission_status_type_id, - event_timestamp - ) VALUES ( - ${occurrenceSubmissionId}, - ( - SELECT - submission_status_type_id - FROM - submission_status_type - WHERE - name = ${submissionStatusType} - ), - now() - ) - RETURNING - submission_status_id as id; - `; -}; - -/** - * SQL query to insert the occurrence submission message. - * - * @param {number} occurrenceSubmissionId - * @param {string} submissionStatusType - * @param {string} submissionMessage - * @returns {SQLStatement} sql query object - */ -export const insertOccurrenceSubmissionMessageSQL = ( - submissionStatusId: number, - submissionMessageType: string, - submissionMessage: string, - errorCode: string -): SQLStatement | null => { - if (!submissionStatusId || !submissionMessageType || !submissionMessage || !errorCode) { - return null; - } - - return SQL` - INSERT INTO submission_message ( - submission_status_id, - submission_message_type_id, - event_timestamp, - message - ) VALUES ( - ${submissionStatusId}, - ( - SELECT - submission_message_type_id - FROM - submission_message_type - WHERE - name = ${errorCode} - ), - now(), - ${submissionMessage} - ) - RETURNING - submission_message_id; - `; -}; - -/** - * SQL query to get the list of messages for an occurrence submission. - * - * @param {number} occurrenceSubmissionId - * @returns {SQLStatement} sql query object - */ -export const getOccurrenceSubmissionMessagesSQL = (occurrenceSubmissionId: number): SQLStatement | null => { - if (!occurrenceSubmissionId) { - return null; - } - - return SQL` - SELECT - sm.submission_message_id as id, - smt.name as type, - sst.name as status, - smc.name as class, - sm.message - FROM - occurrence_submission as os - LEFT OUTER JOIN - submission_status as ss - ON - os.occurrence_submission_id = ss.occurrence_submission_id - LEFT OUTER JOIN - submission_status_type as sst - ON - sst.submission_status_type_id = ss.submission_status_type_id - LEFT OUTER JOIN - submission_message as sm - ON - sm.submission_status_id = ss.submission_status_id - LEFT OUTER JOIN - submission_message_type as smt - ON - smt.submission_message_type_id = sm.submission_message_type_id - LEFT OUTER JOIN - submission_message_class smc - ON - smc.submission_message_class_id = smt.submission_message_class_id - WHERE - os.occurrence_submission_id = ${occurrenceSubmissionId} - ORDER BY sm.submission_message_id; - `; -}; - -/** - * SQL query to get a template methodology species id. - * - * @param {number} fieldMethodId - * @param {number} templateId - * @return {*} {(SQLStatement | null)} - */ -export const getTemplateMethodologySpeciesRecordSQL = ( - fieldMethodId: number, - templateId: number -): SQLStatement | null => { - if (!fieldMethodId || !templateId) { - return null; - } - - return SQL` - SELECT * - FROM - template_methodology_species tms - WHERE - tms.field_method_id = ${fieldMethodId} - AND - tms.template_id = ${templateId} - ; - `; -}; diff --git a/api/src/queries/survey/survey-summary-queries.test.ts b/api/src/queries/survey/survey-summary-queries.test.ts deleted file mode 100644 index 05d4d722a7..0000000000 --- a/api/src/queries/survey/survey-summary-queries.test.ts +++ /dev/null @@ -1,188 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { PostSummaryDetails } from '../../models/summaryresults-create'; -import { - deleteSummarySubmissionSQL, - getLatestSurveySummarySubmissionSQL, - getSummarySubmissionMessagesSQL, - getSurveySummarySubmissionSQL, - insertSurveySummaryDetailsSQL, - insertSurveySummarySubmissionMessageSQL, - insertSurveySummarySubmissionSQL, - updateSurveySummarySubmissionWithKeySQL -} from './survey-summary-queries'; - -describe('deleteSummarySubmissionSQL', () => { - it('returns null response when null summarySubmissionId provided', () => { - const response = deleteSummarySubmissionSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = deleteSummarySubmissionSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getSummarySubmissionMessagesSQL', () => { - it('returns null response when null summarySubmissionId provided', () => { - const response = getSummarySubmissionMessagesSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = getSummarySubmissionMessagesSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('insertSurveySummarySubmissionMessageSQL', () => { - it('returns null response when null summarySubmissionId provided', () => { - const response = insertSurveySummarySubmissionMessageSQL( - (null as unknown) as number, - 'type', - 'message', - 'errorCode' - ); - - expect(response).to.be.null; - }); - - it('returns null response when null summarySubmissionMessageType provided', () => { - const response = insertSurveySummarySubmissionMessageSQL(1, (null as unknown) as string, 'message', 'errorCode'); - - expect(response).to.be.null; - }); - - it('returns null response when null summarySubmissionMessage provided', () => { - const response = insertSurveySummarySubmissionMessageSQL(1, 'type', (null as unknown) as string, 'errorCode'); - - expect(response).to.be.null; - }); - - it('returns null response when null errorCode provided', () => { - const response = insertSurveySummarySubmissionMessageSQL(1, 'type', 'message', (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = insertSurveySummarySubmissionMessageSQL(1, 'type', 'message', 'errorCode'); - - expect(response).to.not.be.null; - }); -}); - -describe('insertSurveySummarySubmissionSQL', () => { - it('returns null response when null surveyId provided', () => { - const response = insertSurveySummarySubmissionSQL((null as unknown) as number, 'fileSource', 'fileKey'); - - expect(response).to.be.null; - }); - - it('returns null response when null source provided', () => { - const response = insertSurveySummarySubmissionSQL(1, (null as unknown) as string, 'fileKey'); - - expect(response).to.be.null; - }); - - it('returns null response when null fileKey provided', () => { - const response = insertSurveySummarySubmissionSQL(1, 'fileSource', (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = insertSurveySummarySubmissionSQL(1, 'fileSource', 'fileKey'); - - expect(response).to.not.be.null; - }); -}); - -describe('getLatestSurveySummarySubmissionSQL', () => { - it('returns null response when null surveyId provided', () => { - const response = getLatestSurveySummarySubmissionSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = getLatestSurveySummarySubmissionSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('updateSurveySummarySubmissionWithKeySQL', () => { - it('returns null response when null surveyId provided', () => { - const response = updateSurveySummarySubmissionWithKeySQL((null as unknown) as number, 'fileKey'); - - expect(response).to.be.null; - }); - - it('returns null response when null key provided', () => { - const response = updateSurveySummarySubmissionWithKeySQL(1, (null as unknown) as string); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = updateSurveySummarySubmissionWithKeySQL(1, 'fileKey'); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveySummarySubmissionSQL', () => { - it('returns null response when null occurrenceSubmissionId provided', () => { - const response = getSurveySummarySubmissionSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = getSurveySummarySubmissionSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('insertSurveySummaryDetailsSQL', () => { - const obj = { - study_area_id: 'area_name', - parameter: 'density', - stratum: 'S1', - parameter_value: 18, - parameter_estimate: 100, - standard_error: 166.4, - coefficient_variation: 0.4, - confidence_level_percent: 0.9, - confidence_limit_upper: 143.8, - confidence_limit_lower: 0.1, - kilometres_surveyed: 10, - total_area_surveyed_sqm: 10 - }; - const summaryDetailsData = new PostSummaryDetails(obj); - - it('returns null response when null summarySubmissionId provided', () => { - const response = insertSurveySummaryDetailsSQL((null as unknown) as number, summaryDetailsData); - - expect(response).to.be.null; - }); - - it('returns null response when null summaryDetailsData provided', () => { - const response = insertSurveySummaryDetailsSQL(1, (null as unknown) as PostSummaryDetails); - - expect(response).to.be.null; - }); - - it('returns non null response when valid params provided', () => { - const response = insertSurveySummaryDetailsSQL(1, summaryDetailsData); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/survey/survey-summary-queries.ts b/api/src/queries/survey/survey-summary-queries.ts deleted file mode 100644 index 8543501d68..0000000000 --- a/api/src/queries/survey/survey-summary-queries.ts +++ /dev/null @@ -1,279 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; -import { PostSummaryDetails } from '../../models/summaryresults-create'; - -/** - * SQL query to insert a survey summary submission row. - * - * @param {number} surveyId - * @param {string} source - * @param {string} file_name - * @return {*} {(SQLStatement | null)} - */ -export const insertSurveySummarySubmissionSQL = ( - surveyId: number, - source: string, - file_name: string -): SQLStatement | null => { - if (!surveyId || !source || !file_name) { - return null; - } - - return SQL` - INSERT INTO survey_summary_submission ( - survey_id, - source, - file_name, - event_timestamp - ) VALUES ( - ${surveyId}, - ${source}, - ${file_name}, - now() - ) - RETURNING survey_summary_submission_id as id; - `; -}; - -/** - * SQL query to get latest summary submission for a survey. - * - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const getLatestSurveySummarySubmissionSQL = (surveyId: number): SQLStatement | null => { - if (!surveyId) { - return null; - } - - return SQL` - SELECT - sss.survey_summary_submission_id as id, - sss.key, - sss.file_name, - sss.delete_timestamp, - sssm.submission_message_type_id, - sssm.message, - ssmt.name as submission_message_type_name, - ssmt.summary_submission_message_class_id, - ssmc.name as submission_message_class_name - FROM - survey_summary_submission as sss - LEFT OUTER JOIN - survey_summary_submission_message as sssm - ON - sss.survey_summary_submission_id = sssm.survey_summary_submission_id - LEFT OUTER JOIN - summary_submission_message_type as ssmt - ON - sssm.submission_message_type_id = ssmt.submission_message_type_id - LEFT OUTER JOIN - summary_submission_message_class as ssmc - ON - ssmt.summary_submission_message_class_id = ssmc.summary_submission_message_class_id - WHERE - sss.survey_id = ${surveyId} - ORDER BY - sss.event_timestamp DESC - LIMIT 1; - `; -}; - -/** - * SQL query to soft delete the summary submission entry by ID - * - * @param {number} summarySubmissionId - * @returns {SQLStatement} sql query object - */ -export const deleteSummarySubmissionSQL = (summarySubmissionId: number): SQLStatement | null => { - if (!summarySubmissionId) { - return null; - } - - return SQL` - UPDATE survey_summary_submission - SET delete_timestamp = now() - WHERE survey_summary_submission_id = ${summarySubmissionId}; - `; -}; - -/** - * SQL query to insert a survey summary submission row. - * - * @param {number} summarySubmissionId - * @param {string} key - * @return {*} {(SQLStatement | null)} - */ -export const updateSurveySummarySubmissionWithKeySQL = ( - summarySubmissionId: number, - key: string -): SQLStatement | null => { - if (!summarySubmissionId || !key) { - return null; - } - - return SQL` - UPDATE survey_summary_submission - SET - key= ${key} - WHERE - survey_summary_submission_id = ${summarySubmissionId} - RETURNING survey_summary_submission_id as id; - `; -}; - -/** - * SQL query to get the record for a single summary submission. - * - * @param {number} submissionId - * @returns {SQLStatement} sql query object - */ -export const getSurveySummarySubmissionSQL = (summarySubmissionId: number): SQLStatement | null => { - if (!summarySubmissionId) { - return null; - } - - return SQL` - SELECT - * - FROM - survey_summary_submission - WHERE - survey_summary_submission_id = ${summarySubmissionId}; - `; -}; - -/** - * SQL query to insert a survey summary submission row. - * - * @param {number} summarySubmissionId - * @param {string} summaryDetails - * @return {*} {(SQLStatement | null)} - */ -export const insertSurveySummaryDetailsSQL = ( - summarySubmissionId: number, - summaryDetails: PostSummaryDetails -): SQLStatement | null => { - if (!summarySubmissionId || !summaryDetails) { - return null; - } - - return SQL` - INSERT INTO survey_summary_detail ( - survey_summary_submission_id, - study_area_id, - parameter, - stratum, - parameter_value, - parameter_estimate, - confidence_limit_lower, - confidence_limit_upper, - confidence_level_percent, - sightability_model, - standard_error, - coefficient_variation, - kilometres_surveyed, - total_area_surveyed_sqm, - outlier_blocks_removed, - analysis_method - ) VALUES ( - ${summarySubmissionId}, - ${summaryDetails.study_area_id}, - ${summaryDetails.parameter}, - ${summaryDetails.stratum}, - ${summaryDetails.parameter_value}, - ${summaryDetails.parameter_estimate}, - ${summaryDetails.confidence_limit_lower}, - ${summaryDetails.confidence_limit_upper}, - ${summaryDetails.confidence_level_percent}, - ${summaryDetails.sightability_model}, - ${summaryDetails.standard_error}, - ${summaryDetails.coefficient_variation}, - ${summaryDetails.kilometres_surveyed}, - ${summaryDetails.total_area_survey_sqm}, - ${summaryDetails.outlier_blocks_removed}, - ${summaryDetails.analysis_method} - ) - RETURNING survey_summary_detail_id as id; - `; -}; - -/** - * SQL query to insert the occurrence submission message. - * - * @param {number} summarySubmissionId - * @param {string} summarySubmissionMessageType - * @param {string} summarySubmissionMessage - * @param {string} errorCode - * @returns {SQLStatement} sql query object - */ -export const insertSurveySummarySubmissionMessageSQL = ( - summarySubmissionId: number, - summarySubmissionMessageType: string, - summarySubmissionMessage: string, - errorCode: string -): SQLStatement | null => { - if (!summarySubmissionId || !summarySubmissionMessageType || !summarySubmissionMessage || !errorCode) { - return null; - } - - return SQL` - INSERT INTO survey_summary_submission_message ( - survey_summary_submission_id, - submission_message_type_id, - event_timestamp, - message - ) VALUES ( - ${summarySubmissionId}, - ( - SELECT - submission_message_type_id - FROM - summary_submission_message_type - WHERE - name = ${errorCode} - ), - now(), - ${summarySubmissionMessage} - ) - RETURNING - submission_message_id; - `; -}; - -/** - * SQL query to get the list of messages for an summary submission. - * - * @param {number} summarySubmissionId - * @returns {SQLStatement} sql query object - */ -export const getSummarySubmissionMessagesSQL = (summarySubmissionId: number): SQLStatement | null => { - if (!summarySubmissionId) { - return null; - } - - return SQL` - SELECT - sssm.submission_message_id as id, - sssm.message, - ssmt.name as type, - ssmc.name as class - FROM - survey_summary_submission as sss - LEFT OUTER JOIN - survey_summary_submission_message as sssm - ON - sssm.survey_summary_submission_id = sss.survey_summary_submission_id - LEFT OUTER JOIN - summary_submission_message_type as ssmt - ON - ssmt.submission_message_type_id = sssm.submission_message_type_id - LEFT OUTER JOIN - summary_submission_message_class as ssmc - ON - ssmc.summary_submission_message_class_id = ssmt.summary_submission_message_class_id - WHERE - sss.survey_summary_submission_id = ${summarySubmissionId} - ORDER BY - sssm.submission_message_id; - `; -}; diff --git a/api/src/queries/survey/survey-update-queries.test.ts b/api/src/queries/survey/survey-update-queries.test.ts deleted file mode 100644 index db770d9913..0000000000 --- a/api/src/queries/survey/survey-update-queries.test.ts +++ /dev/null @@ -1,112 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { - PutSurveyDetailsData, - PutSurveyFundingData, - PutSurveyLocationData, - PutSurveyObject, - PutSurveyPermitData, - PutSurveyProprietorData, - PutSurveyPurposeAndMethodologyData, - PutSurveySpeciesData -} from '../../models/survey-update'; -import { - associateSurveyToPermitSQL, - insertSurveyPermitSQL, - putSurveyDetailsSQL, - unassociatePermitFromSurveySQL, - updateSurveyPublishStatusSQL -} from './survey-update-queries'; - -describe('putSurveyDetailsSQL', () => { - it('returns non null response when valid params provided with geometry', () => { - const response = putSurveyDetailsSQL(2, ({ - survey_details: new PutSurveyDetailsData(null), - species: new PutSurveySpeciesData(null), - permit: new PutSurveyPermitData(null), - funding: new PutSurveyFundingData(null), - proprietor: new PutSurveyProprietorData(null), - purpose_and_methodology: new PutSurveyPurposeAndMethodologyData(null), - location: new PutSurveyLocationData(null) - } as unknown) as PutSurveyObject); - - expect(response).to.not.be.null; - }); - - it('returns non null response when valid params provided without geometry', () => { - const response = putSurveyDetailsSQL(2, ({ - survey_details: new PutSurveyDetailsData(null), - species: new PutSurveySpeciesData(null), - permit: new PutSurveyPermitData(null), - funding: new PutSurveyFundingData(null), - proprietor: new PutSurveyProprietorData(null), - purpose_and_methodology: new PutSurveyPurposeAndMethodologyData(null), - location: new PutSurveyLocationData({ - survey_area_name: 'name', - geometry: [ - { - type: 'Feature', - geometry: { - type: 'Polygon', - coordinates: [] - }, - properties: {} - } - ], - revision_count: 0 - }) - } as unknown) as PutSurveyObject); - - expect(response).to.not.be.null; - }); -}); - -describe('unassociatePermitFromSurveySQL', () => { - it('returns a sql statement', () => { - const response = unassociatePermitFromSurveySQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('insertSurveyPermitSQL', () => { - it('returns a sql statement', () => { - const response = insertSurveyPermitSQL(1, 2, 3, '4', 'type'); - - expect(response).not.to.be.null; - }); -}); - -describe('associateSurveyToPermitSQL', () => { - it('returns a sql statement', () => { - const response = associateSurveyToPermitSQL(1, 2, '4'); - - expect(response).not.to.be.null; - }); -}); - -describe('updateSurveyPublishStatusSQL', () => { - describe('with invalid parameters', () => { - it('returns null when survey is null', () => { - const response = updateSurveyPublishStatusSQL((null as unknown) as number, true); - - expect(response).to.be.null; - }); - }); - - describe('with valid parameters', () => { - it('returns a SQLStatement when there is a real date value', () => { - const response = updateSurveyPublishStatusSQL(1, true); - - expect(response).to.not.be.null; - expect(response?.values).to.deep.include(1); - }); - - it('returns a SQLStatement when the date value is null', () => { - const response = updateSurveyPublishStatusSQL(1, false); - - expect(response).to.not.be.null; - expect(response?.values).to.deep.include(1); - }); - }); -}); diff --git a/api/src/queries/survey/survey-update-queries.ts b/api/src/queries/survey/survey-update-queries.ts deleted file mode 100644 index 2447200939..0000000000 --- a/api/src/queries/survey/survey-update-queries.ts +++ /dev/null @@ -1,202 +0,0 @@ -import { Knex } from 'knex'; -import { SQL, SQLStatement } from 'sql-template-strings'; -import { getKnex } from '../../database/db'; -import { PutSurveyObject } from '../../models/survey-update'; -import { queries } from '../queries'; - -/** - * SQL query to update a permit row based on an old survey association. - * Unset the survey id column (remove the association of the permit to the survey) - * - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const unassociatePermitFromSurveySQL = (surveyId: number): SQLStatement => { - return SQL` - UPDATE - permit - SET - survey_id = ${null} - WHERE - survey_id = ${surveyId}; - `; -}; - -/** - * Attempt to insert a new permit and associate the project and survey to it. - * - * On conflict (if the permit already exists and belongs to the project), update the permit and associate the survey - * to it. - * - * @param {number} systemUserId - * @param {number} projectId - * @param {number} surveyId - * @param {string} permitNumber - * @param {string} permitType - * @return {*} {SQLStatement} - */ -export const insertSurveyPermitSQL = ( - systemUserId: number, - projectId: number, - surveyId: number, - permitNumber: string, - permitType: string -): SQLStatement => { - return SQL` - INSERT INTO permit ( - system_user_id, - project_id, - survey_id, - number, - type - ) VALUES ( - ${systemUserId}, - ${projectId}, - ${surveyId}, - ${permitNumber}, - ${permitType} - ) - ON CONFLICT (number) DO - UPDATE SET - survey_id = ${surveyId} - WHERE - permit.project_id = ${projectId} - AND - permit.survey_id is NULL; - `; -}; - -/** - * Update an existing permit by associatingF the survey to it. - * - * @param {number} projectId - * @param {number} surveyId - * @param {string} permitNumber - * @return {*} {(SQLStatement} - */ -export const associateSurveyToPermitSQL = (projectId: number, surveyId: number, permitNumber: string): SQLStatement => { - return SQL` - UPDATE - permit - SET - survey_id = ${surveyId} - WHERE - project_id = ${projectId} - AND - number = ${permitNumber}; - `; -}; - -/** - * Knex query builder to update a survey row. - * - * @param {number} surveyId - * @param {PutSurveyObject} data - * @returns {Knex.QueryBuilder} knex query builder - */ -export const putSurveyDetailsSQL = (surveyId: number, data: PutSurveyObject): Knex.QueryBuilder => { - const knex = getKnex(); - - let fieldsToUpdate = {}; - - if (data.survey_details) { - fieldsToUpdate = { - ...fieldsToUpdate, - name: data.survey_details.name, - start_date: data.survey_details.start_date, - end_date: data.survey_details.end_date, - lead_first_name: data.survey_details.lead_first_name, - lead_last_name: data.survey_details.lead_last_name, - revision_count: data.survey_details.revision_count - }; - } - - if (data.purpose_and_methodology) { - fieldsToUpdate = { - ...fieldsToUpdate, - field_method_id: data.purpose_and_methodology.field_method_id, - additional_details: data.purpose_and_methodology.additional_details, - ecological_season_id: data.purpose_and_methodology.ecological_season_id, - intended_outcome_id: data.purpose_and_methodology.intended_outcome_id, - surveyed_all_areas: data.purpose_and_methodology.surveyed_all_areas, - revision_count: data.purpose_and_methodology.revision_count - }; - } - - if (data.location) { - const geometrySqlStatement = SQL``; - - if (data.location.geometry && data.location.geometry.length) { - geometrySqlStatement.append(SQL` - public.geography( - public.ST_Force2D( - public.ST_SetSRID( - `); - - const geometryCollectionSQL = queries.spatial.generateGeometryCollectionSQL(data.location.geometry); - geometrySqlStatement.append(geometryCollectionSQL); - - geometrySqlStatement.append(SQL` - , 4326))) - `); - } else { - geometrySqlStatement.append(SQL` - null - `); - } - - fieldsToUpdate = { - ...fieldsToUpdate, - location_name: data.location.survey_area_name, - geojson: JSON.stringify(data.location.geometry), - geography: knex.raw(geometrySqlStatement.sql, geometrySqlStatement.values), - revision_count: data.location.revision_count - }; - } - - return knex('survey').update(fieldsToUpdate).where('survey_id', surveyId); -}; - -/** - * SQL query to update the publish status of a survey. - * - * @param {number} surveyId - * @param {boolean} publish - * @returns {SQLStatement} sql query object - */ -export const updateSurveyPublishStatusSQL = (surveyId: number, publish: boolean): SQLStatement | null => { - if (!surveyId) { - return null; - } - - const sqlStatement: SQLStatement = SQL` - UPDATE - survey - SET - publish_timestamp = `; - - if (publish) { - sqlStatement.append(SQL` - now() - WHERE - survey_id = ${surveyId} - AND - publish_timestamp IS NULL - `); - } else { - sqlStatement.append(SQL` - null - WHERE - survey_id = ${surveyId} - AND - publish_timestamp IS NOT NULL - `); - } - - sqlStatement.append(SQL` - RETURNING - survey_id as id; - `); - - return sqlStatement; -}; diff --git a/api/src/queries/survey/survey-view-queries.test.ts b/api/src/queries/survey/survey-view-queries.test.ts deleted file mode 100644 index fdc5954388..0000000000 --- a/api/src/queries/survey/survey-view-queries.test.ts +++ /dev/null @@ -1,73 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { - getAllAssignablePermitsForASurveySQL, - getSurveyBasicDataForViewSQL, - getSurveyFocalSpeciesDataForViewSQL, - getSurveyFundingSourcesDataForViewSQL, - getSurveyIdsSQL -} from './survey-view-queries'; - -describe('getAllAssignablePermitsForASurveySQL', () => { - it('returns null when null project id param provided', () => { - const response = getAllAssignablePermitsForASurveySQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns a non null response when valid params passed in', () => { - const response = getAllAssignablePermitsForASurveySQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveyIdsSQL', () => { - it('returns a sql statement', () => { - const response = getSurveyIdsSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveyBasicDataForViewSQL', () => { - it('returns a null response when null survey id param provided', () => { - const response = getSurveyBasicDataForViewSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns a non null response when valid params passed in', () => { - const response = getSurveyBasicDataForViewSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveyFundingSourcesDataForViewSQL', () => { - it('returns a null response when null survey id param provided', () => { - const response = getSurveyFundingSourcesDataForViewSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns a non null response when valid params passed in', () => { - const response = getSurveyFundingSourcesDataForViewSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveyFocalSpeciesDataForViewSQL', () => { - it('returns a null response when null survey id param provided', () => { - const response = getSurveyFocalSpeciesDataForViewSQL((null as unknown) as number); - - expect(response).to.be.null; - }); - - it('returns a non null response when valid params passed in', () => { - const response = getSurveyFocalSpeciesDataForViewSQL(1); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/survey/survey-view-queries.ts b/api/src/queries/survey/survey-view-queries.ts deleted file mode 100644 index edc3d0bef7..0000000000 --- a/api/src/queries/survey/survey-view-queries.ts +++ /dev/null @@ -1,210 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; - -/** - * SQL query to get all permits applicable for a survey - * - * These are permits that are associated to a project but have not been used by any - * other surveys under that project - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getAllAssignablePermitsForASurveySQL = (projectId: number): SQLStatement | null => { - if (!projectId) { - return null; - } - - return SQL` - SELECT - number, - type - FROM - permit - WHERE - project_id = ${projectId} - AND - survey_id IS NULL; - `; -}; - -/** - * SQL query to get all survey ids for a given project. - * - * @param {number} projectId - * @returns {SQLStatement} sql query object - */ -export const getSurveyIdsSQL = (projectId: number): SQLStatement => { - return SQL` - SELECT - survey_id as id - FROM - survey - WHERE - project_id = ${projectId}; - `; -}; - -export const getSurveyBasicDataForViewSQL = (surveyId: number): SQLStatement | null => { - if (!surveyId) { - return null; - } - - return SQL` - SELECT - s.survey_id as id, - s.name, - s.additional_details, - s.field_method_id, - s.ecological_season_id, - s.intended_outcome_id, - s.surveyed_all_areas, - s.start_date, - s.end_date, - s.lead_first_name, - s.lead_last_name, - s.location_name, - s.geojson as geometry, - s.revision_count, - s.publish_timestamp as publish_date, - per.number, - per.type, - max(os.occurrence_submission_id) as occurrence_submission_id, - max(sss.survey_summary_submission_id) as survey_summary_submission_id - FROM - survey as s - LEFT OUTER JOIN - permit as per - ON - per.survey_id = s.survey_id - LEFT OUTER JOIN - field_method as fm - ON - fm.field_method_id = s.field_method_id - LEFT OUTER JOIN - occurrence_submission as os - ON - os.survey_id = s.survey_id - LEFT OUTER JOIN - survey_summary_submission sss - ON - sss.survey_id = s.survey_id - WHERE - s.survey_id = ${surveyId} - GROUP BY - s.survey_id, - s.name, - s.field_method_id, - s.additional_details, - s.intended_outcome_id, - s.surveyed_all_areas, - s.ecological_season_id, - s.start_date, - s.end_date, - s.lead_first_name, - s.lead_last_name, - s.location_name, - s.geojson, - s.revision_count, - s.publish_timestamp, - per.number, - per.type; - `; -}; - -export const getSurveyFundingSourcesDataForViewSQL = (surveyId: number): SQLStatement | null => { - if (!surveyId) { - return null; - } - - return SQL` - SELECT - sfs.project_funding_source_id, - fs.funding_source_id, - pfs.funding_source_project_id, - pfs.funding_amount::numeric::int, - pfs.funding_start_date, - pfs.funding_end_date, - iac.investment_action_category_id, - iac.name as investment_action_category_name, - fs.name as agency_name - FROM - survey as s - RIGHT OUTER JOIN - survey_funding_source as sfs - ON - sfs.survey_id = s.survey_id - RIGHT OUTER JOIN - project_funding_source as pfs - ON - pfs.project_funding_source_id = sfs.project_funding_source_id - RIGHT OUTER JOIN - investment_action_category as iac - ON - pfs.investment_action_category_id = iac.investment_action_category_id - RIGHT OUTER JOIN - funding_source as fs - ON - iac.funding_source_id = fs.funding_source_id - WHERE - s.survey_id = ${surveyId} - GROUP BY - sfs.project_funding_source_id, - fs.funding_source_id, - pfs.funding_source_project_id, - pfs.funding_amount, - pfs.funding_start_date, - pfs.funding_end_date, - iac.investment_action_category_id, - iac.name, - fs.name - ORDER BY - pfs.funding_start_date; - `; -}; - -export const getSurveyFocalSpeciesDataForViewSQL = (surveyId: number): SQLStatement | null => { - if (!surveyId) { - return null; - } - - return SQL` - SELECT - wldtaxonomic_units_id, is_focal - FROM - study_species - WHERE - survey_id = ${surveyId} - AND - is_focal = TRUE; - `; -}; - -export const getLatestOccurrenceSubmissionIdSQL = (surveyId: number): SQLStatement | null => { - if (!surveyId) { - return null; - } - - return SQL` - SELECT - max(occurrence_submission_id) as id - FROM - occurrence_submission - WHERE - survey_id = ${surveyId}; - `; -}; - -export const getLatestSummaryResultIdSQL = (surveyId: number): SQLStatement | null => { - if (!surveyId) { - return null; - } - - return SQL` - SELECT - max(survey_summary_submission_id) as id - FROM - survey_summary_submission - WHERE - survey_id = ${surveyId}; - `; -}; diff --git a/api/src/queries/survey/survey-view-update-queries.test.ts b/api/src/queries/survey/survey-view-update-queries.test.ts deleted file mode 100644 index c58c9e0fcc..0000000000 --- a/api/src/queries/survey/survey-view-update-queries.test.ts +++ /dev/null @@ -1,22 +0,0 @@ -import { expect } from 'chai'; -import { describe } from 'mocha'; -import { - getSurveyProprietorForUpdateSQL, - getSurveyPurposeAndMethodologyForUpdateSQL -} from './survey-view-update-queries'; - -describe('getSurveyPurposeAndMethodologyForUpdateSQL', () => { - it('returns a sql statement', () => { - const response = getSurveyPurposeAndMethodologyForUpdateSQL(1); - - expect(response).to.not.be.null; - }); -}); - -describe('getSurveyProprietorForUpdateSQL', () => { - it('returns a sql statement', () => { - const response = getSurveyProprietorForUpdateSQL(1); - - expect(response).to.not.be.null; - }); -}); diff --git a/api/src/queries/survey/survey-view-update-queries.ts b/api/src/queries/survey/survey-view-update-queries.ts deleted file mode 100644 index 8807879efb..0000000000 --- a/api/src/queries/survey/survey-view-update-queries.ts +++ /dev/null @@ -1,65 +0,0 @@ -import { SQL, SQLStatement } from 'sql-template-strings'; - -/** - * SQL query to retrieve a survey_proprietor row. - * - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const getSurveyProprietorForUpdateSQL = (surveyId: number): SQLStatement => { - return SQL` - SELECT - prt.name as proprietor_type_name, - prt.proprietor_type_id, - fn.name as first_nations_name, - fn.first_nations_id, - sp.rationale as category_rationale, - CASE - WHEN sp.proprietor_name is not null THEN sp.proprietor_name - WHEN fn.first_nations_id is not null THEN fn.name - END as proprietor_name, - sp.disa_required, - sp.revision_count - from - survey_proprietor as sp - left outer join proprietor_type as prt - on sp.proprietor_type_id = prt.proprietor_type_id - left outer join first_nations as fn - on sp.first_nations_id is not null - and sp.first_nations_id = fn.first_nations_id - where - survey_id = ${surveyId}; - `; -}; - -/** - * SQL query to retrieve a survey_proprietor row. - * - * @param {number} surveyId - * @returns {SQLStatement} sql query object - */ -export const getSurveyPurposeAndMethodologyForUpdateSQL = (surveyId: number): SQLStatement => { - return SQL` - SELECT - s.field_method_id, - s.additional_details, - s.ecological_season_id, - s.intended_outcome_id, - s.surveyed_all_areas, - array_remove(array_agg(sv.vantage_id), NULL) as vantage_ids - FROM - survey s - LEFT OUTER JOIN - survey_vantage sv - ON - sv.survey_id = s.survey_id - WHERE - s.survey_id = ${surveyId} - GROUP BY - s.field_method_id, - s.additional_details, - s.ecological_season_id, - s.intended_outcome_id, - s.surveyed_all_areas; - `; -}; diff --git a/api/src/queries/users/user-queries.ts b/api/src/queries/users/user-queries.ts index d4148d88c5..0b471e3ae5 100644 --- a/api/src/queries/users/user-queries.ts +++ b/api/src/queries/users/user-queries.ts @@ -1,4 +1,5 @@ import { SQL, SQLStatement } from 'sql-template-strings'; +import { SYSTEM_IDENTITY_SOURCE } from '../../constants/database'; /** * SQL query to get a single user and their system roles, based on their user_identifier. @@ -99,8 +100,12 @@ export const getUserListSQL = (): SQLStatement | null => { system_role sr ON sur.system_role_id = sr.system_role_id + LEFT JOIN + user_identity_source uis + ON + su.user_identity_source_id = uis.user_identity_source_id WHERE - su.record_end_date IS NULL + su.record_end_date IS NULL and uis.name not in (${SYSTEM_IDENTITY_SOURCE.DATABASE}) GROUP BY su.system_user_id, su.record_end_date, diff --git a/api/src/repositories/attachment-repository.test.ts b/api/src/repositories/attachment-repository.test.ts new file mode 100644 index 0000000000..b83e3d492c --- /dev/null +++ b/api/src/repositories/attachment-repository.test.ts @@ -0,0 +1,985 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import { QueryResult } from 'pg'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { PostReportAttachmentMetadata, PutReportAttachmentMetadata } from '../models/project-survey-attachments'; +import { AttachmentRepository } from '../repositories/attachment-repository'; +import { getMockDBConnection } from '../__mocks__/db'; + +chai.use(sinonChai); + +describe('AttachmentRepository', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('Project', () => { + describe('Attachment', () => { + describe('getProjectAttachments', () => { + it('should return rows', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getProjectAttachments(1); + + expect(response).to.not.be.null; + expect(response).to.eql([{ id: 1 }]); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.getProjectAttachments(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project attachments by projectId'); + } + }); + }); + + describe('getProjectAttachmentById', () => { + it('should return row', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getProjectAttachmentById(1, 1); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.getProjectAttachmentById(1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project attachment by attachmentId'); + } + }); + }); + + describe('insertProjectAttachment', () => { + it('should return row', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.insertProjectAttachment( + ({ file: 'name' } as unknown) as Express.Multer.File, + 1, + 'string', + 'string' + ); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.insertProjectAttachment( + ({ file: 'name' } as unknown) as Express.Multer.File, + 1, + 'string', + 'string' + ); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert project attachment data'); + } + }); + }); + + describe('updateProjectAttachment', () => { + it('should return row', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.updateProjectAttachment('string', 1, 'string'); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.updateProjectAttachment('string', 1, 'string'); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to update project attachment data'); + } + }); + }); + + describe('getProjectAttachmentByFileName', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getProjectAttachmentByFileName(1, 'string'); + + expect(response).to.not.be.null; + expect(response).to.eql({ rows: [{ id: 1 }], rowCount: 1 }); + }); + }); + + describe('getProjectAttachmentS3Key', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ key: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getProjectAttachmentS3Key(1, 1); + + expect(response).to.not.be.null; + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: null } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.getProjectAttachmentS3Key(1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get Project Attachment S3 Key'); + } + }); + }); + + describe('deleteProjectAttachment', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.deleteProjectAttachment(1); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: null } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.deleteProjectAttachment(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to delete Project Attachment by id'); + } + }); + }); + }); + + describe('Report Attachment', () => { + describe('getProjectReportAttachments', () => { + it('should return rows', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getProjectReportAttachments(1); + + expect(response).to.not.be.null; + expect(response).to.eql([{ id: 1 }]); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.getProjectReportAttachments(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project report attachments by projectId'); + } + }); + }); + + describe('getProjectReportAttachmentById', () => { + it('should return row', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getProjectReportAttachmentById(1, 1); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.getProjectReportAttachmentById(1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project report attachments by reportAttachmentId'); + } + }); + }); + + describe('getProjectReportAttachmentAuthors', () => { + it('should return rows', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getProjectReportAttachmentAuthors(1); + + expect(response).to.not.be.null; + expect(response).to.eql([{ id: 1 }]); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.getProjectReportAttachmentAuthors(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal( + 'Failed to get project report attachment authors by reportAttachmentId' + ); + } + }); + }); + + describe('insertProjectReportAttachment', () => { + it('should return row', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.insertProjectReportAttachment( + 'string', + 1, + 1, + ({ title: 'string' } as unknown) as PostReportAttachmentMetadata, + 'string' + ); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.insertProjectReportAttachment( + 'string', + 1, + 1, + ({ title: 'string' } as unknown) as PostReportAttachmentMetadata, + 'string' + ); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert project report attachment data'); + } + }); + }); + + describe('updateProjectReportAttachment', () => { + it('should return row', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.updateProjectReportAttachment('string', 1, ({ + title: 'string' + } as unknown) as PutReportAttachmentMetadata); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.updateProjectReportAttachment('string', 1, ({ + title: 'string' + } as unknown) as PutReportAttachmentMetadata); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to update project attachment data'); + } + }); + }); + + describe('getProjectReportAttachmentByFileName', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getProjectReportAttachmentByFileName(1, 'string'); + + expect(response).to.not.be.null; + expect(response).to.eql({ rows: [{ id: 1 }], rowCount: 1 }); + }); + }); + + describe('deleteProjectReportAttachmentAuthors', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.deleteProjectReportAttachmentAuthors(1); + + expect(response).to.not.be.null; + expect(response).to.eql({ rows: [{ id: 1 }], rowCount: 1 }); + }); + }); + + describe('insertProjectReportAttachmentAuthor', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.insertProjectReportAttachmentAuthor(1, { + first_name: 'name', + last_name: 'name' + }); + + expect(response).to.eql(undefined); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: null } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.insertProjectReportAttachmentAuthor(1, { + first_name: 'name', + last_name: 'name' + }); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert attachment report author record'); + } + }); + }); + + describe('updateProjectReportAttachmentMetadata', () => { + it('should return row', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.updateProjectReportAttachmentMetadata(1, 1, ({ + title: 'string' + } as unknown) as PutReportAttachmentMetadata); + + expect(response).to.eql(undefined); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: null } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.updateProjectReportAttachmentMetadata(1, 1, ({ + title: 'string' + } as unknown) as PutReportAttachmentMetadata); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to update Project Report Attachment Metadata'); + } + }); + }); + + describe('getProjectReportAttachmentS3Key', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ key: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getProjectReportAttachmentS3Key(1, 1); + + expect(response).to.not.be.null; + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: null } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.getProjectReportAttachmentS3Key(1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get Project Report Attachment S3 Key'); + } + }); + }); + + describe('deleteProjectReportAttachment', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.deleteProjectReportAttachment(1); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: null } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.deleteProjectReportAttachment(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to delete Project Report Attachment by id'); + } + }); + }); + }); + }); + + describe('Survey', () => { + describe('Attachment', () => { + describe('getSurveyAttachments', () => { + it('should return rows', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getSurveyAttachments(1); + + expect(response).to.not.be.null; + expect(response).to.eql([{ id: 1 }]); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.getSurveyAttachments(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get survey attachments by surveyId'); + } + }); + }); + + describe('deleteSurveyAttachment', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.deleteSurveyAttachment(1); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: null } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.deleteSurveyAttachment(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to delete Survey Attachment'); + } + }); + }); + + describe('getSurveyAttachmentS3Key', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ key: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getSurveyAttachmentS3Key(1, 1); + + expect(response).to.not.be.null; + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: null } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.getSurveyAttachmentS3Key(1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get Survey Attachment S3 key'); + } + }); + }); + + describe('updateSurveyAttachment', () => { + it('should return row', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.updateSurveyAttachment(1, 'string', 'string'); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.updateSurveyAttachment(1, 'string', 'string'); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to update survey attachment data'); + } + }); + }); + + describe('insertSurveyAttachment', () => { + it('should return row', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.insertSurveyAttachment('string', 1, 'string', 1, 'string'); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.insertSurveyAttachment('string', 1, 'string', 1, 'string'); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert survey attachment data'); + } + }); + }); + + describe('getSurveyAttachmentByFileName', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getSurveyAttachmentByFileName('string', 1); + + expect(response).to.not.be.null; + expect(response).to.eql({ rows: [{ id: 1 }], rowCount: 1 }); + }); + }); + }); + + describe('Report Attachment', () => { + describe('getSurveyReportAttachments', () => { + it('should return rows', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getSurveyReportAttachments(1); + + expect(response).to.not.be.null; + expect(response).to.eql([{ id: 1 }]); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.getSurveyReportAttachments(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get survey report attachments by surveyId'); + } + }); + }); + + describe('getSurveyReportAttachmentById', () => { + it('should return row', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getSurveyReportAttachmentById(1, 1); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.getSurveyReportAttachmentById(1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get survey report attachments by reportAttachmentId'); + } + }); + }); + + describe('getSurveyReportAttachmentAuthors', () => { + it('should return rows', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getSurveyReportAttachmentAuthors(1); + + expect(response).to.not.be.null; + expect(response).to.eql([{ id: 1 }]); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.getSurveyReportAttachmentAuthors(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal( + 'Failed to get survey report attachment authors by reportAttachmentId' + ); + } + }); + }); + + describe('insertSurveyReportAttachment', () => { + it('should return row', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.insertSurveyReportAttachment( + 'string', + 1, + 1, + ({ title: 'string' } as unknown) as PostReportAttachmentMetadata, + 'string' + ); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.insertSurveyReportAttachment( + 'string', + 1, + 1, + ({ title: 'string' } as unknown) as PostReportAttachmentMetadata, + 'string' + ); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert survey report attachment'); + } + }); + }); + + describe('updateSurveyReportAttachment', () => { + it('should return row', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.updateSurveyReportAttachment('string', 1, ({ + title: 'string' + } as unknown) as PutReportAttachmentMetadata); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: undefined } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.updateSurveyReportAttachment('string', 1, ({ + title: 'string' + } as unknown) as PutReportAttachmentMetadata); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to update survey report attachment'); + } + }); + }); + + describe('getSurveyReportAttachmentByFileName', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getSurveyReportAttachmentByFileName(1, 'string'); + + expect(response).to.not.be.null; + expect(response).to.eql({ rows: [{ id: 1 }], rowCount: 1 }); + }); + }); + + describe('deleteSurveyReportAttachmentAuthors', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.deleteSurveyReportAttachmentAuthors(1); + + expect(response).to.eql(undefined); + }); + }); + + describe('insertSurveyReportAttachmentAuthor', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.insertSurveyReportAttachmentAuthor(1, { + first_name: 'name', + last_name: 'name' + }); + + expect(response).to.eql(undefined); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: null } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.insertSurveyReportAttachmentAuthor(1, { + first_name: 'name', + last_name: 'name' + }); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert survey report attachment'); + } + }); + }); + + describe('deleteSurveyReportAttachment', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.deleteSurveyReportAttachment(1); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: null } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.deleteSurveyReportAttachment(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to delete Survey Report Attachment'); + } + }); + }); + + describe('getSurveyReportAttachmentS3Key', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ key: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.getSurveyReportAttachmentS3Key(1, 1); + + expect(response).to.not.be.null; + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: null } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.getSurveyReportAttachmentS3Key(1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get Survey Report Attachment S3 key'); + } + }); + }); + + describe('updateSurveyReportAttachmentMetadata', () => { + it('should return row', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + const response = await repository.updateSurveyReportAttachmentMetadata(1, 1, ({ + title: 'string' + } as unknown) as PutReportAttachmentMetadata); + + expect(response).to.eql(undefined); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: null } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new AttachmentRepository(dbConnection); + + try { + await repository.updateSurveyReportAttachmentMetadata(1, 1, ({ + title: 'string' + } as unknown) as PutReportAttachmentMetadata); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to update Survey Report Attachment metadata'); + } + }); + }); + }); + }); +}); diff --git a/api/src/repositories/attachment-repository.ts b/api/src/repositories/attachment-repository.ts new file mode 100644 index 0000000000..e699ba54ef --- /dev/null +++ b/api/src/repositories/attachment-repository.ts @@ -0,0 +1,1108 @@ +import { QueryResult } from 'pg'; +import SQL from 'sql-template-strings'; +import { ApiExecuteSQLError } from '../errors/api-error'; +import { PostReportAttachmentMetadata, PutReportAttachmentMetadata } from '../models/project-survey-attachments'; +import { getLogger } from '../utils/logger'; +import { BaseRepository } from './base-repository'; + +export type ISurveyAttachment = IProjectAttachment; + +export type ISurveyReportAttachment = IProjectReportAttachment; + +export interface IProjectAttachment { + id: number; + file_name: string; + file_type: string; + create_user: number; + create_date: string; + update_date: string; + file_size: string; + key: string; + revision_count: number; +} + +export interface IProjectReportAttachment { + id: number; + file_name: string; + create_user: number; + title: string; + description: string; + year_published: number; + last_modified: string; + key: string; + file_size: string; + revision_count: number; +} + +export interface IReportAttachmentAuthor { + project_report_author_id: number; + project_report_attachment_id: number; + first_name: string; + last_name: string; + update_date: string; + revision_count: number; +} + +const defaultLog = getLogger('repositories/attachment-repository'); + +/** + * A repository class for accessing project and survey attachment data + * + * @export + * @class AttachmentRepository + * @extends {BaseRepository} + */ +export class AttachmentRepository extends BaseRepository { + /** + * SQL query to get report attachments for a single project. + * + * @param {number} projectId The project ID + * @return {Promise} Promise resolving all project attachments + * @memberof AttachmentRepository + */ + async getProjectAttachments(projectId: number): Promise { + defaultLog.debug({ label: 'getProjectAttachments' }); + + const sqlStatement = SQL` + SELECT + project_attachment_id AS id, + file_name, + file_type, + create_user, + update_date, + create_date, + file_size, + key + FROM + project_attachment + WHERE + project_id = ${projectId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rows) { + throw new ApiExecuteSQLError('Failed to get project attachments by projectId', [ + 'AttachmentRepository->getProjectAttachments', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows; + } + + /** + * Query to get a single project attachment by attachment ID/ + * @param {number} projectId The ID of the project + * @param {number} attachmentId The ID of the attachment + * @return {Promise} A promise resolving the project attachment having the + * given ID. + * @memberof AttachmentRepository + */ + async getProjectAttachmentById(projectId: number, attachmentId: number): Promise { + defaultLog.debug({ label: 'getProjectAttachmentById' }); + + const sqlStatement = SQL` + SELECT + project_attachment_id AS id, + file_name, + file_type, + create_user, + update_date, + create_date, + file_size, + key + FROM + project_attachment + WHERE + project_attachment_id = ${attachmentId} + AND + project_id = ${projectId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rows) { + throw new ApiExecuteSQLError('Failed to get project attachment by attachmentId', [ + 'AttachmentRepository->getProjectAttachmentById', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + /** + * Query to return all project report attachments belonging to the given project. + * @param {number} projectId the ID of the project + * @return {Promise} Promise resolving all of the attachments for the + * given project + * @memberof AttachmentRepository + */ + async getProjectReportAttachments(projectId: number): Promise { + defaultLog.debug({ label: 'getProjectReportAttachments' }); + + const sqlStatement = SQL` + SELECT + project_report_attachment_id as id, + file_name, + create_user, + title, + description, + year::int as year_published, + CASE + WHEN update_date IS NULL + THEN create_date::text + ELSE update_date::text + END AS last_modified, + file_size, + key, + revision_count + FROM + project_report_attachment + WHERE + project_id = ${projectId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rows) { + throw new ApiExecuteSQLError('Failed to get project report attachments by projectId', [ + 'AttachmentRepository->getProjectReportAttachments', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows; + } + + /** + * Query to return the report attachment having the given ID and belonging to the given project. + * @param {number} projectId the ID of the project + * @param {number} reportAttachmentId the ID of the report attachment + * @return {Promise} Promise resolving the report attachment + * @memberof AttachmentRepository + */ + async getProjectReportAttachmentById( + projectId: number, + reportAttachmentId: number + ): Promise { + defaultLog.debug({ label: 'getProjectReportAttachmentById' }); + + const sqlStatement = SQL` + SELECT + project_report_attachment_id as id, + file_name, + title, + description, + year::int as year_published, + CASE + WHEN update_date IS NULL + THEN create_date::text + ELSE update_date::text + END AS last_modified, + file_size, + key, + revision_count + FROM + project_report_attachment + WHERE + project_report_attachment_id = ${reportAttachmentId} + AND + project_id = ${projectId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rows) { + throw new ApiExecuteSQLError('Failed to get project report attachments by reportAttachmentId', [ + 'AttachmentRepository->getProjectReportAttachmentById', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + /** + * SQL query to get survey attachments for a single project. + * + * @param {number} surveyId The survey ID + * @return {Promise} Promise resolving all survey attachments + * @memberof AttachmentRepository + */ + async getSurveyAttachments(surveyId: number): Promise { + defaultLog.debug({ label: 'getSurveyAttachments' }); + + const sqlStatement = SQL` + SELECT + survey_attachment_id as id, + file_name, + file_type, + create_date, + update_date, + create_date, + file_size, + key + FROM + survey_attachment + WHERE + survey_id = ${surveyId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rows) { + throw new ApiExecuteSQLError('Failed to get survey attachments by surveyId', [ + 'AttachmentRepository->getSurveyAttachments', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows; + } + + /** + * Query to return all survey report attachments belonging to the given survey. + * @param {number} surveyId the ID of the survey + * @return {Promise} Promise resolving all of the attachments for the + * given survey + * @memberof AttachmentRepository + */ + async getSurveyReportAttachments(surveyId: number): Promise { + defaultLog.debug({ label: 'getSurveyReportAttachments' }); + + const sqlStatement = SQL` + SELECT + survey_report_attachment_id as id, + file_name, + create_user, + title, + description, + year::int as year_published, + CASE + WHEN update_date IS NULL + THEN create_date::text + ELSE update_date::text + END AS last_modified, + file_size, + key, + revision_count + FROM + survey_report_attachment + WHERE + survey_id = ${surveyId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rows) { + throw new ApiExecuteSQLError('Failed to get survey report attachments by surveyId', [ + 'AttachmentRepository->getSurveyReportAttachments', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows; + } + + /** + * Query to return the report attachment having the given ID and belonging to the given survey. + * @param {number} surveyId the ID of the survey + * @param {number} reportAttachmentId the ID of the report attachment + * @return {Promise} Promise resolving the report attachment + * @memberof AttachmentRepository + */ + async getSurveyReportAttachmentById(surveyId: number, reportAttachmentId: number): Promise { + defaultLog.debug({ label: 'getSurveyReportAttachmentById' }); + + const sqlStatement = SQL` + SELECT + survey_report_attachment_id as id, + file_name, + title, + description, + year::int as year_published, + CASE + WHEN update_date IS NULL + THEN create_date::text + ELSE update_date::text + END AS last_modified, + file_size, + key, + revision_count + FROM + survey_report_attachment + WHERE + survey_report_attachment_id = ${reportAttachmentId} + AND + survey_id = ${surveyId} + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rows) { + throw new ApiExecuteSQLError('Failed to get survey report attachments by reportAttachmentId', [ + 'AttachmentRepository->getSurveyReportAttachmentById', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + /** + * Query to return all of the authors belonging to a project report attachment + * @param {number} reportAttachmentId The ID of the report attachment + * @return {Promise} Promise resolving the report authors + * @memberof AttachmentRepository + */ + async getProjectReportAttachmentAuthors(reportAttachmentId: number): Promise { + defaultLog.debug({ label: 'getProjectAttachmentAuthors' }); + + const sqlStatement = SQL` + SELECT + project_report_author.* + FROM + project_report_author + WHERE + project_report_attachment_id = ${reportAttachmentId} + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rows) { + throw new ApiExecuteSQLError('Failed to get project report attachment authors by reportAttachmentId', [ + 'AttachmentRepository->getProjectAttachmentAuthors', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows; + } + + /** + * Query to return all of the authors belonging to a survey report attachment + * @param {number} reportAttachmentId The ID of the report attachment + * @return {Promise} Promise resolving the report authors + * @memberof AttachmentRepository + */ + async getSurveyReportAttachmentAuthors(reportAttachmentId: number): Promise { + defaultLog.debug({ label: 'getSurveyAttachmentAuthors' }); + + const sqlStatement = SQL` + SELECT + survey_report_author.* + FROM + survey_report_author + WHERE + survey_report_attachment_id = ${reportAttachmentId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rows) { + throw new ApiExecuteSQLError('Failed to get survey report attachment authors by reportAttachmentId', [ + 'AttachmentRepository->getSurveyAttachmentAuthors', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows; + } + + async insertProjectAttachment( + file: Express.Multer.File, + projectId: number, + attachmentType: string, + key: string + ): Promise<{ id: number; revision_count: number }> { + const sqlStatement = SQL` + INSERT INTO project_attachment ( + project_id, + file_name, + file_size, + file_type, + key + ) VALUES ( + ${projectId}, + ${file.originalname}, + ${file.size}, + ${attachmentType}, + ${key} + ) + RETURNING + project_attachment_id as id, + revision_count; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response?.rows || !response?.rows[0]) { + throw new ApiExecuteSQLError('Failed to insert project attachment data', [ + 'AttachmentRepository->insertProjectAttachment', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + async updateProjectAttachment( + fileName: string, + projectId: number, + attachmentType: string + ): Promise<{ id: number; revision_count: number }> { + const sqlStatement = SQL` + UPDATE + project_attachment + SET + file_name = ${fileName}, + file_type = ${attachmentType} + WHERE + file_name = ${fileName} + AND + project_id = ${projectId} + RETURNING + project_attachment_id as id, + revision_count; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response?.rows || !response?.rows[0]) { + throw new ApiExecuteSQLError('Failed to update project attachment data', [ + 'AttachmentRepository->updateProjectAttachment', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + async getProjectAttachmentByFileName(projectId: number, fileName: string): Promise { + const sqlStatement = SQL` + SELECT + project_attachment_id as id, + file_name, + update_date, + create_date, + file_size + from + project_attachment + where + project_id = ${projectId} + and + file_name = ${fileName}; + `; + + const response = await this.connection.sql(sqlStatement); + + return response; + } + + async insertProjectReportAttachment( + fileName: string, + fileSize: number, + projectId: number, + attachmentMeta: PostReportAttachmentMetadata, + key: string + ): Promise<{ id: number; revision_count: number }> { + const sqlStatement = SQL` + INSERT INTO project_report_attachment ( + project_id, + file_name, + title, + year, + description, + file_size, + key + ) VALUES ( + ${projectId}, + ${fileName}, + ${attachmentMeta.title}, + ${attachmentMeta.year_published}, + ${attachmentMeta.description}, + ${fileSize}, + ${key} + ) + RETURNING + project_report_attachment_id as id, + revision_count; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response?.rows || !response?.rows[0]) { + throw new ApiExecuteSQLError('Failed to insert project report attachment data', [ + 'AttachmentRepository->insertProjectReportAttachment', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + async updateProjectReportAttachment( + fileName: string, + projectId: number, + attachmentMeta: PutReportAttachmentMetadata + ): Promise<{ id: number; revision_count: number }> { + const sqlStatement = SQL` + UPDATE + project_report_attachment + SET + file_name = ${fileName}, + title = ${attachmentMeta.title}, + year = ${attachmentMeta.year_published}, + description = ${attachmentMeta.description} + WHERE + file_name = ${fileName} + AND + project_id = ${projectId} + RETURNING + project_report_attachment_id as id, + revision_count; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response?.rows || !response?.rows[0]) { + throw new ApiExecuteSQLError('Failed to update project attachment data', [ + 'AttachmentRepository->updateProjectReportAttachment', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + async deleteProjectReportAttachmentAuthors(attachmentId: number): Promise { + const sqlStatement = SQL` + DELETE + FROM project_report_author + WHERE + project_report_attachment_id = ${attachmentId}; + `; + + const response = await this.connection.sql(sqlStatement); + + return response; + } + + async insertProjectReportAttachmentAuthor( + attachmentId: number, + author: { first_name: string; last_name: string } + ): Promise { + const sqlStatement = SQL` + INSERT INTO project_report_author ( + project_report_attachment_id, + first_name, + last_name + ) VALUES ( + ${attachmentId}, + ${author.first_name}, + ${author.last_name} + ); + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Failed to insert attachment report author record', [ + 'AttachmentRepository->insertProjectReportAttachmentAuthor', + 'rows was null or undefined, expected rows != null' + ]); + } + } + + async getProjectReportAttachmentByFileName(projectId: number, fileName: string): Promise { + const sqlStatement = SQL` + SELECT + project_report_attachment_id as id, + file_name, + update_date, + create_date, + file_size + from + project_report_attachment + where + project_id = ${projectId} + and + file_name = ${fileName}; + `; + + const response = await this.connection.sql(sqlStatement); + + return response; + } + + async getProjectAttachmentS3Key(projectId: number, attachmentId: number): Promise { + const sqlStatement = SQL` + SELECT + key + FROM + project_attachment + WHERE + project_id = ${projectId} + AND + project_attachment_id = ${attachmentId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response?.rows?.[0]) { + throw new ApiExecuteSQLError('Failed to get Project Attachment S3 Key', [ + 'AttachmentRepository->getProjectAttachmentS3Key', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0].key; + } + + async updateProjectReportAttachmentMetadata( + projectId: number, + attachmentId: number, + metadata: PutReportAttachmentMetadata + ): Promise { + const sqlStatement = SQL` + UPDATE + project_report_attachment + SET + title = ${metadata.title}, + year = ${metadata.year_published}, + description = ${metadata.description} + WHERE + project_id = ${projectId} + AND + project_report_attachment_id = ${attachmentId} + AND + revision_count = ${metadata.revision_count}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Failed to update Project Report Attachment Metadata', [ + 'AttachmentRepository->updateProjectReportAttachmentMetadata', + 'rows was null or undefined, expected rows != null' + ]); + } + } + + async getProjectReportAttachmentS3Key(projectId: number, attachmentId: number): Promise { + const sqlStatement = SQL` + SELECT + key + FROM + project_report_attachment + WHERE + project_id = ${projectId} + AND + project_report_attachment_id = ${attachmentId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response?.rows?.[0]) { + throw new ApiExecuteSQLError('Failed to get Project Report Attachment S3 Key', [ + 'AttachmentRepository->getProjectReportAttachmentS3Key', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0].key; + } + + async deleteProjectAttachment(attachmentId: number): Promise<{ key: string }> { + const sqlStatement = SQL` + DELETE + from project_attachment + WHERE + project_attachment_id = ${attachmentId} + RETURNING + key; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Failed to delete Project Attachment by id', [ + 'AttachmentRepository->deleteProjectAttachment', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + async deleteProjectReportAttachment(attachmentId: number): Promise<{ key: string }> { + const sqlStatement = SQL` + DELETE + from project_report_attachment + WHERE + project_report_attachment_id = ${attachmentId} + RETURNING + key; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Failed to delete Project Report Attachment by id', [ + 'AttachmentRepository->deleteProjectReportAttachment', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + async insertSurveyReportAttachment( + fileName: string, + fileSize: number, + surveyId: number, + attachmentMeta: PostReportAttachmentMetadata, + key: string + ): Promise<{ id: number; revision_count: number }> { + const sqlStatement = SQL` + INSERT INTO survey_report_attachment ( + survey_id, + file_name, + title, + year, + description, + file_size, + key + ) VALUES ( + ${surveyId}, + ${fileName}, + ${attachmentMeta.title}, + ${attachmentMeta.year_published}, + ${attachmentMeta.description}, + ${fileSize}, + ${key} + ) + RETURNING + survey_report_attachment_id as id, + revision_count; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response?.rows?.[0]) { + throw new ApiExecuteSQLError('Failed to insert survey report attachment', [ + 'AttachmentRepository->insertSurveyReportAttachment', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + async updateSurveyReportAttachment( + fileName: string, + surveyId: number, + attachmentMeta: PutReportAttachmentMetadata + ): Promise<{ id: number; revision_count: number }> { + const sqlStatement = SQL` + UPDATE + survey_report_attachment + SET + file_name = ${fileName}, + title = ${attachmentMeta.title}, + year = ${attachmentMeta.year_published}, + description = ${attachmentMeta.description} + WHERE + file_name = ${fileName} + AND + survey_id = ${surveyId} + RETURNING + survey_report_attachment_id as id, + revision_count; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response?.rows?.[0]) { + throw new ApiExecuteSQLError('Failed to update survey report attachment', [ + 'AttachmentRepository->updateSurveyReportAttachment', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + async deleteSurveyReportAttachmentAuthors(attachmentId: number): Promise { + const sqlStatement = SQL` + DELETE FROM + survey_report_author + WHERE + survey_report_attachment_id = ${attachmentId}; + `; + + await this.connection.sql(sqlStatement); + } + + async insertSurveyReportAttachmentAuthor( + attachmentId: number, + author: { first_name: string; last_name: string } + ): Promise { + const sqlStatement = SQL` + INSERT INTO survey_report_author ( + survey_report_attachment_id, + first_name, + last_name + ) VALUES ( + ${attachmentId}, + ${author.first_name}, + ${author.last_name} + ); + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Failed to insert survey report attachment', [ + 'AttachmentRepository->insertSurveyReportAttachmentAuthor', + 'rows was null or undefined, expected rows != null' + ]); + } + } + + async getSurveyReportAttachmentByFileName(surveyId: number, fileName: string): Promise { + const sqlStatement = SQL` + SELECT + survey_report_attachment_id as id, + file_name, + update_date, + create_date, + file_size + from + survey_report_attachment + where + survey_id = ${surveyId} + and + file_name = ${fileName}; + `; + + const response = await this.connection.sql(sqlStatement); + + return response; + } + + async deleteSurveyReportAttachment(attachmentId: number): Promise<{ key: string }> { + const sqlStatement = SQL` + DELETE + from survey_report_attachment + WHERE + survey_report_attachment_id = ${attachmentId} + RETURNING + key; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Failed to delete Survey Report Attachment', [ + 'AttachmentRepository->deleteSurveyReportAttachment', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + async deleteSurveyAttachment(attachmentId: number): Promise<{ key: string }> { + const sqlStatement = SQL` + DELETE + from survey_attachment + WHERE + survey_attachment_id = ${attachmentId} + RETURNING + key; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Failed to delete Survey Attachment', [ + 'AttachmentRepository->deleteSurveyAttachment', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + async getSurveyAttachmentS3Key(surveyId: number, attachmentId: number): Promise { + const sqlStatement = SQL` + SELECT + key + FROM + survey_attachment + WHERE + survey_id = ${surveyId} + AND + survey_attachment_id = ${attachmentId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response?.rows?.[0]) { + throw new ApiExecuteSQLError('Failed to get Survey Attachment S3 key', [ + 'AttachmentRepository->getSurveyAttachmentS3Key', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0].key; + } + + async getSurveyReportAttachmentS3Key(surveyId: number, attachmentId: number): Promise { + const sqlStatement = SQL` + SELECT + key + FROM + survey_report_attachment + WHERE + survey_id = ${surveyId} + AND + survey_report_attachment_id = ${attachmentId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response?.rows?.[0]) { + throw new ApiExecuteSQLError('Failed to get Survey Report Attachment S3 key', [ + 'AttachmentRepository->getSurveyReportAttachmentS3Key', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0].key; + } + + async updateSurveyReportAttachmentMetadata( + surveyId: number, + attachmentId: number, + metadata: PutReportAttachmentMetadata + ): Promise { + const sqlStatement = SQL` + UPDATE + survey_report_attachment + SET + title = ${metadata.title}, + year = ${metadata.year_published}, + description = ${metadata.description} + WHERE + survey_id = ${surveyId} + AND + survey_report_attachment_id = ${attachmentId} + AND + revision_count = ${metadata.revision_count}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Failed to update Survey Report Attachment metadata', [ + 'AttachmentRepository->updateSurveyReportAttachmentMetadata', + 'rows was null or undefined, expected rows != null' + ]); + } + } + + async updateSurveyAttachment( + surveyId: number, + fileName: string, + fileType: string + ): Promise<{ id: number; revision_count: number }> { + const sqlStatement = SQL` + UPDATE + survey_attachment + SET + file_name = ${fileName}, + file_type = ${fileType} + WHERE + file_name = ${fileName} + AND + survey_id = ${surveyId} + RETURNING + survey_attachment_id as id, + revision_count; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response?.rows?.[0]) { + throw new ApiExecuteSQLError('Failed to update survey attachment data', [ + 'AttachmentRepository->updateSurveyAttachment', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + async insertSurveyAttachment( + fileName: string, + fileSize: number, + fileType: string, + surveyId: number, + key: string + ): Promise<{ id: number; revision_count: number }> { + const sqlStatement = SQL` + INSERT INTO survey_attachment ( + survey_id, + file_name, + file_size, + file_type, + key + ) VALUES ( + ${surveyId}, + ${fileName}, + ${fileSize}, + ${fileType}, + ${key} + ) + RETURNING + survey_attachment_id as id, + revision_count; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response?.rows?.[0]) { + throw new ApiExecuteSQLError('Failed to insert survey attachment data', [ + 'AttachmentRepository->insertSurveyAttachment', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + async getSurveyAttachmentByFileName(fileName: string, surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + survey_attachment_id as id, + file_name, + update_date, + create_date, + file_size + from + survey_attachment + where + survey_id = ${surveyId} + and + file_name = ${fileName}; + `; + + const response = await this.connection.sql(sqlStatement); + + return response; + } +} diff --git a/api/src/repositories/base-repository.ts b/api/src/repositories/base-repository.ts new file mode 100644 index 0000000000..bde6105710 --- /dev/null +++ b/api/src/repositories/base-repository.ts @@ -0,0 +1,15 @@ +import { IDBConnection } from '../database/db'; + +/** + * Base class for repositories. + * + * @export + * @class BaseRepository + */ +export class BaseRepository { + connection: IDBConnection; + + constructor(connection: IDBConnection) { + this.connection = connection; + } +} diff --git a/api/src/repositories/error-repository.test.ts b/api/src/repositories/error-repository.test.ts new file mode 100644 index 0000000000..8eceb4a0b3 --- /dev/null +++ b/api/src/repositories/error-repository.test.ts @@ -0,0 +1,85 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import { QueryResult } from 'pg'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { SUBMISSION_MESSAGE_TYPE, SUBMISSION_STATUS_TYPE } from '../constants/status'; +import { ApiError } from '../errors/api-error'; +import { getMockDBConnection } from '../__mocks__/db'; +import { ErrorRepository } from './error-repository'; + +chai.use(sinonChai); + +describe('OccurrenceRepository', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('insertSubmissionStatus', () => { + it('should return submission ids if valid', async () => { + const returnValue = { submission_status_id: 1, submission_status_type_id: 2 }; + const mockResponse = ({ rows: [returnValue], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ + sql: async () => { + return mockResponse; + } + }); + const repo = new ErrorRepository(dbConnection); + const response = await repo.insertSubmissionStatus(1, SUBMISSION_STATUS_TYPE.SUBMITTED); + + expect(response).to.eql(returnValue); + }); + + it('should throw `Failed to insert` error', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ + sql: async () => { + return mockResponse; + } + }); + const repo = new ErrorRepository(dbConnection); + try { + await repo.insertSubmissionStatus(1, SUBMISSION_STATUS_TYPE.SUBMITTED); + expect.fail(); + } catch (error) { + expect((error as ApiError).message).to.equal('Failed to insert submission status record'); + } + }); + }); + + describe('insertSubmissionMessage', () => { + it('should return submission ids if valid', async () => { + const returnValue = { submission_message_id: 1, submission_message_type_id: 2 }; + const mockResponse = ({ rows: [returnValue], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ + sql: async () => { + return mockResponse; + } + }); + const repo = new ErrorRepository(dbConnection); + const response = await repo.insertSubmissionMessage( + 1, + SUBMISSION_MESSAGE_TYPE.FAILED_GET_TRANSFORMATION_RULES, + 'msg' + ); + + expect(response).to.eql(returnValue); + }); + + it('should throw `Failed to insert` error', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ + sql: async () => { + return mockResponse; + } + }); + const repo = new ErrorRepository(dbConnection); + try { + await repo.insertSubmissionMessage(1, SUBMISSION_MESSAGE_TYPE.FAILED_GET_TRANSFORMATION_RULES, 'msg'); + expect.fail(); + } catch (error) { + expect((error as ApiError).message).to.equal('Failed to insert submission message record'); + } + }); + }); +}); diff --git a/api/src/repositories/error-repository.ts b/api/src/repositories/error-repository.ts new file mode 100644 index 0000000000..094b5514b8 --- /dev/null +++ b/api/src/repositories/error-repository.ts @@ -0,0 +1,112 @@ +import SQL from 'sql-template-strings'; +import { SUBMISSION_MESSAGE_TYPE, SUBMISSION_STATUS_TYPE } from '../constants/status'; +import { ApiExecuteSQLError } from '../errors/api-error'; +import { BaseRepository } from './base-repository'; + +/** + * A repository class for accessing permit data. + * + * @export + * @class PermitRepository + * @extends {BaseRepository} + */ +export class ErrorRepository extends BaseRepository { + /** + * Insert a new submission status record. + * + * @param {number} submissionId + * @param {SUBMISSION_STATUS_TYPE} submissionStatusType + * @return {*} {Promise<{ submission_status_id: number; submission_status_type_id: number }>} + * @memberof SubmissionRepository + */ + async insertSubmissionStatus( + submissionId: number, + submissionStatusType: SUBMISSION_STATUS_TYPE + ): Promise<{ submission_status_id: number; submission_status_type_id: number }> { + const sqlStatement = SQL` + INSERT INTO submission_status ( + occurrence_submission_id, + submission_status_type_id, + event_timestamp + ) VALUES ( + ${submissionId}, + ( + SELECT + submission_status_type_id + FROM + submission_status_type + WHERE + name = ${submissionStatusType} + ), + now() + ) + RETURNING + submission_status_id, + submission_status_type_id; + `; + + const response = await this.connection.sql<{ submission_status_id: number; submission_status_type_id: number }>( + sqlStatement + ); + + if (response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to insert submission status record', [ + 'ErrorRepository->insertSubmissionStatus', + 'rowCount was null or undefined, expected rowCount = 1' + ]); + } + + return response.rows[0]; + } + + /** + * Insert a submission message record. + * + * @param {number} submissionStatusId + * @param {SUBMISSION_MESSAGE_TYPE} submissionMessageType + * @return {*} {Promise<{ submission_message_id: number; submission_message_type_id: number }>} + * @memberof SubmissionRepository + */ + async insertSubmissionMessage( + submissionStatusId: number, + submissionMessageType: SUBMISSION_MESSAGE_TYPE, + submissionMessage: string + ): Promise<{ submission_message_id: number; submission_message_type_id: number }> { + const sqlStatement = SQL` + INSERT INTO submission_message ( + submission_status_id, + submission_message_type_id, + event_timestamp, + message + ) VALUES ( + ${submissionStatusId}, + ( + SELECT + submission_message_type_id + FROM + submission_message_type + WHERE + name = ${submissionMessageType} + ), + now(), + ${submissionMessage} + ) + RETURNING + submission_message_id, + submission_message_type_id; + `; + + const response = await this.connection.sql<{ submission_message_id: number; submission_message_type_id: number }>( + sqlStatement + ); + + if (response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to insert submission message record', [ + 'ErrorRepository->insertSubmissionMessage', + 'rowCount was null or undefined, expected rowCount = 1' + ]); + } + + return response.rows[0]; + } +} diff --git a/api/src/repositories/occurrence-repository.test.ts b/api/src/repositories/occurrence-repository.test.ts new file mode 100644 index 0000000000..aa5a6a019b --- /dev/null +++ b/api/src/repositories/occurrence-repository.test.ts @@ -0,0 +1,162 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import { QueryResult } from 'pg'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { SUBMISSION_MESSAGE_TYPE } from '../constants/status'; +import { HTTP400 } from '../errors/http-error'; +import { OccurrenceRepository } from '../repositories/occurrence-repository'; +import { SubmissionError } from '../utils/submission-error'; +import { getMockDBConnection } from '../__mocks__/db'; + +chai.use(sinonChai); + +describe('OccurrenceRepository', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('getOccurrenceSubmission', () => { + it('should return a submission', async () => { + const mockResponse = ({ rows: [{ occurrence_submission_id: 1 }] } as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: async () => { + return mockResponse; + } + }); + const repo = new OccurrenceRepository(dbConnection); + const response = await repo.getOccurrenceSubmission(1); + + expect(response).to.not.be.null; + expect(response).to.eql({ occurrence_submission_id: 1 }); + }); + }); + + describe('getOccurrencesForView', () => { + it('should return list of occurrences', async () => { + const mockResponse = ({ rows: [{ occurrence_id: 1 }] } as any) as Promise>; + const dbConnection = getMockDBConnection({ + knex: async () => { + return mockResponse; + } + }); + const repo = new OccurrenceRepository(dbConnection); + const response = await repo.getOccurrencesForView(1); + + expect(response).to.have.length.greaterThan(0); + }); + }); + + describe('updateSurveyOccurrenceSubmissionWithOutputKey', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ rowCount: 1, rows: [{ id: 1 }] } as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: () => mockResponse + }); + const repo = new OccurrenceRepository(dbConnection); + const response = await repo.updateSurveyOccurrenceSubmissionWithOutputKey(1, 'fileName', 'outputkey'); + expect(response).to.be.eql({ id: 1 }); + }); + + it('should throw `Failed to update` error', async () => { + const mockResponse = ({} as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: async () => { + return mockResponse; + } + }); + const repo = new OccurrenceRepository(dbConnection); + try { + await repo.updateSurveyOccurrenceSubmissionWithOutputKey(1, 'file', 'key'); + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.equal('Rejected'); + } + }); + }); + + describe('updateDWCSourceForOccurrenceSubmission', () => { + it('should return submission id', async () => { + const mockResponse = ({ rows: [{ occurrence_submission_id: 1 }], rowCount: 1 } as any) as Promise< + QueryResult + >; + const dbConnection = getMockDBConnection({ + sql: async () => { + return mockResponse; + } + }); + + const repo = new OccurrenceRepository(dbConnection); + const id = await repo.updateDWCSourceForOccurrenceSubmission(1, '{}'); + expect(id).to.be.eql(1); + }); + + it('should throw Failed to update occurrence submission error', async () => { + const mockResponse = ({ rows: [] } as any) as Promise>; + const dbConnection = getMockDBConnection({ + sql: async () => { + return mockResponse; + } + }); + + try { + const repo = new OccurrenceRepository(dbConnection); + await repo.updateDWCSourceForOccurrenceSubmission(1, '{}'); + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + expect((error as SubmissionError).submissionMessages[0].type).to.be.eql( + SUBMISSION_MESSAGE_TYPE.FAILED_UPDATE_OCCURRENCE_SUBMISSION + ); + } + }); + }); + + describe('findSpatialMetadataBySubmissionSpatialComponentIds', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ rowCount: 1, rows: [{ id: 1 }] } as any) as Promise>; + const dbConnection = getMockDBConnection({ + knex: () => mockResponse + }); + const repo = new OccurrenceRepository(dbConnection); + const response = await repo.findSpatialMetadataBySubmissionSpatialComponentIds([1]); + expect(response).to.be.eql([{ id: 1 }]); + }); + }); + + describe('softDeleteOccurrenceSubmission', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ rowCount: 1, rows: [{ id: 1 }] } as any) as Promise>; + const dbConnection = getMockDBConnection({ + sql: () => mockResponse + }); + const repo = new OccurrenceRepository(dbConnection); + const response = await repo.softDeleteOccurrenceSubmission(1); + expect(response).to.be.eql(undefined); + }); + }); + + describe('deleteSubmissionSpatialComponent', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ rowCount: 1, rows: [{ id: 1 }] } as any) as Promise>; + const dbConnection = getMockDBConnection({ + sql: () => mockResponse + }); + const repo = new OccurrenceRepository(dbConnection); + const response = await repo.deleteSubmissionSpatialComponent(1); + expect(response).to.be.eql([{ id: 1 }]); + }); + }); + + describe('deleteSpatialTransformSubmission', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ rowCount: 1, rows: [{ id: 1 }] } as any) as Promise>; + const dbConnection = getMockDBConnection({ + sql: () => mockResponse + }); + const repo = new OccurrenceRepository(dbConnection); + const response = await repo.deleteSpatialTransformSubmission(1); + expect(response).to.be.eql(undefined); + }); + }); +}); diff --git a/api/src/repositories/occurrence-repository.ts b/api/src/repositories/occurrence-repository.ts new file mode 100644 index 0000000000..52d4b4417c --- /dev/null +++ b/api/src/repositories/occurrence-repository.ts @@ -0,0 +1,304 @@ +import { FeatureCollection, GeoJsonProperties } from 'geojson'; +import { Knex } from 'knex'; +import SQL from 'sql-template-strings'; +import { SUBMISSION_MESSAGE_TYPE } from '../constants/status'; +import { getKnex } from '../database/db'; +import { appendSQLColumnsEqualValues, AppendSQLColumnsEqualValues } from '../utils/sql-utils'; +import { SubmissionErrorFromMessageType } from '../utils/submission-error'; +import { BaseRepository } from './base-repository'; + +export interface IOccurrenceSubmission { + occurrence_submission_id: number; + survey_id: number; + template_methodology_species_id: number; + source: string; + input_key: string; + input_file_name: string; + output_key: string; + output_file_name: string; + darwin_core_source: Record; +} + +export type EmptyObject = Record; +export interface ITaxaData { + associated_taxa?: string; + vernacular_name?: string; + submission_spatial_component_id: number; +} + +export interface ISubmissionSpatialSearchResponseRow { + taxa_data: ITaxaData[]; + spatial_component: { + spatial_data: FeatureCollection | EmptyObject; + }; +} + +export interface ISpatialComponentFeaturePropertiesRow { + spatial_component_properties: GeoJsonProperties; +} + +export class OccurrenceRepository extends BaseRepository { + async updateDWCSourceForOccurrenceSubmission(submissionId: number, jsonData: string): Promise { + try { + const sql = SQL` + UPDATE + occurrence_submission + SET + darwin_core_source = ${jsonData} + WHERE + occurrence_submission_id = ${submissionId} + RETURNING + occurrence_submission_id; + `; + const response = await this.connection.sql<{ occurrence_submission_id: number }>(sql); + + if (!response.rowCount) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_UPDATE_OCCURRENCE_SUBMISSION); + } + return response.rows[0].occurrence_submission_id; + } catch (error) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_UPDATE_OCCURRENCE_SUBMISSION); + } + } + + /** + * Gets an `occurrence_submission` for an id or null if nothing is found + * + * @param {number} submissionId + * @return {*} {Promise} + */ + async getOccurrenceSubmission(submissionId: number): Promise { + const sql = SQL` + SELECT + * + FROM + occurrence_submission + WHERE + occurrence_submission_id = ${submissionId}; + `; + + const response = await this.connection.query(sql.text, sql.values); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_GET_OCCURRENCE); + } + return result; + } + + /** + * Gets a list of `occurrence` for a `occurrence_submission_id`. + * + * @param {number} submissionId + * @return {*} {Promise} + */ + async getOccurrencesForView(submissionId: number): Promise { + const knex = getKnex(); + + const queryBuilder = knex + .queryBuilder() + .with('distinct_geographic_points', this._withDistinctGeographicPoints) + .with('with_filtered_spatial_component', (qb1) => { + // Get the spatial components that match the search filters + qb1 + .select( + knex.raw( + "jsonb_array_elements(ssc.spatial_component -> 'features') #> '{properties, dwc, datasetID}' as dataset_id" + ), + knex.raw( + "jsonb_array_elements(ssc.spatial_component -> 'features') #> '{properties, dwc, associatedTaxa}' as associated_taxa" + ), + knex.raw( + "jsonb_array_elements(ssc.spatial_component -> 'features') #> '{properties, dwc, vernacularName}' as vernacular_name" + ), + 'ssc.submission_spatial_component_id', + 'ssc.occurrence_submission_id', + 'ssc.spatial_component', + 'ssc.geography' + ) + .from('submission_spatial_component as ssc') + .leftJoin('distinct_geographic_points as p', 'p.geography', 'ssc.geography') + .groupBy('ssc.submission_spatial_component_id') + .groupBy('ssc.occurrence_submission_id') + .groupBy('ssc.spatial_component') + .groupBy('ssc.geography'); + + qb1.where((qb2) => { + qb2.whereRaw( + `occurrence_submission_id in (select occurrence_submission_id from submission_spatial_component where occurrence_submission_id in (${submissionId}))` + ); + }); + }) + .with('with_coalesced_spatial_components', (qb3) => { + qb3 + .select( + // Select the non-secure spatial component from the search results + 'submission_spatial_component_id', + 'occurrence_submission_id', + 'geography', + knex.raw( + `jsonb_build_object( 'submission_spatial_component_id', wfsc.submission_spatial_component_id, 'associated_taxa', wfsc.associated_taxa, 'vernacular_name', wfsc.vernacular_name) taxa_data_object` + ), + knex.raw(`jsonb_build_object( 'spatial_data', wfsc.spatial_component) spatial_component`) + ) + .from(knex.raw('with_filtered_spatial_component as wfsc')); + }) + .select( + knex.raw('array_agg(submission_spatial_component_id) as submission_spatial_component_ids'), + knex.raw('array_agg(taxa_data_object) as taxa_data'), + knex.raw('(array_agg(spatial_component))[1] as spatial_component'), + 'geography' + ) + .from('with_coalesced_spatial_components') + // Filter out secure spatial components that have no spatial representation + // The user is not allowed to see any aspect of these particular spatial components + .whereRaw("spatial_component->'spatial_data' != '{}'") + .groupBy('geography'); + + const response = await this.connection.knex(queryBuilder); + + return response.rows; + } + + _withDistinctGeographicPoints(qb1: Knex.QueryBuilder) { + qb1 + .distinct() + .select('geography') + .from('submission_spatial_component') + .whereRaw(`geometrytype(geography) = 'POINT'`) + .whereRaw(`jsonb_path_exists(spatial_component,'$.features[*] \\? (@.properties.type == "Occurrence")')`); + } + + /** + * Update existing `occurrence_submission` record with outputKey and outputFileName. + * + * @param {number} submissionId + * @param {string} outputFileName + * @param {string} outputKey + * @return {*} {Promise} + */ + async updateSurveyOccurrenceSubmissionWithOutputKey( + submissionId: number, + outputFileName: string, + outputKey: string + ): Promise { + const items: AppendSQLColumnsEqualValues[] = []; + + items.push({ columnName: 'output_file_name', columnValue: outputFileName }); + + items.push({ columnName: 'output_key', columnValue: outputKey }); + + const sqlStatement = SQL` + UPDATE occurrence_submission + SET + `; + + appendSQLColumnsEqualValues(sqlStatement, items); + + sqlStatement.append(SQL` + WHERE + occurrence_submission_id = ${submissionId} + RETURNING occurrence_submission_id as id; + `); + + const updateResponse = await await this.connection.query(sqlStatement.text, sqlStatement.values); + + if (!updateResponse || !updateResponse.rowCount) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_UPDATE_OCCURRENCE_SUBMISSION); + } + + return updateResponse.rows[0]; + } + + /** + * Query builder to find spatial component from a given submission id + * + * @param {number} submission_spatial_component_id + * @return {*} {Promise} + * @memberof SpatialRepository + */ + async findSpatialMetadataBySubmissionSpatialComponentIds( + submission_spatial_component_ids: number[] + ): Promise { + const knex = getKnex(); + const queryBuilder = knex + .queryBuilder() + .with('with_filtered_spatial_component', (qb1) => { + // Get the spatial components that match the search filters + qb1 + .select() + .from('submission_spatial_component as ssc') + .whereIn('submission_spatial_component_id', submission_spatial_component_ids); + }) + .select( + // Select the non-secure spatial component from the search results + knex.raw( + `jsonb_array_elements(wfsc.spatial_component -> 'features') #> '{properties}' as spatial_component_properties` + ) + ) + .from(knex.raw('with_filtered_spatial_component as wfsc')); + + const response = await this.connection.knex(queryBuilder); + + return response.rows; + } + + /** + * Soft delete Occurrence Submission, setting a delete Timestamp + * + * @param {number} occurrenceSubmissionId + * @memberof OccurrenceRepository + */ + async softDeleteOccurrenceSubmission(occurrenceSubmissionId: number) { + const sqlStatement = SQL` + UPDATE occurrence_submission + SET delete_timestamp = now() + WHERE occurrence_submission_id = ${occurrenceSubmissionId}; + `; + + await this.connection.sql(sqlStatement); + } + + /** + * Delete all spatial components by occurrence Id + * + * @param {number} occurrenceSubmissionId + * @return {*} {Promise<{ submission_spatial_component_id: number }[]>} + * @memberof OccurrenceRepository + */ + async deleteSubmissionSpatialComponent( + occurrenceSubmissionId: number + ): Promise<{ submission_spatial_component_id: number }[]> { + const sqlDeleteStatement = SQL` + DELETE FROM + submission_spatial_component + WHERE + occurrence_submission_id = ${occurrenceSubmissionId} + RETURNING + submission_spatial_component_id; + `; + + return (await this.connection.sql<{ submission_spatial_component_id: number }>(sqlDeleteStatement)).rows; + } + + /** + * Delete all spatial transform history by occurrence Id + * + * @param {number} occurrenceSubmissionId + * @return {*} {Promise} + * @memberof OccurrenceRepository + */ + async deleteSpatialTransformSubmission(occurrenceSubmissionId: number): Promise { + const sqlDeleteStatement = SQL` + DELETE FROM spatial_transform_submission + USING spatial_transform_submission as sts + LEFT OUTER JOIN submission_spatial_component as ssc ON + sts.submission_spatial_component_id = ssc.submission_spatial_component_id + WHERE + ssc.occurrence_submission_id = ${occurrenceSubmissionId}; + `; + + await this.connection.sql(sqlDeleteStatement); + } +} diff --git a/api/src/repositories/permit-repository.test.ts b/api/src/repositories/permit-repository.test.ts new file mode 100644 index 0000000000..12a33364a9 --- /dev/null +++ b/api/src/repositories/permit-repository.test.ts @@ -0,0 +1,261 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import { QueryResult } from 'pg'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { ApiError } from '../errors/api-error'; +import { getMockDBConnection } from '../__mocks__/db'; +import { IPermitModel, PermitRepository } from './permit-repository'; + +chai.use(sinonChai); + +describe('PermitRepository', () => { + describe('getPermitBySurveyId', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return an array of survey permits by survey id', async () => { + const mockQueryResponse = ({ + rowCount: 1, + rows: [{ permit_id: 2 }] + } as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new PermitRepository(mockDBConnection); + + const response = await permitRepository.getPermitBySurveyId(1); + + expect(response).to.eql([{ permit_id: 2 }]); + }); + + it('should throw an error if no permits were found', async () => { + const mockQueryResponse = ({} as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new PermitRepository(mockDBConnection); + + try { + await permitRepository.getPermitBySurveyId(1); + expect.fail(); + } catch (error) { + expect((error as ApiError).message).to.equal('Failed to get permit by Id'); + } + }); + }); + + describe('getPermitByUser', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return an array of survey permits by user', async () => { + const mockQueryResponse = ({ + rowCount: 1, + rows: [{ permit_id: 2 }] + } as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new PermitRepository(mockDBConnection); + + const response = await permitRepository.getPermitByUser(1); + + expect(response).to.eql([{ permit_id: 2 }]); + }); + + it('should throw an error if no permits were found', async () => { + const mockQueryResponse = ({} as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new PermitRepository(mockDBConnection); + + try { + await permitRepository.getPermitByUser(1); + expect.fail(); + } catch (error) { + expect((error as ApiError).message).to.equal('Failed to get permit by user Id'); + } + }); + }); + + describe('getAllPermits', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return an array containing all survey permits', async () => { + const mockQueryResponse = ({ + rowCount: 1, + rows: [{ permit_id: 2 }] + } as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new PermitRepository(mockDBConnection); + + const response = await permitRepository.getAllPermits(); + + expect(response).to.eql([{ permit_id: 2 }]); + }); + + it('should throw an error if no permits were found', async () => { + const mockQueryResponse = ({} as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new PermitRepository(mockDBConnection); + + try { + await permitRepository.getAllPermits(); + expect.fail(); + } catch (error) { + expect((error as ApiError).message).to.equal('Failed to get all permits'); + } + }); + }); + + describe('updateSurveyPermit', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return an array of survey permits by user', async () => { + const mockQueryResponse = ({ + rowCount: 1, + rows: [{ permit_id: 2 }] + } as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new PermitRepository(mockDBConnection); + + const response = await permitRepository.updateSurveyPermit(1, 2, '12345', 'permit type'); + + expect(response).to.equal(2); + }); + + it('should throw an error if update failed', async () => { + const mockQueryResponse = ({ + rowCount: 0, + rows: [] + } as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new PermitRepository(mockDBConnection); + + try { + await permitRepository.updateSurveyPermit(1, 2, '12345', 'permit type'); + expect.fail(); + } catch (error) { + expect((error as ApiError).message).to.equal('Failed to get update Survey Permit'); + } + }); + }); + + describe('createSurveyPermit', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return an array of survey permits by user', async () => { + const mockQueryResponse = ({ + rowCount: 1, + rows: [{ permit_id: 2 }] + } as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new PermitRepository(mockDBConnection); + + const response = await permitRepository.createSurveyPermit(1, '12345', 'permit type'); + + expect(response).to.equal(2); + }); + + it('should throw an error if create failed', async () => { + const mockQueryResponse = ({ + rowCount: 0, + rows: [] + } as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new PermitRepository(mockDBConnection); + + try { + await permitRepository.createSurveyPermit(1, '12345', 'permit type'); + expect.fail(); + } catch (error) { + expect((error as ApiError).message).to.equal('Failed to get Create Survey Permit'); + } + }); + }); + + describe('deleteSurveyPermit', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return an array of survey permits by user', async () => { + const mockQueryResponse = ({ + rowCount: 1, + rows: [{ permit_id: 2 }] + } as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new PermitRepository(mockDBConnection); + + const response = await permitRepository.deleteSurveyPermit(1, 2); + + expect(response).to.equal(2); + }); + + it('should throw an error if delete failed', async () => { + const mockQueryResponse = ({ + rowCount: 0, + rows: [] + } as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new PermitRepository(mockDBConnection); + + try { + await permitRepository.deleteSurveyPermit(1, 2); + expect.fail(); + } catch (error) { + expect((error as ApiError).message).to.equal('Failed to get Delete Survey Permit'); + } + }); + }); +}); diff --git a/api/src/repositories/permit-repository.ts b/api/src/repositories/permit-repository.ts new file mode 100644 index 0000000000..9c51104619 --- /dev/null +++ b/api/src/repositories/permit-repository.ts @@ -0,0 +1,241 @@ +import SQL from 'sql-template-strings'; +import { PROJECT_ROLE } from '../constants/roles'; +import { ApiExecuteSQLError } from '../errors/api-error'; +import { BaseRepository } from './base-repository'; + +export interface IPermitModel { + permit_id: number; + survey_id: number | null; + number: string; + type: string; + create_date: string; + create_user: number; + update_date: string | null; + update_user: number | null; + revision_count: number; +} + +/** + * A repository class for accessing permit data. + * + * @export + * @class PermitRepository + * @extends {BaseRepository} + */ +export class PermitRepository extends BaseRepository { + /** + * Fetch permit records by survey_id. + * + * @param {number} surveyId + * @return {*} {Promise} + * @memberof PermitRepository + */ + async getPermitBySurveyId(surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + p.* + FROM + permit p + WHERE + p.survey_id = ${surveyId} + ; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get permit by Id', [ + 'PermitRepository->getPermitBySurveyId', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result; + } + + /** + * Fetch permit records by user. + * + * @param + * @return {*} {Promise} + * @memberof PermitRepository + */ + async getPermitByUser(systemUserId: number): Promise { + const sqlStatement = SQL` + SELECT + p.* + FROM + permit p + , survey s + , project p2 + , project_participation pp + , project_role pr + WHERE + p.survey_id = s.survey_id + AND + s.project_id = p2.project_id + AND + p2.project_id = pp.project_id + AND + pr."name" in ('${PROJECT_ROLE.PROJECT_LEAD}', '${PROJECT_ROLE.PROJECT_EDITOR}') + AND + pp.project_role_id = pr.project_role_id + AND + pp.system_user_id = ${systemUserId}; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get permit by user Id', [ + 'PermitRepository->getPermitByUser', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result; + } + + /** + * Fetch all permit records. + * + * @param + * @return {*} {Promise} + * @memberof PermitRepository + */ + async getAllPermits(): Promise { + const sqlStatement = SQL` + SELECT + p.* + FROM + permit p; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get all permits', [ + 'PermitRepository->getAllPermits', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows; + } + + /** + * Update survey permit. + * + * @param {number} surveyId + * @param {number} permitId + * @param {string} permitNumber + * @param {string} permitType + * @return {*} number + * @memberof PermitRepository + */ + async updateSurveyPermit( + surveyId: number, + permitId: number, + permitNumber: string, + permitType: string + ): Promise { + const sqlStatement = SQL` + UPDATE permit + SET + "number" = ${permitNumber} + , type = ${permitType} + WHERE + permit_id = ${permitId} + AND + survey_id = ${surveyId} + RETURNING permit_id + ; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get update Survey Permit', [ + 'PermitRepository->updateSurveyPermit', + 'row[0] was null or undefined, expected row[0] != null' + ]); + } + + return result.permit_id; + } + + /** + * Create survey permit. + * + * @param {number} surveyId + * @param {string} permitNumber + * @param {string} permitType + * @return {*} number + * @memberof PermitRepository + */ + async createSurveyPermit(surveyId: number, permitNumber: string, permitType: string): Promise { + const sqlStatement = SQL` + INSERT INTO + permit (survey_id, "number", type) + VALUES + (${surveyId}, ${permitNumber}, ${permitType}) + RETURNING permit_id + ; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get Create Survey Permit', [ + 'PermitRepository->createSurveyPermit', + 'row[0] was null or undefined, expected row[0] != null' + ]); + } + + return result.permit_id; + } + + /** + * Delete survey permit. + * + * @param {number} surveyId + * @param {number} permitId + * @return {*} number + * @memberof PermitRepository + */ + async deleteSurveyPermit(surveyId: number, permitId: number): Promise { + const sqlStatement = SQL` + DELETE FROM + permit + WHERE + permit_id = ${permitId} + AND + survey_id = ${surveyId} + RETURNING permit_id + ; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get Delete Survey Permit', [ + 'PermitRepository->deleteSurveyPermit', + 'row[0] was null or undefined, expected row[0] != null' + ]); + } + + return result.permit_id; + } +} diff --git a/api/src/repositories/project-repository.test.ts b/api/src/repositories/project-repository.test.ts new file mode 100644 index 0000000000..33637be950 --- /dev/null +++ b/api/src/repositories/project-repository.test.ts @@ -0,0 +1,1154 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import { QueryResult } from 'pg'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { ApiError } from '../errors/api-error'; +import { PostFundingSource, PostProjectObject } from '../models/project-create'; +import { PutFundingSource } from '../models/project-update'; +import { + GetAttachmentsData, + GetCoordinatorData, + GetFundingData, + GetIUCNClassificationData, + GetLocationData, + GetObjectivesData, + GetProjectData, + GetReportAttachmentsData +} from '../models/project-view'; +import { getMockDBConnection } from '../__mocks__/db'; +import { ProjectRepository } from './project-repository'; + +chai.use(sinonChai); + +describe('ProjectRepository', () => { + describe('getProjectFundingSourceIds', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return an array of project funding source ids', async () => { + const mockQueryResponse = ({ + rowCount: 1, + rows: [{ project_funding_source_id: 2 }] + } as unknown) as QueryResult<{ + project_funding_source_id: number; + }>; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new ProjectRepository(mockDBConnection); + + const response = await permitRepository.getProjectFundingSourceIds(1); + + expect(response).to.eql([{ project_funding_source_id: 2 }]); + }); + + it('should throw an error if no funding were found', async () => { + const mockQueryResponse = ({} as unknown) as QueryResult<{ + project_funding_source_id: number; + }>; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new ProjectRepository(mockDBConnection); + + try { + await permitRepository.getProjectFundingSourceIds(1); + expect.fail(); + } catch (error) { + expect((error as ApiError).message).to.equal('Failed to get project funding sources by Id'); + } + }); + }); + + describe('deleteSurveyFundingSourceConnectionToProject', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should delete survey funding source connected to project returning survey_id', async () => { + const mockQueryResponse = ({ + rowCount: 1, + rows: [{ survey_id: 2 }] + } as unknown) as QueryResult<{ + survey_id: number; + }>; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new ProjectRepository(mockDBConnection); + + const response = await permitRepository.deleteSurveyFundingSourceConnectionToProject(1); + + expect(response).to.eql([{ survey_id: 2 }]); + }); + + it('should throw an error if delete failed', async () => { + const mockQueryResponse = ({} as unknown) as QueryResult<{ + project_funding_source_id: number; + }>; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new ProjectRepository(mockDBConnection); + + try { + await permitRepository.deleteSurveyFundingSourceConnectionToProject(1); + expect.fail(); + } catch (error) { + expect((error as ApiError).message).to.equal('Failed to delete survey funding source by id'); + } + }); + }); + + describe('deleteProjectFundingSource', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should delete project funding source', async () => { + const mockQueryResponse = ({ + rowCount: 1, + rows: [{ survey_id: 2 }] + } as unknown) as QueryResult<{ + survey_id: number; + }>; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new ProjectRepository(mockDBConnection); + + const response = await permitRepository.deleteProjectFundingSource(1); + + expect(response).to.eql([{ survey_id: 2 }]); + }); + + it('should throw an error delete failed', async () => { + const mockQueryResponse = ({} as unknown) as QueryResult<{ + survey_id: number; + }>; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const permitRepository = new ProjectRepository(mockDBConnection); + + try { + await permitRepository.deleteProjectFundingSource(1); + expect.fail(); + } catch (error) { + expect((error as ApiError).message).to.equal('Failed to delete project funding source'); + } + }); + }); + + describe('updateProjectFundingSource', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should update project funding source', async () => { + const mockQueryResponse = ({ + rowCount: 1, + rows: [{ project_funding_source_id: 2 }] + } as unknown) as QueryResult<{ + project_funding_source_id: number; + }>; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const data = new PutFundingSource({ + id: 1, + investment_action_category: 1, + agency_project_id: 'string', + funding_amount: 1, + start_date: 'string', + end_date: 'string', + revision_count: '1' + }); + + const permitRepository = new ProjectRepository(mockDBConnection); + + const response = await permitRepository.updateProjectFundingSource(data, 1); + + expect(response).to.eql({ project_funding_source_id: 2 }); + }); + + it('should throw an error update failed', async () => { + const mockQueryResponse = ({} as unknown) as QueryResult<{ + project_funding_source_id: number; + }>; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const data = new PutFundingSource({ + id: 1, + investment_action_category: 1, + agency_project_id: 'string', + funding_amount: 1, + start_date: 'string', + end_date: 'string', + revision_count: '1' + }); + + const permitRepository = new ProjectRepository(mockDBConnection); + + try { + await permitRepository.updateProjectFundingSource(data, 1); + expect.fail(); + } catch (error) { + expect((error as ApiError).message).to.equal('Failed to update project funding source'); + } + }); + }); + + describe('insertProjectFundingSource', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should insert project funding source', async () => { + const mockQueryResponse = ({ + rowCount: 1, + rows: [{ project_funding_source_id: 2 }] + } as unknown) as QueryResult<{ + project_funding_source_id: number; + }>; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const data = new PutFundingSource({ + id: 1, + investment_action_category: 1, + agency_project_id: 'string', + funding_amount: 1, + start_date: 'string', + end_date: 'string', + revision_count: '1' + }); + + const permitRepository = new ProjectRepository(mockDBConnection); + + const response = await permitRepository.insertProjectFundingSource(data, 1); + + expect(response).to.eql({ project_funding_source_id: 2 }); + }); + + it('should throw an error insert failed', async () => { + const mockQueryResponse = ({} as unknown) as QueryResult<{ + project_funding_source_id: number; + }>; + + const mockDBConnection = getMockDBConnection({ + sql: sinon.stub().resolves(mockQueryResponse) + }); + + const data = new PutFundingSource({ + id: 1, + investment_action_category: 1, + agency_project_id: 'string', + funding_amount: 1, + start_date: 'string', + end_date: 'string', + revision_count: '1' + }); + + const permitRepository = new ProjectRepository(mockDBConnection); + + try { + await permitRepository.insertProjectFundingSource(data, 1); + expect.fail(); + } catch (error) { + expect((error as ApiError).message).to.equal('Failed to insert project funding source'); + } + }); + }); + + describe('deleteDraft', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.deleteDraft(1); + + expect(response).to.not.be.null; + expect(response).to.eql({ rows: [{ id: 1 }], rowCount: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.deleteDraft(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to delete draft'); + } + }); + }); + + describe('getSingleDraft', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getSingleDraft(1); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.getSingleDraft(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get draft'); + } + }); + }); + + describe('deleteProjectParticipationRecord', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.deleteProjectParticipationRecord(1); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.deleteProjectParticipationRecord(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to delete project participation record'); + } + }); + }); + + describe('getProjectParticipant', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getProjectParticipant(1, 1); + + expect(response).to.not.be.null; + expect(response).to.eql({ id: 1 }); + }); + + it('should return null', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getProjectParticipant(1, 1); + + expect(response).to.eql(null); + }); + }); + + describe('getProjectParticipants', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getProjectParticipants(1); + + expect(response).to.not.be.null; + expect(response).to.eql([{ id: 1 }]); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: null, rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.getProjectParticipants(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project team members'); + } + }); + }); + + describe('addProjectParticipant', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.addProjectParticipant(1, 1, 1); + + expect(response).to.not.be.null; + expect(response).to.eql(undefined); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: null, rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.addProjectParticipant(1, 1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert project team member'); + } + }); + }); + + describe('getProjectList', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const input = { + coordinator_agency: 'string', + start_date: 'start', + end_date: null, + project_type: 'string', + project_name: 'string', + agency_project_id: 1, + agency_id: 1, + species: [{ id: 1 }], + keyword: 'string' + }; + + const response = await repository.getProjectList(false, 1, input); + + expect(response).to.not.be.null; + expect(response).to.eql([{ id: 1 }]); + }); + + it('should return result with different filter fields', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const input = { + coordinator_agency: 'string', + start_date: null, + end_date: 'end', + project_type: 'string', + project_name: 'string', + agency_project_id: 1, + agency_id: 1, + species: [{ id: 1 }], + keyword: 'string' + }; + + const response = await repository.getProjectList(true, 1, input); + + expect(response).to.not.be.null; + expect(response).to.eql([{ id: 1 }]); + }); + + it('should return result with both data fields', async () => { + const mockResponse = ({ rows: null, rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const input = { + start_date: 'start', + end_date: 'end' + }; + + const response = await repository.getProjectList(true, 1, input); + + expect(response).to.not.be.null; + expect(response).to.eql([]); + }); + }); + + describe('getProjectData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getProjectData(1); + + expect(response).to.not.be.null; + expect(response).to.eql(new GetProjectData({ id: 1 }, [{ id: 1 }])); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: null, rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.getProjectData(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project data'); + } + }); + }); + + describe('getObjectivesData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getObjectivesData(1); + + expect(response).to.not.be.null; + expect(response).to.eql(new GetObjectivesData({ id: 1 })); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.getObjectivesData(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project objectives data'); + } + }); + }); + + describe('getCoordinatorData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getCoordinatorData(1); + + expect(response).to.not.be.null; + expect(response).to.eql(new GetCoordinatorData({ id: 1 })); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.getCoordinatorData(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project contact data'); + } + }); + }); + + describe('getLocationData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getLocationData(1); + + expect(response).to.not.be.null; + expect(response).to.eql(new GetLocationData([{ id: 1 }])); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: null, rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.getLocationData(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project location data'); + } + }); + }); + + describe('getIUCNClassificationData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getIUCNClassificationData(1); + + expect(response).to.not.be.null; + expect(response).to.eql(new GetIUCNClassificationData([{ id: 1 }])); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: null, rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.getIUCNClassificationData(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project IUCN Classification data'); + } + }); + }); + + describe('getFundingData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getFundingData(1); + + expect(response).to.not.be.null; + expect(response).to.eql(new GetFundingData([{ id: 1 }])); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: null, rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.getFundingData(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project funding data'); + } + }); + }); + + describe('getIndigenousPartnershipsRows', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getIndigenousPartnershipsRows(1); + + expect(response).to.not.be.null; + expect(response).to.eql([{ id: 1 }]); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: null, rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.getIndigenousPartnershipsRows(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project Indigenous Partnerships data'); + } + }); + }); + + describe('getStakeholderPartnershipsRows', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getStakeholderPartnershipsRows(1); + + expect(response).to.not.be.null; + expect(response).to.eql([{ id: 1 }]); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: null, rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.getStakeholderPartnershipsRows(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project Stakeholder Partnerships data'); + } + }); + }); + + describe('getAttachmentsData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getAttachmentsData(1); + + expect(response).to.not.be.null; + expect(response).to.eql(new GetAttachmentsData([{ id: 1 }])); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: null, rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.getAttachmentsData(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project Attachment data'); + } + }); + }); + + describe('getReportAttachmentsData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getReportAttachmentsData(1); + + expect(response).to.not.be.null; + expect(response).to.eql(new GetReportAttachmentsData([{ id: 1 }])); + }); + + it('should return null', async () => { + const mockResponse = ({ rows: [], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.getReportAttachmentsData(1); + + expect(response).to.not.be.null; + expect(response).to.eql(new GetReportAttachmentsData([])); + }); + }); + + describe('insertProject', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const input = ({ + project: { + type: 1, + name: 'name', + start_date: 'start_date', + end_date: 'end_date', + comments: 'comments' + }, + objectives: { objectives: '', caveats: '' }, + location: { location_description: '', geometry: [{ id: 1 }] }, + coordinator: { + first_name: 'first_name', + last_name: 'last_name', + email_address: 'email_address', + coordinator_agency: 'coordinator_agency', + share_contact_details: 'share_contact_details' + } + } as unknown) as PostProjectObject; + + const response = await repository.insertProject(input); + + expect(response).to.not.be.null; + expect(response).to.eql(1); + }); + + it('should return result when no geometry given', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const input = ({ + project: { + type: 1, + name: 'name', + start_date: 'start_date', + end_date: 'end_date', + comments: 'comments' + }, + objectives: { objectives: '', caveats: '' }, + location: { location_description: '', geometry: [] }, + coordinator: { + first_name: 'first_name', + last_name: 'last_name', + email_address: 'email_address', + coordinator_agency: 'coordinator_agency', + share_contact_details: 'share_contact_details' + } + } as unknown) as PostProjectObject; + + const response = await repository.insertProject(input); + + expect(response).to.not.be.null; + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const input = ({ + project: { + type: 1, + name: 'name', + start_date: 'start_date', + end_date: 'end_date', + comments: 'comments' + }, + objectives: { objectives: '', caveats: '' }, + location: { location_description: '', geometry: [] }, + coordinator: { + first_name: 'first_name', + last_name: 'last_name', + email_address: 'email_address', + coordinator_agency: 'coordinator_agency', + share_contact_details: 'share_contact_details' + } + } as unknown) as PostProjectObject; + + try { + await repository.insertProject(input); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert project boundary data'); + } + }); + }); + + describe('insertFundingSource', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const input = ({ + investment_action_category: 1, + agency_project_id: 1, + funding_amount: 123, + start_date: 'start', + end_date: 'end' + } as unknown) as PostFundingSource; + + const response = await repository.insertFundingSource(input, 1); + + expect(response).to.not.be.null; + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: null, rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const input = ({ + investment_action_category: 1, + agency_project_id: 1, + funding_amount: 123, + start_date: 'start', + end_date: 'end' + } as unknown) as PostFundingSource; + + try { + await repository.insertFundingSource(input, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert project funding data'); + } + }); + }); + + describe('insertIndigenousNation', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.insertIndigenousNation(1, 1); + + expect(response).to.not.be.null; + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: null, rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.insertIndigenousNation(1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert project first nations partnership data'); + } + }); + }); + + describe('insertStakeholderPartnership', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.insertStakeholderPartnership('partner', 1); + + expect(response).to.not.be.null; + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.insertStakeholderPartnership('partner', 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert project stakeholder partnership data'); + } + }); + }); + + describe('insertClassificationDetail', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.insertClassificationDetail(1, 1); + + expect(response).to.not.be.null; + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.insertClassificationDetail(1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert project IUCN data'); + } + }); + }); + + describe('insertActivity', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.insertActivity(1, 1); + + expect(response).to.not.be.null; + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.insertActivity(1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert project activity data'); + } + }); + }); + + describe('insertParticipantRole', () => { + it('should throw an error when no user found', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.insertParticipantRole(1, 'string'); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to identify system user ID'); + } + }); + + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse, systemUserId: () => 1 }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.insertParticipantRole(1, 'string'); + + expect(response).to.not.be.null; + expect(response).to.eql(undefined); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse, systemUserId: () => 1 }); + + const repository = new ProjectRepository(dbConnection); + + try { + await repository.insertParticipantRole(1, 'string'); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert project team member'); + } + }); + }); + + describe('deleteIUCNData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.deleteIUCNData(1); + + expect(response).to.eql(undefined); + }); + }); + + describe('deleteIndigenousPartnershipsData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.deleteIndigenousPartnershipsData(1); + + expect(response).to.eql(undefined); + }); + }); + + describe('deleteStakeholderPartnershipsData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.deleteStakeholderPartnershipsData(1); + + expect(response).to.eql(undefined); + }); + }); + + describe('deleteActivityData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.deleteActivityData(1); + + expect(response).to.eql(undefined); + }); + }); + + describe('deleteProject', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new ProjectRepository(dbConnection); + + const response = await repository.deleteProject(1); + + expect(response).to.eql(undefined); + }); + }); +}); diff --git a/api/src/repositories/project-repository.ts b/api/src/repositories/project-repository.ts new file mode 100644 index 0000000000..88eb4db6a5 --- /dev/null +++ b/api/src/repositories/project-repository.ts @@ -0,0 +1,1202 @@ +import { NumberOfAutoScalingGroups } from 'aws-sdk/clients/autoscaling'; +import { QueryResult } from 'pg'; +import SQL, { SQLStatement } from 'sql-template-strings'; +import { ApiExecuteSQLError } from '../errors/api-error'; +import { PostFundingSource, PostProjectObject } from '../models/project-create'; +import { + PutCoordinatorData, + PutFundingSource, + PutLocationData, + PutObjectivesData, + PutProjectData +} from '../models/project-update'; +import { + GetAttachmentsData, + GetCoordinatorData, + GetFundingData, + GetIUCNClassificationData, + GetLocationData, + GetObjectivesData, + GetProjectData, + GetReportAttachmentsData +} from '../models/project-view'; +import { queries } from '../queries/queries'; +import { BaseRepository } from './base-repository'; + +/** + * A repository class for accessing project data. + * + * @export + * @class ProjectRepository + * @extends {BaseRepository} + */ +export class ProjectRepository extends BaseRepository { + async getProjectFundingSourceIds( + projectId: number + ): Promise< + { + project_funding_source_id: number; + }[] + > { + const sqlStatement = SQL` + SELECT + pfs.project_funding_source_id + FROM + project_funding_source pfs + WHERE + pfs.project_id = ${projectId}; + `; + + const response = await this.connection.sql<{ + project_funding_source_id: number; + }>(sqlStatement); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get project funding sources by Id', [ + 'ProjectRepository->getProjectFundingSourceIds', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result; + } + + async deleteSurveyFundingSourceConnectionToProject(projectFundingSourceId: number) { + const sqlStatement: SQLStatement = SQL` + DELETE + from survey_funding_source sfs + WHERE + sfs.project_funding_source_id = ${projectFundingSourceId} + RETURNING survey_id;`; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to delete survey funding source by id', [ + 'ProjectRepository->deleteSurveyFundingSourceConnectionToProject', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result; + } + + async deleteProjectFundingSource(projectFundingSourceId: number) { + const sqlStatement: SQLStatement = SQL` + DELETE + from project_funding_source + WHERE + project_funding_source_id = ${projectFundingSourceId}; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to delete project funding source', [ + 'ProjectRepository->deleteProjectFundingSource', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result; + } + + async updateProjectFundingSource( + fundingSource: PutFundingSource, + projectId: number + ): Promise<{ project_funding_source_id: number }> { + const sqlStatement: SQLStatement = SQL` + UPDATE + project_funding_source + SET + project_id = ${projectId}, + investment_action_category_id = ${fundingSource.investment_action_category}, + funding_source_project_id = ${fundingSource.agency_project_id}, + funding_amount = ${fundingSource.funding_amount}, + funding_start_date = ${fundingSource.start_date}, + funding_end_date = ${fundingSource.end_date} + WHERE + project_funding_source_id = ${fundingSource.id} + RETURNING + project_funding_source_id; + `; + + const response = await this.connection.sql<{ project_funding_source_id: number }>(sqlStatement); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to update project funding source', [ + 'ProjectRepository->putProjectFundingSource', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result; + } + + async insertProjectFundingSource( + fundingSource: PutFundingSource, + projectId: number + ): Promise<{ project_funding_source_id: number }> { + const sqlStatement: SQLStatement = SQL` + INSERT INTO project_funding_source ( + project_id, + investment_action_category_id, + funding_source_project_id, + funding_amount, + funding_start_date, + funding_end_date + ) VALUES ( + ${projectId}, + ${fundingSource.investment_action_category}, + ${fundingSource.agency_project_id}, + ${fundingSource.funding_amount}, + ${fundingSource.start_date}, + ${fundingSource.end_date} + ) + RETURNING + project_funding_source_id; + `; + + const response = await this.connection.sql<{ project_funding_source_id: number }>(sqlStatement); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to insert project funding source', [ + 'ProjectRepository->putProjectFundingSource', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result; + } + + async deleteDraft(draftId: number): Promise { + const sqlStatement = SQL` + DELETE from webform_draft + WHERE webform_draft_id = ${draftId}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response) { + throw new ApiExecuteSQLError('Failed to delete draft', [ + 'ProjectRepository->deleteDraft', + 'response was null or undefined, expected response != null' + ]); + } + + return response; + } + + async getSingleDraft(draftId: number): Promise<{ id: number; name: string; data: any }> { + const sqlStatement: SQLStatement = SQL` + SELECT + webform_draft_id as id, + name, + data + FROM + webform_draft + WHERE + webform_draft_id = ${draftId}; + `; + + const response = await this.connection.sql<{ id: number; name: string; data: any }>(sqlStatement); + + if (!response || !response?.rows?.[0]) { + throw new ApiExecuteSQLError('Failed to get draft', [ + 'ProjectRepository->getSingleDraft', + 'response was null or undefined, expected response != null' + ]); + } + + return response?.rows?.[0]; + } + + async deleteProjectParticipationRecord(projectParticipationId: number): Promise { + const sqlStatement = SQL` + DELETE FROM + project_participation + WHERE + project_participation_id = ${projectParticipationId} + RETURNING + *; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response || !response.rowCount) { + throw new ApiExecuteSQLError('Failed to delete project participation record', [ + 'ProjectRepository->deleteProjectParticipationRecord', + 'rows was null or undefined, expected rows != null' + ]); + } + + return response.rows[0]; + } + + async getProjectParticipant(projectId: number, systemUserId: number): Promise { + const sqlStatement = SQL` + SELECT + pp.project_id, + pp.system_user_id, + su.record_end_date, + array_remove(array_agg(pr.project_role_id), NULL) AS project_role_ids, + array_remove(array_agg(pr.name), NULL) AS project_role_names + FROM + project_participation pp + LEFT JOIN + project_role pr + ON + pp.project_role_id = pr.project_role_id + LEFT JOIN + system_user su + ON + pp.system_user_id = su.system_user_id + WHERE + pp.project_id = ${projectId} + AND + pp.system_user_id = ${systemUserId} + AND + su.record_end_date is NULL + GROUP BY + pp.project_id, + pp.system_user_id, + su.record_end_date ; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows && response.rows[0]) || null; + + return result; + } + + async getProjectParticipants(projectId: number): Promise { + const sqlStatement = SQL` + SELECT + pp.project_participation_id, + pp.project_id, + pp.system_user_id, + pp.project_role_id, + pr.name project_role_name, + su.user_identifier, + su.user_guid, + su.user_identity_source_id + FROM + project_participation pp + LEFT JOIN + system_user su + ON + pp.system_user_id = su.system_user_id + LEFT JOIN + project_role pr + ON + pr.project_role_id = pp.project_role_id + WHERE + pp.project_id = ${projectId}; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get project team members', [ + 'ProjectRepository->getProjectParticipants', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result; + } + + async addProjectParticipant( + projectId: number, + systemUserId: number, + projectParticipantRoleId: number + ): Promise { + const sqlStatement = SQL` + INSERT INTO project_participation ( + project_id, + system_user_id, + project_role_id + ) VALUES ( + ${projectId}, + ${systemUserId}, + ${projectParticipantRoleId} + ) + RETURNING + *; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + if (!response || !response.rowCount) { + throw new ApiExecuteSQLError('Failed to insert project team member', [ + 'ProjectRepository->getProjectParticipants', + 'rows was null or undefined, expected rows != null' + ]); + } + } + + async getProjectList(isUserAdmin: boolean, systemUserId: number | null, filterFields: any): Promise { + const sqlStatement = SQL` + SELECT + p.project_id as id, + p.name, + p.start_date, + p.end_date, + p.coordinator_agency_name as coordinator_agency, + pt.name as project_type + from + project as p + left outer join project_type as pt + on p.project_type_id = pt.project_type_id + left outer join project_funding_source as pfs + on pfs.project_id = p.project_id + left outer join investment_action_category as iac + on pfs.investment_action_category_id = iac.investment_action_category_id + left outer join funding_source as fs + on iac.funding_source_id = fs.funding_source_id + left outer join survey as s + on s.project_id = p.project_id + left outer join study_species as sp + on sp.survey_id = s.survey_id + where 1 = 1 + `; + + if (!isUserAdmin) { + sqlStatement.append(SQL` + AND p.project_id IN ( + SELECT + project_id + FROM + project_participation + where + system_user_id = ${systemUserId} + ) + `); + } + + if (filterFields && Object.keys(filterFields).length !== 0 && filterFields.constructor === Object) { + if (filterFields.coordinator_agency) { + sqlStatement.append(SQL` AND p.coordinator_agency_name = ${filterFields.coordinator_agency}`); + } + + if (filterFields.start_date && !filterFields.end_date) { + sqlStatement.append(SQL` AND p.start_date >= ${filterFields.start_date}`); + } + + if (!filterFields.start_date && filterFields.end_date) { + sqlStatement.append(SQL` AND p.end_date <= ${filterFields.end_date}`); + } + + if (filterFields.start_date && filterFields.end_date) { + sqlStatement.append( + SQL` AND p.start_date >= ${filterFields.start_date} AND p.end_date <= ${filterFields.end_date}` + ); + } + + if (filterFields.project_type) { + sqlStatement.append(SQL` AND pt.name = ${filterFields.project_type}`); + } + + if (filterFields.project_name) { + sqlStatement.append(SQL` AND p.name = ${filterFields.project_name}`); + } + + if (filterFields.agency_project_id) { + sqlStatement.append(SQL` AND pfs.funding_source_project_id = ${filterFields.agency_project_id}`); + } + + if (filterFields.agency_id) { + sqlStatement.append(SQL` AND fs.funding_source_id = ${filterFields.agency_id}`); + } + + if (filterFields.species && filterFields.species.length) { + sqlStatement.append(SQL` AND sp.wldtaxonomic_units_id =${filterFields.species[0]}`); + } + + if (filterFields.keyword) { + const keyword_string = '%'.concat(filterFields.keyword).concat('%'); + sqlStatement.append(SQL` AND p.name ilike ${keyword_string}`); + sqlStatement.append(SQL` OR p.coordinator_agency_name ilike ${keyword_string}`); + sqlStatement.append(SQL` OR fs.name ilike ${keyword_string}`); + sqlStatement.append(SQL` OR s.name ilike ${keyword_string}`); + } + } + + sqlStatement.append(SQL` + group by + p.project_id, + p.name, + p.start_date, + p.end_date, + p.coordinator_agency_name, + pt.name; + `); + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + if (!response.rows) { + return []; + } + + return response.rows; + } + + async getProjectData(projectId: number): Promise { + const getProjectSqlStatement = SQL` + SELECT + project.project_id as id, + project.uuid, + project.project_type_id as pt_id, + project_type.name as type, + project.name, + project.objectives, + project.location_description, + project.start_date, + project.end_date, + project.caveats, + project.comments, + project.coordinator_first_name, + project.coordinator_last_name, + project.coordinator_email_address, + project.coordinator_agency_name, + project.coordinator_public, + project.geojson as geometry, + project.create_date, + project.create_user, + project.update_date, + project.update_user, + project.revision_count + from + project + left outer join + project_type + on project.project_type_id = project_type.project_type_id + where + project.project_id = ${projectId}; + `; + + const getProjectActivitiesSQLStatement = SQL` + SELECT + activity_id + from + project_activity + where project_id = ${projectId}; + `; + + const [project, activity] = await Promise.all([ + this.connection.query(getProjectSqlStatement.text, getProjectSqlStatement.values), + this.connection.query(getProjectActivitiesSQLStatement.text, getProjectActivitiesSQLStatement.values) + ]); + + const projectResult = (project && project.rows && project.rows[0]) || null; + const activityResult = (activity && activity.rows) || null; + + if (!projectResult || !activityResult) { + throw new ApiExecuteSQLError('Failed to get project data', [ + 'ProjectRepository->getProjectData', + 'rows was null or undefined, expected rows != null' + ]); + } + + return new GetProjectData(projectResult, activityResult); + } + + async getObjectivesData(projectId: number): Promise { + const sqlStatement = SQL` + SELECT + objectives, + caveats, + revision_count + FROM + project + WHERE + project_id = ${projectId}; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + const result = (response && response.rows && response.rows[0]) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get project objectives data', [ + 'ProjectRepository->getObjectivesData', + 'rows was null or undefined, expected rows != null' + ]); + } + + return new GetObjectivesData(result); + } + + async getCoordinatorData(projectId: number): Promise { + const sqlStatement = SQL` + SELECT + coordinator_first_name, + coordinator_last_name, + coordinator_email_address, + coordinator_agency_name, + coordinator_public, + revision_count + FROM + project + WHERE + project_id = ${projectId}; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + const result = (response && response.rows && response.rows[0]) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get project contact data', [ + 'ProjectRepository->getCoordinatorData', + 'rows was null or undefined, expected rows != null' + ]); + } + + return new GetCoordinatorData(result); + } + + async getLocationData(projectId: number): Promise { + const sqlStatement = SQL` + SELECT + p.location_description, + p.geojson as geometry, + p.revision_count + FROM + project p + WHERE + p.project_id = ${projectId} + GROUP BY + p.location_description, + p.geojson, + p.revision_count; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get project location data', [ + 'ProjectRepository->getLocationData', + 'rows was null or undefined, expected rows != null' + ]); + } + + return new GetLocationData(result); + } + + async getIUCNClassificationData(projectId: number): Promise { + const sqlStatement = SQL` + SELECT + ical1c.iucn_conservation_action_level_1_classification_id as classification, + ical2s.iucn_conservation_action_level_2_subclassification_id as subClassification1, + ical3s.iucn_conservation_action_level_3_subclassification_id as subClassification2 + FROM + project_iucn_action_classification as piac + LEFT OUTER JOIN + iucn_conservation_action_level_3_subclassification as ical3s + ON + piac.iucn_conservation_action_level_3_subclassification_id = ical3s.iucn_conservation_action_level_3_subclassification_id + LEFT OUTER JOIN + iucn_conservation_action_level_2_subclassification as ical2s + ON + ical3s.iucn_conservation_action_level_2_subclassification_id = ical2s.iucn_conservation_action_level_2_subclassification_id + LEFT OUTER JOIN + iucn_conservation_action_level_1_classification as ical1c + ON + ical2s.iucn_conservation_action_level_1_classification_id = ical1c.iucn_conservation_action_level_1_classification_id + WHERE + piac.project_id = ${projectId} + GROUP BY + ical1c.iucn_conservation_action_level_1_classification_id, + ical2s.iucn_conservation_action_level_2_subclassification_id, + ical3s.iucn_conservation_action_level_3_subclassification_id; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get project IUCN Classification data', [ + 'ProjectRepository->getIUCNClassificationData', + 'rows was null or undefined, expected rows != null' + ]); + } + + return new GetIUCNClassificationData(result); + } + + async getFundingData(projectId: number): Promise { + const sqlStatement = SQL` + SELECT + pfs.project_funding_source_id as id, + fs.funding_source_id as agency_id, + pfs.funding_amount::numeric::int, + pfs.funding_start_date as start_date, + pfs.funding_end_date as end_date, + iac.investment_action_category_id as investment_action_category, + iac.name as investment_action_category_name, + fs.name as agency_name, + pfs.funding_source_project_id as agency_project_id, + pfs.revision_count as revision_count + FROM + project_funding_source as pfs + LEFT OUTER JOIN + investment_action_category as iac + ON + pfs.investment_action_category_id = iac.investment_action_category_id + LEFT OUTER JOIN + funding_source as fs + ON + iac.funding_source_id = fs.funding_source_id + WHERE + pfs.project_id = ${projectId} + GROUP BY + pfs.project_funding_source_id, + fs.funding_source_id, + pfs.funding_source_project_id, + pfs.funding_amount, + pfs.funding_start_date, + pfs.funding_end_date, + iac.investment_action_category_id, + iac.name, + fs.name, + pfs.revision_count + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get project funding data', [ + 'ProjectRepository->getFundingData', + 'rows was null or undefined, expected rows != null' + ]); + } + + return new GetFundingData(result); + } + + async getIndigenousPartnershipsRows(projectId: number): Promise { + const sqlStatement = SQL` + SELECT + fn.first_nations_id as id, + fn.name as first_nations_name + FROM + project_first_nation pfn + LEFT OUTER JOIN + first_nations fn + ON + pfn.first_nations_id = fn.first_nations_id + WHERE + pfn.project_id = ${projectId} + GROUP BY + fn.first_nations_id, + fn.name; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get project Indigenous Partnerships data', [ + 'ProjectRepository->getIndigenousPartnershipsRows', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result; + } + + async getStakeholderPartnershipsRows(projectId: number): Promise { + const sqlStatement = SQL` + SELECT + name as partnership_name + FROM + stakeholder_partnership + WHERE + project_id = ${projectId}; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get project Stakeholder Partnerships data', [ + 'ProjectRepository->getStakeholderPartnershipsRows', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result; + } + + async getAttachmentsData(projectId: number): Promise { + const sqlStatement = SQL` + SELECT + * + FROM + project_attachment + WHERE + project_id = ${projectId}; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get project Attachment data', [ + 'ProjectRepository->getAttachmentsData', + 'rows was null or undefined, expected rows != null' + ]); + } + return new GetAttachmentsData(result); + } + + async getReportAttachmentsData(projectId: number): Promise { + const sqlStatement = SQL` + SELECT + pra.project_report_attachment_id + , pra.project_id + , pra.file_name + , pra.title + , pra.description + , pra.year + , pra."key" + , pra.file_size + , array_remove(array_agg(pra2.first_name ||' '||pra2.last_name), null) authors + FROM + project_report_attachment pra + LEFT JOIN project_report_author pra2 ON pra2.project_report_attachment_id = pra.project_report_attachment_id + WHERE pra.project_id = ${projectId} + GROUP BY + pra.project_report_attachment_id + , pra.project_id + , pra.file_name + , pra.title + , pra.description + , pra.year + , pra."key" + , pra.file_size; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows) || null; + + return new GetReportAttachmentsData(result); + } + + async insertProject(postProjectData: PostProjectObject): Promise { + const sqlStatement = SQL` + INSERT INTO project ( + project_type_id, + name, + objectives, + location_description, + start_date, + end_date, + caveats, + comments, + coordinator_first_name, + coordinator_last_name, + coordinator_email_address, + coordinator_agency_name, + coordinator_public, + geojson, + geography + ) VALUES ( + ${postProjectData.project.type}, + ${postProjectData.project.name}, + ${postProjectData.objectives.objectives}, + ${postProjectData.location.location_description}, + ${postProjectData.project.start_date}, + ${postProjectData.project.end_date}, + ${postProjectData.objectives.caveats}, + ${postProjectData.project.comments}, + ${postProjectData.coordinator.first_name}, + ${postProjectData.coordinator.last_name}, + ${postProjectData.coordinator.email_address}, + ${postProjectData.coordinator.coordinator_agency}, + ${postProjectData.coordinator.share_contact_details}, + ${JSON.stringify(postProjectData.location.geometry)} + `; + + if (postProjectData.location.geometry && postProjectData.location.geometry.length) { + const geometryCollectionSQL = queries.spatial.generateGeometryCollectionSQL(postProjectData.location.geometry); + + sqlStatement.append(SQL` + ,public.geography( + public.ST_Force2D( + public.ST_SetSRID( + `); + + sqlStatement.append(geometryCollectionSQL); + + sqlStatement.append(SQL` + , 4326))) + `); + } else { + sqlStatement.append(SQL` + ,null + `); + } + + sqlStatement.append(SQL` + ) + RETURNING + project_id as id; + `); + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result || !result.id) { + throw new ApiExecuteSQLError('Failed to insert project boundary data', [ + 'ProjectRepository->insertProject', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result.id; + } + + async insertFundingSource(fundingSource: PostFundingSource, project_id: number): Promise { + const sqlStatement = SQL` + INSERT INTO project_funding_source ( + project_id, + investment_action_category_id, + funding_source_project_id, + funding_amount, + funding_start_date, + funding_end_date + ) VALUES ( + ${project_id}, + ${fundingSource.investment_action_category}, + ${fundingSource.agency_project_id}, + ${fundingSource.funding_amount}, + ${fundingSource.start_date}, + ${fundingSource.end_date} + ) + RETURNING + project_funding_source_id as id; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result || !result.id) { + throw new ApiExecuteSQLError('Failed to insert project funding data', [ + 'ProjectRepository->insertFundingSource', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result.id; + } + + async insertIndigenousNation(indigenousNationsId: number, project_id: number): Promise { + const sqlStatement = SQL` + INSERT INTO project_first_nation ( + project_id, + first_nations_id + ) VALUES ( + ${project_id}, + ${indigenousNationsId} + ) + RETURNING + first_nations_id as id; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result || !result.id) { + throw new ApiExecuteSQLError('Failed to insert project first nations partnership data', [ + 'ProjectRepository->insertIndigenousNation', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result.id; + } + + async insertStakeholderPartnership(stakeholderPartner: string, project_id: number): Promise { + const sqlStatement = SQL` + INSERT INTO stakeholder_partnership ( + project_id, + name + ) VALUES ( + ${project_id}, + ${stakeholderPartner} + ) + RETURNING + stakeholder_partnership_id as id; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result || !result.id) { + throw new ApiExecuteSQLError('Failed to insert project stakeholder partnership data', [ + 'ProjectRepository->insertStakeholderPartnership', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result.id; + } + + async insertClassificationDetail(iucn3_id: number, project_id: number): Promise { + const sqlStatement = SQL` + INSERT INTO project_iucn_action_classification ( + iucn_conservation_action_level_3_subclassification_id, + project_id + ) VALUES ( + ${iucn3_id}, + ${project_id} + ) + RETURNING + project_iucn_action_classification_id as id; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result || !result.id) { + throw new ApiExecuteSQLError('Failed to insert project IUCN data', [ + 'ProjectRepository->insertClassificationDetail', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result.id; + } + + async insertActivity(activityId: number, projectId: number): Promise { + const sqlStatement = SQL` + INSERT INTO project_activity ( + activity_id, + project_id + ) VALUES ( + ${activityId}, + ${projectId} + ) + RETURNING + project_activity_id as id; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result || !result.id) { + throw new ApiExecuteSQLError('Failed to insert project activity data', [ + 'ProjectRepository->insertClassificationDetail', + 'rows was null or undefined, expected rows != null' + ]); + } + + return result.id; + } + + async insertParticipantRole(projectId: number, projectParticipantRole: string): Promise { + const systemUserId = this.connection.systemUserId(); + + if (!systemUserId) { + throw new ApiExecuteSQLError('Failed to identify system user ID'); + } + + const sqlStatement = SQL` + INSERT INTO project_participation ( + project_id, + system_user_id, + project_role_id + ) + ( + SELECT + ${projectId}, + ${systemUserId}, + project_role_id + FROM + project_role + WHERE + name = ${projectParticipantRole} + ) + RETURNING + *; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + if (!response || !response.rowCount) { + throw new ApiExecuteSQLError('Failed to insert project team member', [ + 'ProjectRepository->insertParticipantRole', + 'rows was null or undefined, expected rows != null' + ]); + } + } + + async deleteIUCNData(projectId: number): Promise { + const sqlDeleteStatement = SQL` + DELETE + from project_iucn_action_classification + WHERE + project_id = ${projectId}; + `; + + await this.connection.query(sqlDeleteStatement.text, sqlDeleteStatement.values); + } + + async deleteIndigenousPartnershipsData(projectId: number): Promise { + const sqlDeleteStatement = SQL` + DELETE + from project_first_nation + WHERE + project_id = ${projectId}; + `; + + await this.connection.query(sqlDeleteStatement.text, sqlDeleteStatement.values); + } + + async deleteStakeholderPartnershipsData(projectId: number): Promise { + const sqlDeleteStatement = SQL` + DELETE + from stakeholder_partnership + WHERE + project_id = ${projectId}; + `; + + await this.connection.query(sqlDeleteStatement.text, sqlDeleteStatement.values); + } + + async updateProjectData( + projectId: number, + project: PutProjectData | null, + location: PutLocationData | null, + objectives: PutObjectivesData | null, + coordinator: PutCoordinatorData | null, + revision_count: number + ): Promise { + if (!project && !location && !objectives && !coordinator) { + // Nothing to update + throw new ApiExecuteSQLError('Nothing to update for Project Data', [ + 'ProjectRepository->updateProjectData', + 'rows was null or undefined, expected rows != null' + ]); + } + + const sqlStatement: SQLStatement = SQL`UPDATE project SET `; + + const sqlSetStatements: SQLStatement[] = []; + + if (project) { + sqlSetStatements.push(SQL`project_type_id = ${project.type}`); + sqlSetStatements.push(SQL`name = ${project.name}`); + sqlSetStatements.push(SQL`start_date = ${project.start_date}`); + sqlSetStatements.push(SQL`end_date = ${project.end_date}`); + } + + if (location) { + sqlSetStatements.push(SQL`location_description = ${location.location_description}`); + sqlSetStatements.push(SQL`geojson = ${JSON.stringify(location.geometry)}`); + + const geometrySQLStatement = SQL`geography = `; + + if (location.geometry && location.geometry.length) { + const geometryCollectionSQL = queries.spatial.generateGeometryCollectionSQL(location.geometry); + + geometrySQLStatement.append(SQL` + public.geography( + public.ST_Force2D( + public.ST_SetSRID( + `); + + geometrySQLStatement.append(geometryCollectionSQL); + + geometrySQLStatement.append(SQL` + , 4326))) + `); + } else { + geometrySQLStatement.append(SQL`null`); + } + + sqlSetStatements.push(geometrySQLStatement); + } + + if (objectives) { + sqlSetStatements.push(SQL`objectives = ${objectives.objectives}`); + sqlSetStatements.push(SQL`caveats = ${objectives.caveats}`); + } + + if (coordinator) { + sqlSetStatements.push(SQL`coordinator_first_name = ${coordinator.first_name}`); + sqlSetStatements.push(SQL`coordinator_last_name = ${coordinator.last_name}`); + sqlSetStatements.push(SQL`coordinator_email_address = ${coordinator.email_address}`); + sqlSetStatements.push(SQL`coordinator_agency_name = ${coordinator.coordinator_agency}`); + sqlSetStatements.push(SQL`coordinator_public = ${coordinator.share_contact_details}`); + } + + sqlSetStatements.forEach((item, index) => { + sqlStatement.append(item); + if (index < sqlSetStatements.length - 1) { + sqlStatement.append(','); + } + }); + + sqlStatement.append(SQL` + WHERE + project_id = ${projectId} + AND + revision_count = ${revision_count}; + `); + + const result = await this.connection.query(sqlStatement.text, sqlStatement.values); + + if (!result || !result.rowCount) { + throw new ApiExecuteSQLError('Failed to update stale project data', [ + 'ProjectRepository->updateProjectData', + 'rows was null or undefined, expected rows != null' + ]); + } + } + + async deleteActivityData(projectId: NumberOfAutoScalingGroups): Promise { + const sqlDeleteStatement = SQL` + DELETE FROM + project_activity + WHERE + project_id = ${projectId}; + `; + + await this.connection.query(sqlDeleteStatement.text, sqlDeleteStatement.values); + } + + async deleteProject(projectId: number): Promise { + const sqlStatement = SQL`call api_delete_project(${projectId})`; + + await this.connection.sql(sqlStatement); + } +} diff --git a/api/src/repositories/spatial-repository.test.ts b/api/src/repositories/spatial-repository.test.ts new file mode 100644 index 0000000000..6de60d24c0 --- /dev/null +++ b/api/src/repositories/spatial-repository.test.ts @@ -0,0 +1,256 @@ +import chai, { expect } from 'chai'; +import { FeatureCollection } from 'geojson'; +import { describe } from 'mocha'; +import { QueryResult } from 'pg'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import SQL from 'sql-template-strings'; +import { ApiGeneralError } from '../errors/api-error'; +import * as spatialUtils from '../utils/spatial-utils'; +import { getMockDBConnection } from '../__mocks__/db'; +import { SpatialRepository } from './spatial-repository'; + +chai.use(sinonChai); + +describe('SpatialRepository', () => { + describe('getSpatialTransformRecords', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should succeed with valid data', async () => { + const mockQueryResponse = ({ + rowCount: 1, + rows: [ + { + spatial_transform_id: 1, + name: 'transform name', + description: 'transform description', + notes: 'notes', + transform: 'transform details' + } + ] + } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const spatialRepository = new SpatialRepository(mockDBConnection); + + const response = await spatialRepository.getSpatialTransformRecords(); + + expect(response[0].spatial_transform_id).to.equal(1); + expect(response[0].name).to.equal('transform name'); + expect(response[0].description).to.equal('transform description'); + expect(response[0].notes).to.equal('notes'); + expect(response[0].transform).to.equal('transform details'); + }); + }); + + describe('insertSpatialTransformSubmissionRecord', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should throw an error when insert sql fails', async () => { + const mockQueryResponse = ({ rowCount: 0 } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const spatialRepository = new SpatialRepository(mockDBConnection); + + try { + await spatialRepository.insertSpatialTransformSubmissionRecord(1, 1); + expect.fail(); + } catch (actualError) { + expect((actualError as ApiGeneralError).message).to.equal( + 'Failed to insert spatial transform submission id and submission spatial component id' + ); + } + }); + + it('should succeed with valid data', async () => { + const mockQueryResponse = ({ rowCount: 1, rows: [{ spatial_transform_submission_id: 1 }] } as any) as Promise< + QueryResult + >; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const spatialRepository = new SpatialRepository(mockDBConnection); + + const response = await spatialRepository.insertSpatialTransformSubmissionRecord(1, 1); + + expect(response.spatial_transform_submission_id).to.equal(1); + }); + }); + + describe('runSpatialTransformOnSubmissionId', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should succeed with valid data', async () => { + const mockQueryResponse = ({ + rowCount: 1, + rows: [ + { + result_data: { + type: 'FeatureCollection', + features: [] + } as FeatureCollection + } + ] + } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + query: async () => { + return mockQueryResponse; + } + }); + + const spatialRepository = new SpatialRepository(mockDBConnection); + + const response = await spatialRepository.runSpatialTransformOnSubmissionId(1, 'string'); + + expect(response).to.eql([ + { + result_data: { + type: 'FeatureCollection', + features: [] + } as FeatureCollection + } + ]); + }); + }); + + describe('insertSubmissionSpatialComponent', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should throw an error when insert sql fails', async () => { + const mockQueryResponse = ({ rowCount: 0 } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const spatialRepository = new SpatialRepository(mockDBConnection); + + try { + await spatialRepository.insertSubmissionSpatialComponent(1, {} as FeatureCollection); + expect.fail(); + } catch (actualError) { + expect((actualError as ApiGeneralError).message).to.equal( + 'Failed to insert submission spatial component details' + ); + } + }); + + it('should succeed with valid data', async () => { + const mockQueryResponse = ({ rowCount: 1, rows: [{ submission_spatial_component_id: 1 }] } as any) as Promise< + QueryResult + >; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const spatialRepository = new SpatialRepository(mockDBConnection); + + const response = await spatialRepository.insertSubmissionSpatialComponent(1, {} as FeatureCollection); + + expect(response.submission_spatial_component_id).to.equal(1); + }); + + it('should succeed with valid data and append geography to sql statement', async () => { + const mockQueryResponse = ({ rowCount: 1, rows: [{ submission_spatial_component_id: 1 }] } as any) as Promise< + QueryResult + >; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const generateGeometryCollectionSQLStub = sinon + .stub(spatialUtils, 'generateGeometryCollectionSQL') + .returns(SQL`valid`); + + const spatialRepository = new SpatialRepository(mockDBConnection); + + const response = await spatialRepository.insertSubmissionSpatialComponent(1, { + type: 'FeatureCollection', + features: [ + { + type: 'Feature', + geometry: { + type: 'Point', + coordinates: [125.6, 10.1] + }, + properties: {} + } + ] + } as FeatureCollection); + + expect(response.submission_spatial_component_id).to.equal(1); + expect(generateGeometryCollectionSQLStub).to.be.calledOnce; + }); + }); + + describe('deleteSpatialComponentsBySubmissionId', () => { + it('should successfully return submission IDs for delete spatial data', async () => { + const mockQueryResponse = ({ rowCount: 1, rows: [{ occurrence_submission_id: 2 }] } as any) as Promise< + QueryResult + >; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const spatialRepository = new SpatialRepository(mockDBConnection); + + const response = await spatialRepository.deleteSpatialComponentsBySubmissionId(2); + + expect(response[0].occurrence_submission_id).to.equal(2); + }); + }); + + describe('deleteSpatialComponentsSpatialRefsBySubmissionId', () => { + it('should successfully return submission IDs for deleted spatial component reference', async () => { + const mockQueryResponse = ({ rowCount: 1, rows: [{ occurrence_submission_id: 2 }] } as any) as Promise< + QueryResult + >; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const spatialRepository = new SpatialRepository(mockDBConnection); + + const response = await spatialRepository.deleteSpatialComponentsSpatialTransformRefsBySubmissionId(2); + + expect(response[0].occurrence_submission_id).to.equal(2); + }); + }); +}); diff --git a/api/src/repositories/spatial-repository.ts b/api/src/repositories/spatial-repository.ts new file mode 100644 index 0000000000..ef79e0f428 --- /dev/null +++ b/api/src/repositories/spatial-repository.ts @@ -0,0 +1,224 @@ +import { FeatureCollection } from 'geojson'; +import SQL from 'sql-template-strings'; +import { ApiExecuteSQLError } from '../errors/api-error'; +import { generateGeometryCollectionSQL } from '../utils/spatial-utils'; +import { BaseRepository } from './base-repository'; + +export interface IInsertSpatialTransform { + name: string; + description: string; + notes: string; + transform: string; +} + +export interface IGetSpatialTransformRecord { + spatial_transform_id: number; + name: string; + description: string | null; + notes: string | null; + transform: string; +} + +export interface ITransformSpatialRow { + result_data: FeatureCollection; +} + +export interface ISubmissionSpatialComponent { + submission_spatial_component_ids: number[]; + occurrence_submission_id: number; + spatial_component: FeatureCollection; + geometry: null; + geography: string; +} + +export class SpatialRepository extends BaseRepository { + /** + * get spatial transform records + * + * @param + * @return {*} {Promise} + * @memberof SpatialRepository + */ + async getSpatialTransformRecords(): Promise { + const sqlStatement = SQL` + SELECT + spatial_transform_id, + name, + description, + notes, + transform + FROM + spatial_transform; + `; + + const response = await this.connection.sql(sqlStatement); + + return response.rows; + } + + /** + * Insert record of transform id used for submission spatial component record + * + * @param {number} spatialTransformId + * @param {number} submissionSpatialComponentId + * @return {*} {Promise<{ spatial_transform_submission_id: number }>} + * @memberof SpatialRepository + */ + async insertSpatialTransformSubmissionRecord( + spatialTransformId: number, + submissionSpatialComponentId: number + ): Promise<{ spatial_transform_submission_id: number }> { + const sqlStatement = SQL` + INSERT INTO spatial_transform_submission ( + spatial_transform_id, + submission_spatial_component_id + ) VALUES ( + ${spatialTransformId}, + ${submissionSpatialComponentId} + ) + RETURNING + spatial_transform_submission_id; + `; + + const response = await this.connection.sql<{ spatial_transform_submission_id: number }>(sqlStatement); + + if (response.rowCount !== 1) { + throw new ApiExecuteSQLError( + 'Failed to insert spatial transform submission id and submission spatial component id', + [ + 'SpatialRepository->insertSpatialTransformSubmissionRecord', + 'rowCount was null or undefined, expected rowCount >= 1' + ] + ); + } + return response.rows[0]; + } + + /** + * Run Spatial Transform with transform string on submissionId + * + * @param {number} submissionId + * @param {string} transform + * @return {*} {Promise} + * @memberof SpatialRepository + */ + async runSpatialTransformOnSubmissionId(submissionId: number, transform: string): Promise { + const response = await this.connection.query(transform, [submissionId]); + + return response.rows; + } + + /** + * Insert given transformed data into Spatial Component Table + * + * @param {number} submissionId + * @param {Feature[]} transformedData + * @return {*} {Promise<{ submission_spatial_component_id: number }>} + * @memberof SpatialRepository + */ + async insertSubmissionSpatialComponent( + submissionId: number, + transformedData: FeatureCollection + ): Promise<{ submission_spatial_component_id: number }> { + const sqlStatement = SQL` + INSERT INTO submission_spatial_component ( + occurrence_submission_id, + spatial_component, + geography + ) VALUES ( + ${submissionId}, + ${JSON.stringify(transformedData)} + `; + + if (transformedData.features && transformedData.features.length > 0) { + const geoCollection = generateGeometryCollectionSQL(transformedData.features); + + sqlStatement.append(SQL` + ,public.geography( + public.ST_Force2D( + public.ST_SetSRID( + `); + + sqlStatement.append(geoCollection); + + sqlStatement.append(SQL` + , 4326))) + `); + } else { + sqlStatement.append(SQL` + ,null + `); + } + + sqlStatement.append(SQL` + ) + RETURNING + submission_spatial_component_id; + `); + + const response = await this.connection.sql<{ submission_spatial_component_id: number }>(sqlStatement); + + if (response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to insert submission spatial component details', [ + 'SpatialRepository->insertSubmissionSpatialComponent', + 'rowCount was null or undefined, expected rowCount = 1' + ]); + } + return response.rows[0]; + } + + /** + * Deletes spatial components in a submission id before updating it with new data + * + * @param {number} occurrence_submission_id + * @return {*} {Promise<{ occurrence_submission_id: number }[]>} + * @memberof SpatialRepository + */ + async deleteSpatialComponentsBySubmissionId( + occurrence_submission_id: number + ): Promise<{ occurrence_submission_id: number }[]> { + const sqlStatement = SQL` + DELETE FROM + submission_spatial_component + WHERE + occurrence_submission_id=${occurrence_submission_id} + RETURNING + occurrence_submission_id; + ;`; + + const response = await this.connection.sql<{ occurrence_submission_id: number }>(sqlStatement); + + return response.rows; + } + + /** + * Remove references in spatial_transform_submission table + * + * @param {number} occurrence_submission_id + * @return {*} {Promise<{ occurrence_submission_id: number }[]>} + * @memberof SpatialRepository + */ + async deleteSpatialComponentsSpatialTransformRefsBySubmissionId( + occurrence_submission_id: number + ): Promise<{ occurrence_submission_id: number }[]> { + const sqlStatement = SQL` + DELETE FROM + spatial_transform_submission + WHERE + submission_spatial_component_id IN ( + SELECT + submission_spatial_component_id + FROM + submission_spatial_component + WHERE + occurrence_submission_id=${occurrence_submission_id} + ) + RETURNING + ${occurrence_submission_id}; + `; + + const response = await this.connection.sql<{ occurrence_submission_id: number }>(sqlStatement); + + return response.rows; + } +} diff --git a/api/src/repositories/submission-repository.test.ts b/api/src/repositories/submission-repository.test.ts new file mode 100644 index 0000000000..74fe872a9b --- /dev/null +++ b/api/src/repositories/submission-repository.test.ts @@ -0,0 +1,52 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import { QueryResult } from 'pg'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { HTTP400 } from '../errors/http-error'; +import { getMockDBConnection } from '../__mocks__/db'; +import { SubmissionRepository } from './submission-repository'; + +chai.use(sinonChai); + +describe('SubmissionRepository', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('insertSubmissionStatus', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ + rows: [ + { + id: 1 + } + ] + } as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: () => mockResponse + }); + + const repo = new SubmissionRepository(dbConnection); + const response = await repo.insertSubmissionStatus(1, 'validated'); + + expect(response).to.be.eql(1); + }); + + it('should throw `Failed to update` error', async () => { + const mockResponse = ({} as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: () => mockResponse + }); + + const repo = new SubmissionRepository(dbConnection); + + try { + await repo.insertSubmissionStatus(1, 'validated'); + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.be.eql('Rejected'); + } + }); + }); +}); diff --git a/api/src/repositories/submission-repository.ts b/api/src/repositories/submission-repository.ts new file mode 100644 index 0000000000..7255f6744b --- /dev/null +++ b/api/src/repositories/submission-repository.ts @@ -0,0 +1,46 @@ +import SQL from 'sql-template-strings'; +import { SUBMISSION_MESSAGE_TYPE } from '../constants/status'; +import { SubmissionErrorFromMessageType } from '../utils/submission-error'; +import { BaseRepository } from './base-repository'; + +export class SubmissionRepository extends BaseRepository { + /** + * Insert a record into the submission_status table. + * + * @param {number} occurrenceSubmissionId + * @param {string} submissionStatusType + * @return {*} {Promise} + */ + async insertSubmissionStatus(occurrenceSubmissionId: number, submissionStatusType: string): Promise { + const sqlStatement = SQL` + INSERT INTO submission_status ( + occurrence_submission_id, + submission_status_type_id, + event_timestamp + ) VALUES ( + ${occurrenceSubmissionId}, + ( + SELECT + submission_status_type_id + FROM + submission_status_type + WHERE + name = ${submissionStatusType} + ), + now() + ) + RETURNING + submission_status_id as id; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result || !result.id) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_UPDATE_OCCURRENCE_SUBMISSION); + } + + return result.id; + } +} diff --git a/api/src/repositories/summary-repository.test.ts b/api/src/repositories/summary-repository.test.ts new file mode 100644 index 0000000000..7286b99e84 --- /dev/null +++ b/api/src/repositories/summary-repository.test.ts @@ -0,0 +1,388 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import { QueryResult } from 'pg'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { SUMMARY_SUBMISSION_MESSAGE_TYPE } from '../constants/status'; +import { HTTP400 } from '../errors/http-error'; +import { PostSummaryDetails } from '../models/summaryresults-create'; +import { getMockDBConnection } from '../__mocks__/db'; +import { ISummarySubmissionMessagesResponse, SummaryRepository } from './summary-repository'; + +chai.use(sinonChai); + +describe('SummaryRepository', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('findSummarySubmissionById', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ + rows: [{ id: 1 }] + } as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: () => mockResponse + }); + + const repo = new SummaryRepository(dbConnection); + const response = await repo.findSummarySubmissionById(1); + + expect(response).to.be.eql({ id: 1 }); + }); + + it('should throw a HTTP400 error when the query fails', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ query: () => mockResponse }); + + const repo = new SummaryRepository(dbConnection); + + try { + await repo.findSummarySubmissionById(1); + + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.be.eql('Failed to query survey summary submission table'); + } + }); + }); + + describe('getLatestSurveySummarySubmission', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ + rows: [ + { + id: 1 + } + ] + } as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: () => mockResponse + }); + + const repo = new SummaryRepository(dbConnection); + const response = await repo.getLatestSurveySummarySubmission(1); + + expect(response).to.be.eql({ id: 1 }); + }); + it('should throw a HTTP400 error when the query fails', async () => { + const dbConnection = getMockDBConnection(); + const repo = new SummaryRepository(dbConnection); + + try { + await repo.getLatestSurveySummarySubmission(1); + + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.be.eql('Failed to query survey summary submission table'); + } + }); + }); + + describe('updateSurveySummarySubmissionWithKey', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ + rows: [ + { + survey_summary_submission_id: 1 + } + ] + } as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: () => mockResponse + }); + + const repo = new SummaryRepository(dbConnection); + const response = await repo.updateSurveySummarySubmissionWithKey(1, 'abc'); + + expect(response).to.be.eql({ survey_summary_submission_id: 1 }); + }); + it('should throw a HTTP400 error when the query fails', async () => { + const dbConnection = getMockDBConnection(); + const repo = new SummaryRepository(dbConnection); + + try { + await repo.updateSurveySummarySubmissionWithKey(1, 'abc'); + + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.be.eql('Failed to update survey summary submission record'); + } + }); + }); + + describe('insertSurveySummarySubmission', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ + rows: [ + { + survey_summary_submission_id: 1 + } + ] + } as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: () => mockResponse + }); + + const repo = new SummaryRepository(dbConnection); + const response = await repo.insertSurveySummarySubmission(1, 'source', 'file_name'); + + expect(response).to.be.eql({ survey_summary_submission_id: 1 }); + }); + it('should throw a HTTP400 error when the query fails', async () => { + // const mockQuery = sinon + // .stub(SummaryRepository.prototype, 'insertSurveySummarySubmission') + // .rejects(new Error('test error')); + + const dbConnection = getMockDBConnection(); + const repo = new SummaryRepository(dbConnection); + + try { + await repo.insertSurveySummarySubmission(1, 'source', 'file_name'); + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.be.eql('Failed to insert survey summary submission record'); + } + }); + }); + + describe('insertSurveySummaryDetails', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ + rows: [ + { + survey_summary_detail_id: 1 + } + ] + } as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: () => mockResponse + }); + + const repo = new SummaryRepository(dbConnection); + const response = await repo.insertSurveySummaryDetails(1, ({} as unknown) as PostSummaryDetails); + + expect(response).to.be.eql({ survey_summary_detail_id: 1 }); + }); + + it('should throw a HTTP400 error when the query fails', async () => { + const dbConnection = getMockDBConnection(); + const repo = new SummaryRepository(dbConnection); + + try { + await repo.insertSurveySummaryDetails(1, ({} as unknown) as PostSummaryDetails); + + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.be.eql('Failed to insert summary details data'); + } + }); + }); + + describe('deleteSummarySubmission', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ rows: [{ delete_timestamp: '2022-02-02' }], rowCount: 1 } as any) as Promise< + QueryResult + >; + const dbConnection = getMockDBConnection({ + query: () => mockResponse + }); + + const repo = new SummaryRepository(dbConnection); + const response = await repo.deleteSummarySubmission(1); + + expect(response).to.be.eql((await mockResponse).rowCount); + }); + + it('should throw a HTTP400 error when the query fails', async () => { + const dbConnection = getMockDBConnection(); + const repo = new SummaryRepository(dbConnection); + + try { + await repo.deleteSummarySubmission(1); + + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.be.eql('Failed to soft delete survey summary submission'); + } + }); + }); + + describe('getSummarySubmissionMessages', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ + rows: [ + { + id: 1 + } + ] + } as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: () => mockResponse + }); + + const repo = new SummaryRepository(dbConnection); + const response = await repo.getSummarySubmissionMessages(1); + + expect(response).to.be.eql([{ id: 1 }]); + }); + it('should throw a HTTP400 error when the query fails', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repo = new SummaryRepository(dbConnection); + + try { + await repo.getSummarySubmissionMessages(1); + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.be.eql('Failed to query survey summary submission table'); + } + }); + }); + + describe('getSummaryTemplateIdFromNameVersion', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ + rows: [ + { + summary_template_id: 1 + } + ] + } as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: () => mockResponse + }); + + const repo = new SummaryRepository(dbConnection); + const response = await repo.getSummaryTemplateIdFromNameVersion('templateName', 'templateVersion'); + + expect(response).to.be.eql({ summary_template_id: 1 }); + }); + it('should throw a HTTP400 error when the query fails', async () => { + const dbConnection = getMockDBConnection(); + const repo = new SummaryRepository(dbConnection); + + try { + await repo.getSummaryTemplateIdFromNameVersion('templateName', 'templateVersion'); + + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.be.eql('Failed to query summary templates table'); + } + }); + }); + + describe('getSummaryTemplateSpeciesRecords', () => { + it('should succeed with valid data (no species)', async () => { + const mockResponse = ({ + rows: ([ + { + summary_template_species_id: 1, + summary_template_id: 1, + wldtaxonomic_units_id: 1, + validation: 'validation_schema', + create_user: 1, + revision_count: 1 + } + ] as unknown) as ISummarySubmissionMessagesResponse[] + } as any) as Promise>; + + const dbConnection = getMockDBConnection({ + knex: async () => mockResponse + }); + + const repo = new SummaryRepository(dbConnection); + sinon + .stub(SummaryRepository.prototype, 'getSummaryTemplateIdFromNameVersion') + .resolves({ summary_template_id: 1 }); + + const response = await repo.getSummaryTemplateSpeciesRecords('templateName', 'templateVersion'); + + expect(response).to.be.eql((await mockResponse).rows); + }); + + it('should succeed with valid data (with species)', async () => { + const mockResponse = ({ + rows: ([ + { + summary_template_species_id: 1, + summary_template_id: 1, + wldtaxonomic_units_id: 1, + validation: 'validation_schema', + create_user: 1, + revision_count: 1 + } + ] as unknown) as ISummarySubmissionMessagesResponse[] + } as any) as Promise>; + + const dbConnection = getMockDBConnection({ + knex: async () => mockResponse + }); + + const repo = new SummaryRepository(dbConnection); + sinon + .stub(SummaryRepository.prototype, 'getSummaryTemplateIdFromNameVersion') + .resolves({ summary_template_id: 1 }); + const response = await repo.getSummaryTemplateSpeciesRecords('templateName', 'templateVersion', [1, 2]); + + expect(response).to.be.eql((await mockResponse).rows); + }); + it('should throw a HTTP400 error when the query fails', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ knex: () => mockResponse }); + + sinon + .stub(SummaryRepository.prototype, 'getSummaryTemplateIdFromNameVersion') + .resolves({ summary_template_id: 1 }); + + const repo = new SummaryRepository(dbConnection); + + try { + await repo.getSummaryTemplateSpeciesRecords('templateName', 'templateVersion', [1, 2]); + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.be.eql('Failed to query summary template species table'); + } + }); + }); + + describe('insertSummarySubmissionMessage', () => { + it('should succeed with valid data', async () => { + const mockResponse = ({ + rowCount: 1 + } as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: () => mockResponse + }); + + const repo = new SummaryRepository(dbConnection); + const response = await repo.insertSummarySubmissionMessage( + 1, + SUMMARY_SUBMISSION_MESSAGE_TYPE.DUPLICATE_HEADER, + 'message' + ); + + expect(response).to.be.eql((await mockResponse).rows); + }); + + it('should throw an API error when the query fails', async () => { + const mockResponse = ({ + rowCount: 0 + } as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: () => mockResponse + }); + + const repo = new SummaryRepository(dbConnection); + + try { + await repo.insertSummarySubmissionMessage(1, SUMMARY_SUBMISSION_MESSAGE_TYPE.DUPLICATE_HEADER, 'message'); + + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.be.eql('Failed to insert summary submission message record'); + } + }); + }); +}); diff --git a/api/src/repositories/summary-repository.ts b/api/src/repositories/summary-repository.ts new file mode 100644 index 0000000000..9b4a7e5b6e --- /dev/null +++ b/api/src/repositories/summary-repository.ts @@ -0,0 +1,467 @@ +import SQL from 'sql-template-strings'; +import { MESSAGE_CLASS_NAME, SUMMARY_SUBMISSION_MESSAGE_TYPE } from '../constants/status'; +import { getKnex } from '../database/db'; +import { ApiExecuteSQLError } from '../errors/api-error'; +import { HTTP400 } from '../errors/http-error'; +import { PostSummaryDetails } from '../models/summaryresults-create'; +import { getLogger } from '../utils/logger'; +import { BaseRepository } from './base-repository'; + +export interface ISummaryTemplateSpeciesData { + summary_template_species_id: number; + summary_template_id: number; + wldtaxonomic_units_id: number | null; + validation: string; + create_user: number; + update_date: string | null; + update_user: number | null; + revision_count: number; +} + +export interface ISurveySummaryDetails { + id: number; + key: string; + file_name: string; + delete_timestamp: string | null; + submission_message_type_id: number; + message: string; + submission_message_type_name: string; + summary_submission_message_class_id: number; + submission_message_class_name: MESSAGE_CLASS_NAME; +} + +export interface ISummarySubmissionResponse { + survey_summary_submission_id: number; + survey_id: number; + source: string; + event_timestamp: string | null; + delete_timestamp: string | null; + key: string; + file_name: string; + create_user: number; + update_date: string | null; + update_user: number | null; + revision_count: number; + summary_template_species_id: number | null; +} + +export interface ISummarySubmissionMessagesResponse { + id: number; + class: string; + type: string; + message: string; +} + +const defaultLog = getLogger('repositories/summary-repository'); + +export class SummaryRepository extends BaseRepository { + /** + * Query to find the record for a single summary submission by summarySubmissionId. + * + * @param {number} summarySubmissionId + * @returns {Promise} The summary submission record + */ + async findSummarySubmissionById(summarySubmissionId: number): Promise { + const sqlStatement = SQL` + SELECT + * + FROM + survey_summary_submission sss + WHERE + sss.survey_summary_submission_id = ${summarySubmissionId} + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + if (!response) { + throw new HTTP400('Failed to query survey summary submission table'); + } + + return response && response.rows && response.rows[0]; + } + + /** + * Finds the latest summary submission for a given survey. + * + * @param {number} surveyId the ID of the survey + * @returns {{Promise}} the latest survey summary record for the given survey + */ + async getLatestSurveySummarySubmission(surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + sss.survey_summary_submission_id as id, + sss.key, + sss.file_name, + sss.delete_timestamp, + sssm.submission_message_type_id, + sssm.message, + ssmt.name as submission_message_type_name, + ssmt.summary_submission_message_class_id, + ssmc.name as submission_message_class_name + FROM + survey_summary_submission as sss + LEFT OUTER JOIN + survey_summary_submission_message as sssm + ON + sss.survey_summary_submission_id = sssm.survey_summary_submission_id + LEFT OUTER JOIN + summary_submission_message_type as ssmt + ON + sssm.submission_message_type_id = ssmt.submission_message_type_id + LEFT OUTER JOIN + summary_submission_message_class as ssmc + ON + ssmt.summary_submission_message_class_id = ssmc.summary_submission_message_class_id + WHERE + sss.survey_id = ${surveyId} + ORDER BY + sss.event_timestamp DESC + LIMIT 1; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + if (!response) { + throw new HTTP400('Failed to query survey summary submission table'); + } + + return response && response.rows && response.rows[0]; + } + + /** + * Updates a survey summary submission record with an S3 key. + * + * @param {number} summarySubmissionId the ID of the record to update + * @param {string} key S3 key + * @return {Promise<{ survey_summary_submission_id: number }>} The ID of the updated record + */ + async updateSurveySummarySubmissionWithKey( + summarySubmissionId: number, + key: string + ): Promise<{ survey_summary_submission_id: number }> { + const sqlStatement = SQL` + UPDATE survey_summary_submission + SET + key = ${key} + WHERE + survey_summary_submission_id = ${summarySubmissionId} + RETURNING survey_summary_submission_id; + `; + + const response = await this.connection.query<{ survey_summary_submission_id: number }>( + sqlStatement.text, + sqlStatement.values + ); + + if (!response) { + throw new HTTP400('Failed to update survey summary submission record'); + } + + return response && response.rows && response.rows[0]; + } + + /** + * Inserts a survey summary submission record. + * + * @param {number} surveyId the ID of the survey. + * @param {string} source the source of the record. + * @param {string} file_name the file name of the submission. + * @return {Promise<{ survey_summary_submission_id: number }>} the ID of the inserted record. + */ + async insertSurveySummarySubmission( + surveyId: number, + source: string, + file_name: string + ): Promise<{ survey_summary_submission_id: number }> { + const sqlStatement = SQL` + INSERT INTO survey_summary_submission ( + survey_id, + source, + file_name, + event_timestamp + ) VALUES ( + ${surveyId}, + ${source}, + ${file_name}, + now() + ) + RETURNING survey_summary_submission_id; + `; + + const response = await this.connection.query<{ survey_summary_submission_id: number }>( + sqlStatement.text, + sqlStatement.values + ); + + if (!response) { + throw new HTTP400('Failed to insert survey summary submission record'); + } + + return response && response.rows && response.rows[0]; + } + + /** + * Inserts a record for survey summary details. + * + * @param {number} summarySubmissionId the ID of the summary submission + * @param {string} summaryDetails the details being inserted + * @return {Promise<{ survey_summary_detail_id: number }>} the ID of the details record. + */ + async insertSurveySummaryDetails( + summarySubmissionId: number, + summaryDetails: PostSummaryDetails + ): Promise<{ survey_summary_detail_id: number }> { + const sqlStatement = SQL` + INSERT INTO survey_summary_detail ( + survey_summary_submission_id, + study_area_id, + population_unit, + block_sample_unit_id, + parameter, + stratum, + observed, + estimated, + sightability_model, + sightability_correction, + standard_error, + coefficient_variation, + confidence_level_percent, + confidence_limit_lower, + confidence_limit_upper, + total_area_surveyed_sqm, + area_flown, + total_kilometers_surveyed, + best_parameter_flag, + outlier_blocks_removed, + total_marked_animals_observed, + marked_animals_available, + parameter_comments + ) VALUES ( + ${summarySubmissionId}, + ${summaryDetails.study_area_id}, + ${summaryDetails.population_unit}, + ${summaryDetails.block_sample_unit_id}, + ${summaryDetails.parameter}, + ${summaryDetails.stratum}, + ${summaryDetails.observed}, + ${summaryDetails.estimated}, + ${summaryDetails.sightability_model}, + ${summaryDetails.sightability_correction_factor}, + ${summaryDetails.standard_error}, + ${summaryDetails.coefficient_variation}, + ${summaryDetails.confidence_level_percent}, + ${summaryDetails.confidence_limit_lower}, + ${summaryDetails.confidence_limit_upper}, + ${summaryDetails.total_area_survey_sqm}, + ${summaryDetails.area_flown}, + ${summaryDetails.total_kilometers_surveyed}, + ${summaryDetails.best_parameter_flag}, + ${summaryDetails.outlier_blocks_removed}, + ${summaryDetails.total_marked_animals_observed}, + ${summaryDetails.marked_animals_available}, + ${summaryDetails.parameter_comments} + ) + RETURNING survey_summary_detail_id; + `; + + const response = await this.connection.query<{ survey_summary_detail_id: number }>( + sqlStatement.text, + sqlStatement.values + ); + + if (!response) { + throw new HTTP400('Failed to insert summary details data'); + } + + return response && response.rows && response.rows[0]; + } + + /** + * Soft deletes a summary submission entry by ID + * + * @param {number} summarySubmissionId the ID of the summary submission + * @returns {Promise} row count if delete is successful, null otherwise. + */ + async deleteSummarySubmission(summarySubmissionId: number): Promise { + const sqlStatement = SQL` + UPDATE + survey_summary_submission + SET + delete_timestamp = now() + WHERE + survey_summary_submission_id = ${summarySubmissionId} + RETURNING + delete_timestamp; + `; + + const response = await this.connection.query<{ delete_timestamp: string }>(sqlStatement.text, sqlStatement.values); + + if (!response || !response?.rows[0]?.delete_timestamp) { + throw new HTTP400('Failed to soft delete survey summary submission'); + } + + return (response && response.rowCount) || null; + } + + /** + * Retreives the list of messages for a summary submission. + * + * @param {number} summarySubmissionId the ID of the summary submission. + * @returns {Promise} all messages for the given summary submission. + */ + async getSummarySubmissionMessages(summarySubmissionId: number): Promise { + const sqlStatement = SQL` + SELECT + sssm.submission_message_id as id, + sssm.message, + ssmt.name as type, + ssmc.name as class + FROM + survey_summary_submission as sss + LEFT OUTER JOIN + survey_summary_submission_message as sssm + ON + sssm.survey_summary_submission_id = sss.survey_summary_submission_id + LEFT OUTER JOIN + summary_submission_message_type as ssmt + ON + ssmt.submission_message_type_id = sssm.submission_message_type_id + LEFT OUTER JOIN + summary_submission_message_class as ssmc + ON + ssmc.summary_submission_message_class_id = ssmt.summary_submission_message_class_id + WHERE + sss.survey_summary_submission_id = ${summarySubmissionId} + ORDER BY + sssm.submission_message_id; + `; + + const response = await this.connection.query( + sqlStatement.text, + sqlStatement.values + ); + + if (!response) { + throw new HTTP400('Failed to query survey summary submission table'); + } + + return response && response.rows; + } + + /** + * Retreives the ID of a summary template based on its name and version number. + * @param templateName + * @param templateVersion + * @returns + */ + async getSummaryTemplateIdFromNameVersion( + templateName: string, + templateVersion: string + ): Promise<{ summary_template_id: number }> { + const sqlStatement = SQL` + SELECT + st.summary_template_id + FROM + summary_template st + WHERE + st.name = ${templateName} + AND + st.version = ${templateVersion} + ; + `; + + const response = await this.connection.query<{ summary_template_id: number }>( + sqlStatement.text, + sqlStatement.values + ); + + if (!response) { + throw new HTTP400('Failed to query summary templates table'); + } + + return response && response.rows && response.rows[0]; + } + + /** + * Reetrieves all summary template species records that are constrained by the given + * template name, version and survey focal species. + * @param {number} templateName The name of the template. + * @param {number} templateVersion The version of the template. + * @param {number} [species] the wild taxonomic species code. + * @returns {ISummaryTemplateSpeciesData[]} + */ + async getSummaryTemplateSpeciesRecords( + templateName: string, + templateVersion: string, + species?: number[] + ): Promise { + const templateRow = await this.getSummaryTemplateIdFromNameVersion(templateName, templateVersion); + + const queryBuilder = getKnex() + .select() + .fromRaw('summary_template_species sts') + .where('sts.summary_template_id', templateRow.summary_template_id) + .andWhere((qb) => { + qb.whereIn('sts.wldtaxonomic_units_id', species || []); + }) + .orWhere('sts.wldtaxonomic_units_id', null); + + const response = await this.connection.knex(queryBuilder); + + if (!response) { + throw new HTTP400('Failed to query summary template species table'); + } + + return response.rows; + } + + /** + * Insert a record into the survey_summary_submission_message table. + * @param summarySubmissionId the ID of the summary submission record. + * @param summarySubmissionMessageType the message type. + * @param summarySubmissionMessage the full message. + */ + async insertSummarySubmissionMessage( + summarySubmissionId: number, + summarySubmissionMessageType: SUMMARY_SUBMISSION_MESSAGE_TYPE, + summarySubmissionMessage: string + ): Promise { + defaultLog.debug({ + label: 'insertSummarySubmissionMessage', + summarySubmissionId, + summarySubmissionMessageType, + summarySubmissionMessage + }); + const sqlStatement = SQL` + INSERT INTO survey_summary_submission_message ( + survey_summary_submission_id, + submission_message_type_id, + event_timestamp, + message + ) VALUES ( + ${summarySubmissionId}, + ( + SELECT + submission_message_type_id + FROM + summary_submission_message_type + WHERE + name = ${summarySubmissionMessageType} + ), + now(), + ${summarySubmissionMessage} + ) + RETURNING + submission_message_id; + `; + + const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + + if (response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to insert summary submission message record', [ + 'ErrorRepository->insertSummarySubmissionMessage', + 'rowCount was null or undefined, expected rowCount = 1' + ]); + } + } +} diff --git a/api/src/repositories/survey-repository.test.ts b/api/src/repositories/survey-repository.test.ts new file mode 100644 index 0000000000..c12bba1e11 --- /dev/null +++ b/api/src/repositories/survey-repository.test.ts @@ -0,0 +1,935 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import { QueryResult } from 'pg'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { GetReportAttachmentsData } from '../models/project-view'; +import { PostProprietorData, PostSurveyObject } from '../models/survey-create'; +import { PutSurveyObject } from '../models/survey-update'; +import { + GetAttachmentsData, + GetSurveyData, + GetSurveyFundingSources, + GetSurveyLocationData, + GetSurveyProprietorData, + GetSurveyPurposeAndMethodologyData +} from '../models/survey-view'; +import { getMockDBConnection } from '../__mocks__/db'; +import { + IObservationSubmissionInsertDetails, + IObservationSubmissionUpdateDetails, + SurveyRepository +} from './survey-repository'; + +chai.use(sinonChai); + +describe('SurveyRepository', () => { + afterEach(() => { + sinon.restore(); + }); + describe('deleteSurvey', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.deleteSurvey(1); + + expect(response).to.eql(undefined); + }); + }); + + describe('getSurveyIdsByProjectId', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getSurveyIdsByProjectId(1); + + expect(response).to.eql([{ id: 1 }]); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.getSurveyIdsByProjectId(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project survey ids'); + } + }); + }); + + describe('getSurveyData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getSurveyData(1); + + expect(response).to.eql(new GetSurveyData({ id: 1 })); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.getSurveyData(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get project survey details data'); + } + }); + }); + + describe('getSpeciesData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getSpeciesData(1); + + expect(response).to.eql([{ id: 1 }]); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.getSpeciesData(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get survey species data'); + } + }); + }); + + describe('getSurveyPurposeAndMethodology', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getSurveyPurposeAndMethodology(1); + + expect(response).to.eql(new GetSurveyPurposeAndMethodologyData({ id: 1 })); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.getSurveyPurposeAndMethodology(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get survey purpose and methodology data'); + } + }); + }); + + describe('getSurveyFundingSourcesData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getSurveyFundingSourcesData(1); + + expect(response).to.eql(new GetSurveyFundingSources([{ id: 1 }])); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.getSurveyFundingSourcesData(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get survey funding sources data'); + } + }); + }); + + describe('getSurveyProprietorDataForView', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getSurveyProprietorDataForView(1); + + expect(response).to.eql(new GetSurveyProprietorData({ id: 1 })); + }); + + it('should return Null', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getSurveyProprietorDataForView(1); + + expect(response).to.eql(null); + }); + }); + + describe('getSurveyLocationData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getSurveyLocationData(1); + + expect(response).to.eql(new GetSurveyLocationData({ id: 1 })); + }); + }); + + describe('getOccurrenceSubmissionId', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getOccurrenceSubmissionId(1); + + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.getOccurrenceSubmissionId(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get survey Occurrence submission Id'); + } + }); + }); + + describe('getLatestSurveyOccurrenceSubmission', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getLatestSurveyOccurrenceSubmission(1); + + expect(response).to.eql({ id: 1 }); + }); + + it('should return Null', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getLatestSurveyOccurrenceSubmission(1); + + expect(response).to.eql(null); + }); + }); + + describe('getSummaryResultId', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getSummaryResultId(1); + + expect(response).to.eql({ id: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.getSummaryResultId(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get summary result id'); + } + }); + }); + + describe('getAttachmentsData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getAttachmentsData(1); + + expect(response).to.eql(new GetAttachmentsData([{ id: 1 }])); + }); + + it('should return Null', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getAttachmentsData(1); + + expect(response).to.eql(new GetAttachmentsData(undefined)); + }); + }); + + describe('getReportAttachmentsData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getReportAttachmentsData(1); + + expect(response).to.eql(new GetReportAttachmentsData([{ id: 1 }])); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.getReportAttachmentsData(1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to get attachments data'); + } + }); + }); + + describe('insertSurveyData', () => { + it('should return result and add the geometry', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const input = ({ + survey_details: { + survey_name: 'name', + start_date: 'start', + end_date: 'end', + biologist_first_name: 'first', + biologist_last_name: 'last' + }, + purpose_and_methodology: { + field_method_id: 1, + additional_details: '', + ecological_season_id: 1, + intended_outcome_id: 1, + surveyed_all_areas: 'Y' + }, + location: { geometry: [{ id: 1 }] } + } as unknown) as PostSurveyObject; + + const response = await repository.insertSurveyData(1, input); + + expect(response).to.eql(1); + }); + + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const input = ({ + survey_details: { + survey_name: 'name', + start_date: 'start', + end_date: 'end', + biologist_first_name: 'first', + biologist_last_name: 'last' + }, + purpose_and_methodology: { + field_method_id: 1, + additional_details: '', + ecological_season_id: 1, + intended_outcome_id: 1, + surveyed_all_areas: 'Y' + }, + location: { geometry: [] } + } as unknown) as PostSurveyObject; + + const response = await repository.insertSurveyData(1, input); + + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const input = ({ + survey_details: { + survey_name: 'name', + start_date: 'start', + end_date: 'end', + biologist_first_name: 'first', + biologist_last_name: 'last' + }, + purpose_and_methodology: { + field_method_id: 1, + additional_details: '', + ecological_season_id: 1, + intended_outcome_id: 1, + surveyed_all_areas: 'Y' + }, + location: { geometry: [{ id: 1 }] } + } as unknown) as PostSurveyObject; + + try { + await repository.insertSurveyData(1, input); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert survey data'); + } + }); + }); + + describe('insertFocalSpecies', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.insertFocalSpecies(1, 1); + + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.insertFocalSpecies(1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert focal species data'); + } + }); + }); + + describe('insertAncillarySpecies', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.insertAncillarySpecies(1, 1); + + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.insertAncillarySpecies(1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert ancillary species data'); + } + }); + }); + + describe('insertVantageCodes', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.insertVantageCodes(1, 1); + + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.insertVantageCodes(1, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert vantage codes'); + } + }); + }); + + describe('insertSurveyProprietor', () => { + it('should return undefined if data is not proprietary', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const input = ({ + survey_data_proprietary: false + } as unknown) as PostProprietorData; + + const response = await repository.insertSurveyProprietor(input, 1); + + expect(response).to.eql(undefined); + }); + + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const input = ({ + survey_data_proprietary: true, + prt_id: 1, + fn_id: 1, + rationale: 'ratio', + proprietor_name: 'name', + disa_required: false + } as unknown) as PostProprietorData; + + const response = await repository.insertSurveyProprietor(input, 1); + + expect(response).to.eql(1); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const input = ({ + survey_data_proprietary: true, + prt_id: 1, + fn_id: 1, + rationale: 'ratio', + proprietor_name: 'name', + disa_required: false + } as unknown) as PostProprietorData; + + try { + await repository.insertSurveyProprietor(input, 1); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert survey proprietor data'); + } + }); + }); + + describe('associateSurveyToPermit', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.associateSurveyToPermit(1, 1, '1'); + + expect(response).to.eql(undefined); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.associateSurveyToPermit(1, 1, '1'); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to update survey permit record'); + } + }); + }); + + describe('insertSurveyPermit', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.insertSurveyPermit(1, 1, 1, 'number', 'type'); + + expect(response).to.eql(undefined); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.insertSurveyPermit(1, 1, 1, 'number', 'type'); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert survey permit record'); + } + }); + }); + + describe('insertSurveyFundingSource', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.insertSurveyFundingSource(1, 1); + + expect(response).to.eql(undefined); + }); + }); + + describe('deleteSurveySpeciesData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.deleteSurveySpeciesData(1); + + expect(response).to.eql(undefined); + }); + }); + + describe('unassociatePermitFromSurvey', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.unassociatePermitFromSurvey(1); + + expect(response).to.eql(undefined); + }); + }); + + describe('deleteSurveyFundingSourcesData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.deleteSurveyFundingSourcesData(1); + + expect(response).to.eql(undefined); + }); + }); + + describe('deleteSurveyProprietorData', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.deleteSurveyProprietorData(1); + + expect(response).to.eql(undefined); + }); + }); + + describe('deleteSurveyVantageCodes', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.deleteSurveyVantageCodes(1); + + expect(response).to.eql(undefined); + }); + }); + + describe('updateSurveyDetailsData', () => { + it('should return undefined and ue all inputs', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ knex: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const input = ({ + survey_details: { + name: 'name', + start_date: 'start', + end_date: 'end', + lead_first_name: 'first', + lead_last_name: 'last', + revision_count: 1 + }, + purpose_and_methodology: { + field_method_id: 1, + additional_details: '', + ecological_season_id: 1, + intended_outcome_id: 1, + surveyed_all_areas: 'Y', + revision_count: 1 + }, + location: { geometry: [{ id: 1 }] } + } as unknown) as PutSurveyObject; + + const response = await repository.updateSurveyDetailsData(1, input); + + expect(response).to.eql(undefined); + }); + + it('should return undefined and ue all inputs', async () => { + const mockResponse = ({ rows: [{ id: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ knex: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const input = ({ + survey_details: { + name: 'name', + start_date: 'start', + end_date: 'end', + lead_first_name: 'first', + lead_last_name: 'last', + revision_count: 1 + }, + purpose_and_methodology: { + field_method_id: 1, + additional_details: '', + ecological_season_id: 1, + intended_outcome_id: 1, + surveyed_all_areas: 'Y', + revision_count: 1 + }, + location: { geometry: [] } + } as unknown) as PutSurveyObject; + + const response = await repository.updateSurveyDetailsData(1, input); + + expect(response).to.eql(undefined); + }); + + it('should throw an error', async () => { + const mockResponse = ({ rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const input = ({ + survey_details: { + name: 'name', + start_date: 'start', + end_date: 'end', + lead_first_name: 'first', + lead_last_name: 'last', + revision_count: 1 + }, + purpose_and_methodology: { + field_method_id: 1, + additional_details: '', + ecological_season_id: 1, + intended_outcome_id: 1, + surveyed_all_areas: 'Y', + revision_count: 1 + }, + location: { geometry: [] } + } as unknown) as PutSurveyObject; + + try { + await repository.updateSurveyDetailsData(1, input); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to update survey data'); + } + }); + }); + + describe('getOccurrenceSubmissionMessages', () => { + it('should return result', async () => { + const mockResponse = ({ + rows: [ + { + id: 1, + type: 'type', + status: 'status', + class: 'class', + message: 'message' + } + ], + rowCount: 1 + } as any) as Promise>; + + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getOccurrenceSubmissionMessages(1); + + expect(response).to.eql([ + { + id: 1, + type: 'type', + status: 'status', + class: 'class', + message: 'message' + } + ]); + }); + + it('should return empty array', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ sql: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.getOccurrenceSubmissionMessages(1); + + expect(response).to.eql([]); + }); + }); + + describe('insertSurveyOccurrenceSubmission', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ submissionId: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ knex: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.insertSurveyOccurrenceSubmission({ + surveyId: 1 + } as IObservationSubmissionInsertDetails); + + expect(response).to.eql({ submissionId: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ knex: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.insertSurveyOccurrenceSubmission({ surveyId: 1 } as IObservationSubmissionInsertDetails); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to insert survey occurrence submission'); + } + }); + }); + + describe('updateSurveyOccurrenceSubmission', () => { + it('should return result', async () => { + const mockResponse = ({ rows: [{ submissionId: 1 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ knex: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.updateSurveyOccurrenceSubmission({ + submissionId: 1 + } as IObservationSubmissionUpdateDetails); + + expect(response).to.eql({ submissionId: 1 }); + }); + + it('should throw an error', async () => { + const mockResponse = (undefined as any) as Promise>; + const dbConnection = getMockDBConnection({ knex: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await repository.updateSurveyOccurrenceSubmission({ submissionId: 1 } as IObservationSubmissionUpdateDetails); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to update survey occurrence submission'); + } + }); + }); + + describe('deleteOccurrenceSubmission', () => { + it('should return 1 upon success', async () => { + const mockResponse = ({ rows: [{ submissionId: 2 }], rowCount: 1 } as any) as Promise>; + const dbConnection = getMockDBConnection({ knex: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + const response = await repository.deleteOccurrenceSubmission(2); + + expect(response).to.eql(1); + }); + + it('should throw an error upon failure', async () => { + const mockResponse = ({ rows: [], rowCount: 0 } as any) as Promise>; + const dbConnection = getMockDBConnection({ knex: () => mockResponse }); + + const repository = new SurveyRepository(dbConnection); + + try { + await await repository.deleteOccurrenceSubmission(2); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.equal('Failed to delete survey occurrence submission'); + } + }); + }); +}); diff --git a/api/src/repositories/survey-repository.ts b/api/src/repositories/survey-repository.ts new file mode 100644 index 0000000000..0d0140f27e --- /dev/null +++ b/api/src/repositories/survey-repository.ts @@ -0,0 +1,993 @@ +import SQL from 'sql-template-strings'; +import { MESSAGE_CLASS_NAME, SUBMISSION_MESSAGE_TYPE, SUBMISSION_STATUS_TYPE } from '../constants/status'; +import { getKnex } from '../database/db'; +import { ApiExecuteSQLError } from '../errors/api-error'; +import { PostProprietorData, PostSurveyObject } from '../models/survey-create'; +import { PutSurveyObject } from '../models/survey-update'; +import { + GetAttachmentsData, + GetReportAttachmentsData, + GetSurveyData, + GetSurveyFundingSources, + GetSurveyLocationData, + GetSurveyProprietorData, + GetSurveyPurposeAndMethodologyData +} from '../models/survey-view'; +import { queries } from '../queries/queries'; +import { getLogger } from '../utils/logger'; +import { BaseRepository } from './base-repository'; + +export interface IGetSpeciesData { + wldtaxonomic_units_id: string; + is_focal: boolean; +} + +export interface IGetLatestSurveyOccurrenceSubmission { + id: number; + survey_id: number; + source: string; + delete_timestamp: string; + event_timestamp: string; + input_key: string; + input_file_name: string; + output_key: string; + output_file_name: string; + submission_status_id: number; + submission_status_type_id: number; + submission_status_type_name?: SUBMISSION_STATUS_TYPE; + submission_message_id: number; + submission_message_type_id: number; + message: string; + submission_message_type_name: string; +} + +export interface IOccurrenceSubmissionMessagesResponse { + id: number; + class: MESSAGE_CLASS_NAME; + type: SUBMISSION_MESSAGE_TYPE; + status: SUBMISSION_STATUS_TYPE; + message: string; +} + +export interface IObservationSubmissionInsertDetails { + surveyId: number; + source: string; + inputFileName?: string; + inputKey?: string; + outputFileName?: string; + outputKey?: string; +} + +export interface IObservationSubmissionUpdateDetails { + submissionId: number; + inputFileName?: string; + inputKey?: string; + outputFileName?: string; + outputKey?: string; +} + +const defaultLog = getLogger('repositories/survey-repository'); + +export class SurveyRepository extends BaseRepository { + async deleteSurvey(surveyId: number): Promise { + const sqlStatement = SQL`call api_delete_survey(${surveyId})`; + + await this.connection.sql(sqlStatement); + } + + async getSurveyIdsByProjectId(projectId: number): Promise<{ id: number }[]> { + const sqlStatement = SQL` + SELECT + survey_id as id + FROM + survey + WHERE + project_id = ${projectId}; + `; + + const response = await this.connection.sql<{ id: number }>(sqlStatement); + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get project survey ids', [ + 'SurveyRepository->getSurveyIdsByProjectId', + 'response was null or undefined, expected response != null' + ]); + } + + return response.rows; + } + + async getSurveyData(surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + * + FROM + survey + WHERE + survey_id = ${surveyId}; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows && response.rows?.[0]) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get project survey details data', [ + 'SurveyRepository->getSurveyData', + 'response was null or undefined, expected response != null' + ]); + } + + return new GetSurveyData(result); + } + + async getSpeciesData(surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + wldtaxonomic_units_id, + is_focal + FROM + study_species + WHERE + survey_id = ${surveyId}; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get survey species data', [ + 'SurveyRepository->getSpeciesData', + 'response was null or undefined, expected response != null' + ]); + } + + return result; + } + + async getSurveyPurposeAndMethodology(surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + s.field_method_id, + s.additional_details, + s.ecological_season_id, + s.intended_outcome_id, + array_remove(array_agg(sv.vantage_id), NULL) as vantage_ids + FROM + survey s + LEFT OUTER JOIN + survey_vantage sv + ON + sv.survey_id = s.survey_id + WHERE + s.survey_id = ${surveyId} + GROUP BY + s.field_method_id, + s.additional_details, + s.ecological_season_id, + s.intended_outcome_id; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows[0]) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get survey purpose and methodology data', [ + 'SurveyRepository->getSurveyPurposeAndMethodology', + 'response was null or undefined, expected response != null' + ]); + } + + return new GetSurveyPurposeAndMethodologyData(result); + } + + async getSurveyFundingSourcesData(surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + sfs.project_funding_source_id, + fs.funding_source_id, + pfs.funding_source_project_id, + pfs.funding_amount::numeric::int, + pfs.funding_start_date, + pfs.funding_end_date, + iac.investment_action_category_id, + iac.name as investment_action_category_name, + fs.name as agency_name + FROM + survey as s + RIGHT OUTER JOIN + survey_funding_source as sfs + ON + sfs.survey_id = s.survey_id + RIGHT OUTER JOIN + project_funding_source as pfs + ON + pfs.project_funding_source_id = sfs.project_funding_source_id + RIGHT OUTER JOIN + investment_action_category as iac + ON + pfs.investment_action_category_id = iac.investment_action_category_id + RIGHT OUTER JOIN + funding_source as fs + ON + iac.funding_source_id = fs.funding_source_id + WHERE + s.survey_id = ${surveyId} + GROUP BY + sfs.project_funding_source_id, + fs.funding_source_id, + pfs.funding_source_project_id, + pfs.funding_amount, + pfs.funding_start_date, + pfs.funding_end_date, + iac.investment_action_category_id, + iac.name, + fs.name + ORDER BY + pfs.funding_start_date; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get survey funding sources data', [ + 'SurveyRepository->getSurveyFundingSourcesData', + 'response was null or undefined, expected response != null' + ]); + } + + return new GetSurveyFundingSources(result); + } + + async getSurveyProprietorDataForView(surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + prt.name as proprietor_type_name, + prt.proprietor_type_id, + fn.name as first_nations_name, + fn.first_nations_id, + sp.rationale as category_rationale, + CASE + WHEN sp.proprietor_name is not null THEN sp.proprietor_name + WHEN fn.first_nations_id is not null THEN fn.name + END as proprietor_name, + sp.disa_required, + sp.revision_count + from + survey_proprietor as sp + left outer join proprietor_type as prt + on sp.proprietor_type_id = prt.proprietor_type_id + left outer join first_nations as fn + on sp.first_nations_id is not null + and sp.first_nations_id = fn.first_nations_id + where + survey_id = ${surveyId}; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows && response.rows?.[0]) || null; + + if (!result) { + return result; + } + + return new GetSurveyProprietorData(result); + } + + async getSurveyLocationData(surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + * + FROM + survey + WHERE + survey_id = ${surveyId}; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows && response.rows?.[0]) || null; + + return new GetSurveyLocationData(result); + } + + async getOccurrenceSubmissionId(surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + max(occurrence_submission_id) as id + FROM + occurrence_submission + WHERE + survey_id = ${surveyId}; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows && response.rows?.[0]) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get survey Occurrence submission Id', [ + 'SurveyRepository->getOccurrenceSubmissionId', + 'response was null or undefined, expected response != null' + ]); + } + return result; + } + + async getLatestSurveyOccurrenceSubmission(surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + os.occurrence_submission_id as id, + os.survey_id, + os.source, + os.delete_timestamp, + os.event_timestamp, + os.input_key, + os.input_file_name, + os.output_key, + os.output_file_name, + ss.submission_status_id, + ss.submission_status_type_id, + sst.name as submission_status_type_name, + sm.submission_message_id, + sm.submission_message_type_id, + sm.message, + smt.name as submission_message_type_name + FROM + occurrence_submission as os + LEFT OUTER JOIN + submission_status as ss + ON + os.occurrence_submission_id = ss.occurrence_submission_id + LEFT OUTER JOIN + submission_status_type as sst + ON + sst.submission_status_type_id = ss.submission_status_type_id + LEFT OUTER JOIN + submission_message as sm + ON + sm.submission_status_id = ss.submission_status_id + LEFT OUTER JOIN + submission_message_type as smt + ON + smt.submission_message_type_id = sm.submission_message_type_id + WHERE + os.survey_id = ${surveyId} + ORDER BY + os.event_timestamp DESC, ss.submission_status_id DESC + LIMIT 1 + ; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows && response.rows?.[0]) || null; + + return result; + } + + /** + * SQL query to get the list of messages for an occurrence submission. + * + * @param {number} submissionId The ID of the submission + * @returns {*} Promise Promsie resolving the array of submission messages + */ + async getOccurrenceSubmissionMessages(submissionId: number): Promise { + const sqlStatement = SQL` + SELECT + sm.submission_message_id as id, + smt.name as type, + sst.name as status, + smc.name as class, + sm.message + FROM + occurrence_submission as os + LEFT OUTER JOIN + submission_status as ss + ON + os.occurrence_submission_id = ss.occurrence_submission_id + LEFT OUTER JOIN + submission_status_type as sst + ON + sst.submission_status_type_id = ss.submission_status_type_id + LEFT OUTER JOIN + submission_message as sm + ON + sm.submission_status_id = ss.submission_status_id + LEFT OUTER JOIN + submission_message_type as smt + ON + smt.submission_message_type_id = sm.submission_message_type_id + LEFT OUTER JOIN + submission_message_class smc + ON + smc.submission_message_class_id = smt.submission_message_class_id + WHERE + os.occurrence_submission_id = ${submissionId} + AND + sm.submission_message_id IS NOT NULL + ORDER BY sm.submission_message_id; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response?.rows) { + throw new ApiExecuteSQLError('Failed to get occurrence submission messages', [ + 'SurveyRepository->getOccurrenceSubmissionMessages', + 'response was null or undefined, expected response != null' + ]); + } + + return response.rows; + } + + async getSummaryResultId(surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + max(survey_summary_submission_id) as id + FROM + survey_summary_submission + WHERE + survey_id = ${surveyId}; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows && response.rows?.[0]) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get summary result id', [ + 'SurveyRepository->getSummaryResultId', + 'response was null or undefined, expected response != null' + ]); + } + + return result; + } + + async getAttachmentsData(surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + * + FROM + survey_attachment + WHERE + survey_id = ${surveyId}; + `; + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows) || null; + + return new GetAttachmentsData(result); + } + + async getReportAttachmentsData(surveyId: number): Promise { + const sqlStatement = SQL` + SELECT + pra.survey_report_attachment_id + , pra.survey_id + , pra.file_name + , pra.title + , pra.description + , pra.year + , pra."key" + , pra.file_size + , array_remove(array_agg(pra2.first_name ||' '||pra2.last_name), null) authors + FROM + survey_report_attachment pra + LEFT JOIN survey_report_author pra2 ON pra2.survey_report_attachment_id = pra.survey_report_attachment_id + WHERE pra.survey_id = ${surveyId} + GROUP BY + pra.survey_report_attachment_id + , pra.survey_id + , pra.file_name + , pra.title + , pra.description + , pra.year + , pra."key" + , pra.file_size; + `; + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to get attachments data', [ + 'SurveyRepository->getReportAttachmentsData', + 'response was null or undefined, expected response != null' + ]); + } + + return new GetReportAttachmentsData(result); + } + + async insertSurveyData(projectId: number, surveyData: PostSurveyObject): Promise { + const sqlStatement = SQL` + INSERT INTO survey ( + project_id, + name, + start_date, + end_date, + lead_first_name, + lead_last_name, + field_method_id, + additional_details, + ecological_season_id, + intended_outcome_id, + location_name, + geojson, + geography + ) VALUES ( + ${projectId}, + ${surveyData.survey_details.survey_name}, + ${surveyData.survey_details.start_date}, + ${surveyData.survey_details.end_date}, + ${surveyData.survey_details.biologist_first_name}, + ${surveyData.survey_details.biologist_last_name}, + ${surveyData.purpose_and_methodology.field_method_id}, + ${surveyData.purpose_and_methodology.additional_details}, + ${surveyData.purpose_and_methodology.ecological_season_id}, + ${surveyData.purpose_and_methodology.intended_outcome_id}, + ${surveyData.location.survey_area_name}, + ${JSON.stringify(surveyData.location.geometry)} + `; + + if (surveyData.location.geometry && surveyData.location.geometry.length) { + const geometryCollectionSQL = queries.spatial.generateGeometryCollectionSQL(surveyData.location.geometry); + + sqlStatement.append(SQL` + ,public.geography( + public.ST_Force2D( + public.ST_SetSRID( + `); + + sqlStatement.append(geometryCollectionSQL); + + sqlStatement.append(SQL` + , 4326))) + `); + } else { + sqlStatement.append(SQL` + ,null + `); + } + + sqlStatement.append(SQL` + ) + RETURNING + survey_id as id; + `); + + const response = await this.connection.sql(sqlStatement); + + const result = (response && response.rows && response.rows[0]) || null; + + if (!result) { + throw new ApiExecuteSQLError('Failed to insert survey data', [ + 'SurveyRepository->insertSurveyData', + 'response was null or undefined, expected response != null' + ]); + } + + return result.id; + } + + async insertFocalSpecies(focal_species_id: number, surveyId: number): Promise { + const sqlStatement = SQL` + INSERT INTO study_species ( + wldtaxonomic_units_id, + is_focal, + survey_id + ) VALUES ( + ${focal_species_id}, + TRUE, + ${surveyId} + ) RETURNING study_species_id as id; + `; + + const response = await this.connection.sql(sqlStatement); + const result = (response && response.rows && response.rows[0]) || null; + + if (!result || !result.id) { + throw new ApiExecuteSQLError('Failed to insert focal species data', [ + 'SurveyRepository->insertSurveyData', + 'response was null or undefined, expected response != null' + ]); + } + + return result.id; + } + + async insertAncillarySpecies(ancillary_species_id: number, surveyId: number): Promise { + const sqlStatement = SQL` + INSERT INTO study_species ( + wldtaxonomic_units_id, + is_focal, + survey_id + ) VALUES ( + ${ancillary_species_id}, + FALSE, + ${surveyId} + ) RETURNING study_species_id as id; + `; + + const response = await this.connection.sql(sqlStatement); + const result = (response && response.rows && response.rows[0]) || null; + + if (!result || !result.id) { + throw new ApiExecuteSQLError('Failed to insert ancillary species data', [ + 'SurveyRepository->insertSurveyData', + 'response was null or undefined, expected response != null' + ]); + } + + return result.id; + } + + async insertVantageCodes(vantage_code_id: number, surveyId: number): Promise { + const sqlStatement = SQL` + INSERT INTO survey_vantage ( + vantage_id, + survey_id + ) VALUES ( + ${vantage_code_id}, + ${surveyId} + ) RETURNING survey_vantage_id as id; + `; + + const response = await this.connection.sql(sqlStatement); + const result = (response && response.rows && response.rows[0]) || null; + + if (!result || !result.id) { + throw new ApiExecuteSQLError('Failed to insert vantage codes', [ + 'SurveyRepository->insertVantageCodes', + 'response was null or undefined, expected response != null' + ]); + } + return result.id; + } + + async insertSurveyProprietor(survey_proprietor: PostProprietorData, surveyId: number): Promise { + if (!survey_proprietor.survey_data_proprietary) { + return; + } + + const sqlStatement = SQL` + INSERT INTO survey_proprietor ( + survey_id, + proprietor_type_id, + first_nations_id, + rationale, + proprietor_name, + disa_required + ) VALUES ( + ${surveyId}, + ${survey_proprietor.prt_id}, + ${survey_proprietor.fn_id}, + ${survey_proprietor.rationale}, + ${survey_proprietor.proprietor_name}, + ${survey_proprietor.disa_required} + ) + RETURNING + survey_proprietor_id as id; + `; + + const response = await this.connection.sql(sqlStatement); + const result = (response && response.rows && response.rows[0]) || null; + + if (!result || !result.id) { + throw new ApiExecuteSQLError('Failed to insert survey proprietor data', [ + 'SurveyRepository->insertSurveyProprietor', + 'response was null or undefined, expected response != null' + ]); + } + + return result.id; + } + + async associateSurveyToPermit(projectId: number, surveyId: number, permitNumber: string): Promise { + const sqlStatement = SQL` + UPDATE + permit + SET + survey_id = ${surveyId} + WHERE + project_id = ${projectId} + AND + number = ${permitNumber}; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Failed to update survey permit record', [ + 'SurveyRepository->associateSurveyToPermit', + 'response was null or undefined, expected response != null' + ]); + } + } + + async insertSurveyPermit( + systemUserId: number, + projectId: number, + surveyId: number, + permitNumber: string, + permitType: string + ): Promise { + const sqlStatement = SQL` + INSERT INTO permit ( + system_user_id, + project_id, + survey_id, + number, + type + ) VALUES ( + ${systemUserId}, + ${projectId}, + ${surveyId}, + ${permitNumber}, + ${permitType} + ) + ON CONFLICT (number) DO + UPDATE SET + survey_id = ${surveyId} + WHERE + permit.project_id = ${projectId} + AND + permit.survey_id is NULL; + `; + + const response = await this.connection.sql(sqlStatement); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Failed to insert survey permit record', [ + 'SurveyRepository->insertSurveyPermit', + 'response was null or undefined, expected response != null' + ]); + } + } + + async insertSurveyFundingSource(funding_source_id: number, surveyId: number) { + const sqlStatement = SQL` + INSERT INTO survey_funding_source ( + survey_id, + project_funding_source_id + ) VALUES ( + ${surveyId}, + ${funding_source_id} + ); + `; + await this.connection.query(sqlStatement.text, sqlStatement.values); + } + + async updateSurveyDetailsData(surveyId: number, surveyData: PutSurveyObject) { + const knex = getKnex(); + + let fieldsToUpdate = {}; + + if (surveyData.survey_details) { + fieldsToUpdate = { + ...fieldsToUpdate, + name: surveyData.survey_details.name, + start_date: surveyData.survey_details.start_date, + end_date: surveyData.survey_details.end_date, + lead_first_name: surveyData.survey_details.lead_first_name, + lead_last_name: surveyData.survey_details.lead_last_name, + revision_count: surveyData.survey_details.revision_count + }; + } + + if (surveyData.purpose_and_methodology) { + fieldsToUpdate = { + ...fieldsToUpdate, + field_method_id: surveyData.purpose_and_methodology.field_method_id, + additional_details: surveyData.purpose_and_methodology.additional_details, + ecological_season_id: surveyData.purpose_and_methodology.ecological_season_id, + intended_outcome_id: surveyData.purpose_and_methodology.intended_outcome_id, + revision_count: surveyData.purpose_and_methodology.revision_count + }; + } + + if (surveyData.location) { + const geometrySqlStatement = SQL``; + + if (surveyData.location.geometry && surveyData.location.geometry.length) { + geometrySqlStatement.append(SQL` + public.geography( + public.ST_Force2D( + public.ST_SetSRID( + `); + + const geometryCollectionSQL = queries.spatial.generateGeometryCollectionSQL(surveyData.location.geometry); + geometrySqlStatement.append(geometryCollectionSQL); + + geometrySqlStatement.append(SQL` + , 4326))) + `); + } else { + geometrySqlStatement.append(SQL` + null + `); + } + + fieldsToUpdate = { + ...fieldsToUpdate, + location_name: surveyData.location.survey_area_name, + geojson: JSON.stringify(surveyData.location.geometry), + geography: knex.raw(geometrySqlStatement.sql, geometrySqlStatement.values), + revision_count: surveyData.location.revision_count + }; + } + + const updateSurveyQueryBuilder = knex('survey').update(fieldsToUpdate).where('survey_id', surveyId); + + const result = await this.connection.knex(updateSurveyQueryBuilder); + + if (!result || !result.rowCount) { + throw new ApiExecuteSQLError('Failed to update survey data', [ + 'SurveyRepository->updateSurveyDetailsData', + 'response was null or undefined, expected response != null' + ]); + } + } + + async deleteSurveySpeciesData(surveyId: number) { + const sqlStatement = SQL` + DELETE + from study_species + WHERE + survey_id = ${surveyId}; + `; + + await this.connection.sql(sqlStatement); + } + + async unassociatePermitFromSurvey(surveyId: number) { + const sqlStatement = SQL` + UPDATE + permit + SET + survey_id = ${null} + WHERE + survey_id = ${surveyId}; + `; + + await this.connection.sql(sqlStatement); + } + + async deleteSurveyFundingSourcesData(surveyId: number) { + const sqlStatement = SQL` + DELETE + from survey_funding_source + WHERE + survey_id = ${surveyId}; + `; + + await this.connection.sql(sqlStatement); + } + + async deleteSurveyProprietorData(surveyId: number) { + const sqlStatement = SQL` + DELETE + from survey_proprietor + WHERE + survey_id = ${surveyId}; + `; + + await this.connection.sql(sqlStatement); + } + + async deleteSurveyVantageCodes(surveyId: number) { + const sqlStatement = SQL` + DELETE + from survey_vantage + WHERE + survey_id = ${surveyId}; + `; + + await this.connection.sql(sqlStatement); + } + + /** + * Inserts a survey occurrence submission row. + * + * @param {IObservationSubmissionInsertDetails} submission The details of the submission + * @return {*} {Promise<{ submissionId: number }>} Promise resolving the ID of the submission upon successful insertion + */ + async insertSurveyOccurrenceSubmission( + submission: IObservationSubmissionInsertDetails + ): Promise<{ submissionId: number }> { + defaultLog.debug({ label: 'insertSurveyOccurrenceSubmission', submission }); + const queryBuilder = getKnex() + .table('occurrence_submission') + .insert({ + input_file_name: submission.inputFileName, + input_key: submission.inputKey, + output_file_name: submission.outputFileName, + output_key: submission.outputKey, + survey_id: submission.surveyId, + source: submission.source, + event_timestamp: `now()` + }) + .returning('occurrence_submission_id as submissionId'); + + const response = await this.connection.knex<{ submissionId: number }>(queryBuilder); + + if (!response || response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to insert survey occurrence submission', [ + 'ErrorRepository->insertSurveyOccurrenceSubmission', + 'rowCount was null or undefined, expected rowCount = 1' + ]); + } + + return response.rows[0]; + } + + /** + * Updates a survey occurrence submission with the given details. + * + * @param {IObservationSubmissionUpdateDetails} submission The details of the submission to be updated + * @return {*} {Promise<{ submissionId: number }>} Promise resolving the ID of the submission upon successfully updating it + */ + async updateSurveyOccurrenceSubmission( + submission: IObservationSubmissionUpdateDetails + ): Promise<{ submissionId: number }> { + defaultLog.debug({ label: 'updateSurveyOccurrenceSubmission', submission }); + const queryBuilder = getKnex() + .table('occurrence_submission') + .update({ + input_file_name: submission.inputFileName, + input_key: submission.inputKey, + output_file_name: submission.outputFileName, + output_key: submission.outputKey + }) + .where('occurrence_submission_id', submission.submissionId) + .returning('occurrence_submission_id as submissionId'); + + const response = await this.connection.knex<{ submissionId: number }>(queryBuilder); + + if (!response || response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to update survey occurrence submission', [ + 'ErrorRepository->updateSurveyOccurrenceSubmission', + 'rowCount was null or undefined, expected rowCount = 1' + ]); + } + + return response.rows[0]; + } + + /** + * Soft-deletes an occurrence submission. + * + * @param {number} submissionId The ID of the submission to soft delete + * @returns {*} {number} The row count of the affected records, namely `1` if the delete succeeds, `0` if it does not + */ + async deleteOccurrenceSubmission(submissionId: number): Promise { + defaultLog.debug({ label: 'deleteOccurrenceSubmission', submissionId }); + const queryBuilder = getKnex() + .table('occurrence_submission') + .update({ + delete_timestamp: `now()` + }) + .where('occurrence_submission_id', submissionId) + .returning('occurrence_submission_id as submissionId'); + + const response = await this.connection.knex<{ submissionId: number }>(queryBuilder); + + if (!response || response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to delete survey occurrence submission', [ + 'ErrorRepository->deleteOccurrenceSubmission', + 'rowCount was null or undefined, expected rowCount = 1' + ]); + } + + return response.rowCount; + } +} diff --git a/api/src/repositories/user-repository.test.ts b/api/src/repositories/user-repository.test.ts new file mode 100644 index 0000000000..6b0e03fccd --- /dev/null +++ b/api/src/repositories/user-repository.test.ts @@ -0,0 +1,442 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import { QueryResult } from 'pg'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { ApiExecuteSQLError } from '../errors/api-error'; +import { getMockDBConnection } from '../__mocks__/db'; +import { UserRepository } from './user-repository'; + +chai.use(sinonChai); + +describe('UserRepository', () => { + describe('getRoles', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should get all roles', async () => { + const mockResponse = [{ system_role_id: 1, name: 'admin' }]; + const mockQueryResponse = ({ rowCount: 1, rows: mockResponse } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + const response = await userRepository.getRoles(); + + expect(response).to.equal(mockResponse); + }); + }); + + describe('getUserById', () => { + afterEach(() => { + sinon.restore(); + }); + it('should throw an error when no user is found', async () => { + const mockQueryResponse = ({ rowCount: 0, rows: [] } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + try { + await userRepository.getUserById(1); + expect.fail(); + } catch (actualError) { + expect((actualError as ApiExecuteSQLError).message).to.equal('Failed to get user by id'); + } + }); + + it('should get user by id', async () => { + const mockResponse = [ + { system_user_id: 1, user_identifier: 1, record_end_date: 'data', role_ids: [1], role_names: ['admin'] } + ]; + const mockQueryResponse = ({ rowCount: 1, rows: mockResponse } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + const response = await userRepository.getUserById(1); + + expect(response).to.equal(mockResponse[0]); + }); + }); + + describe('getUserByGuid', () => { + afterEach(() => { + sinon.restore(); + }); + it('should return empty array when no user found', async () => { + const mockQueryResponse = ({ rowCount: 1, rows: [] } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + const response = await userRepository.getUserByGuid('user'); + + expect(response).to.eql([]); + }); + + it('should get user by guid', async () => { + const mockResponse = [ + { + system_user_id: 1, + user_identifier: 1, + user_guid: 'aaaa', + identity_source: 'idir', + record_end_date: 'data', + role_ids: [1], + role_names: ['admin'] + } + ]; + const mockQueryResponse = ({ rowCount: 1, rows: mockResponse } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + const response = await userRepository.getUserByGuid('aaaa'); + + expect(response).to.equal(mockResponse); + }); + }); + + describe('getUserByIdentifier', () => { + afterEach(() => { + sinon.restore(); + }); + it('should return empty array when no user found', async () => { + const mockQueryResponse = ({ rowCount: 1, rows: [] } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + const response = await userRepository.getUserByIdentifier('user', 'source'); + + expect(response).to.eql([]); + }); + + it('should get user by identifier', async () => { + const mockResponse = [ + { + system_user_id: 1, + user_identifier: 'username', + user_guid: 'aaaa', + identity_source: 'idir', + record_end_date: 'data', + role_ids: [1], + role_names: ['admin'] + } + ]; + const mockQueryResponse = ({ rowCount: 1, rows: mockResponse } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + const response = await userRepository.getUserByIdentifier('username', 'idir'); + + expect(response).to.equal(mockResponse); + }); + }); + + describe('addSystemUser', () => { + afterEach(() => { + sinon.restore(); + }); + it('should throw an error when insert fails', async () => { + const mockQueryResponse = ({ rowCount: 0, rows: [] } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + try { + await userRepository.addSystemUser('user-guid', 'user', 'idir'); + expect.fail(); + } catch (actualError) { + expect((actualError as ApiExecuteSQLError).message).to.equal('Failed to insert new user'); + } + }); + + it('should insert new user', async () => { + const mockResponse = [ + { + system_user_id: 1, + user_identity_source_id: 1, + user_identifier: 'user', + user_guid: 'aaaa', + record_end_date: 'data', + record_effective_date: 'date' + } + ]; + const mockQueryResponse = ({ rowCount: 1, rows: mockResponse } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + const response = await userRepository.addSystemUser('aaaa', 'user', 'idir'); + + expect(response).to.equal(mockResponse[0]); + }); + }); + + describe('listSystemUsers', () => { + afterEach(() => { + sinon.restore(); + }); + it('should return empty array when no users found', async () => { + const mockQueryResponse = ({ rowCount: 1, rows: [] } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + const response = await userRepository.listSystemUsers(); + + expect(response).to.eql([]); + }); + + it('should get user list', async () => { + const mockResponse = [ + { system_user_id: 1, user_identifier: 1, record_end_date: 'data', role_ids: [1], role_names: ['admin'] } + ]; + const mockQueryResponse = ({ rowCount: 1, rows: mockResponse } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + const response = await userRepository.listSystemUsers(); + + expect(response).to.equal(mockResponse); + }); + }); + + describe('activateSystemUser', () => { + afterEach(() => { + sinon.restore(); + }); + it('should throw an error when activate fails', async () => { + const mockQueryResponse = ({ rowCount: 0, rows: [] } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + try { + await userRepository.activateSystemUser(1); + expect.fail(); + } catch (actualError) { + expect((actualError as ApiExecuteSQLError).message).to.equal('Failed to activate system user'); + } + }); + + it('should activate user', async () => { + const mockResponse = [ + { + system_user_id: 1, + user_identity_source_id: 1, + user_identifier: 1, + record_end_date: 'data', + record_effective_date: 'date' + } + ]; + const mockQueryResponse = ({ rowCount: 1, rows: mockResponse } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + const response = await userRepository.activateSystemUser(1); + + expect(response).to.equal(undefined); + }); + }); + + describe('deactivateSystemUser', () => { + afterEach(() => { + sinon.restore(); + }); + it('should throw an error when deactivate fails', async () => { + const mockQueryResponse = ({ rowCount: 0, rows: [] } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + try { + await userRepository.deactivateSystemUser(1); + expect.fail(); + } catch (actualError) { + expect((actualError as ApiExecuteSQLError).message).to.equal('Failed to deactivate system user'); + } + }); + + it('should deactivate user', async () => { + const mockResponse = [ + { + system_user_id: 1, + user_identity_source_id: 1, + user_identifier: 1, + record_end_date: 'data', + record_effective_date: 'date' + } + ]; + const mockQueryResponse = ({ rowCount: 1, rows: mockResponse } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + const response = await userRepository.deactivateSystemUser(1); + + expect(response).to.equal(undefined); + }); + }); + + describe('deleteUserSystemRoles', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should delete user roles', async () => { + const mockResponse = [ + { + system_user_id: 1, + user_identity_source_id: 1, + user_identifier: 1, + record_end_date: 'data', + record_effective_date: 'date' + } + ]; + const mockQueryResponse = ({ rowCount: 1, rows: mockResponse } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + const response = await userRepository.deleteUserSystemRoles(1); + + expect(response).to.equal(undefined); + }); + }); + + describe('addUserSystemRoles', () => { + afterEach(() => { + sinon.restore(); + }); + it('should throw an error when adding role fails', async () => { + const mockQueryResponse = ({ rowCount: 0, rows: [] } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + try { + await userRepository.addUserSystemRoles(1, [1]); + expect.fail(); + } catch (actualError) { + expect((actualError as ApiExecuteSQLError).message).to.equal('Failed to insert user system roles'); + } + }); + + it('should add user roles', async () => { + const mockResponse = [ + { + system_user_id: 1, + user_identity_source_id: 1, + user_identifier: 1, + record_end_date: 'data', + record_effective_date: 'date' + } + ]; + const mockQueryResponse = ({ rowCount: 1, rows: mockResponse } as any) as Promise>; + + const mockDBConnection = getMockDBConnection({ + sql: async () => { + return mockQueryResponse; + } + }); + + const userRepository = new UserRepository(mockDBConnection); + + const response = await userRepository.addUserSystemRoles(1, [1, 2]); + + expect(response).to.equal(undefined); + }); + }); +}); diff --git a/api/src/repositories/user-repository.ts b/api/src/repositories/user-repository.ts new file mode 100644 index 0000000000..50a4831539 --- /dev/null +++ b/api/src/repositories/user-repository.ts @@ -0,0 +1,410 @@ +import SQL from 'sql-template-strings'; +import { SYSTEM_IDENTITY_SOURCE } from '../constants/database'; +import { ApiExecuteSQLError } from '../errors/api-error'; +import { BaseRepository } from './base-repository'; + +export interface IGetUser { + system_user_id: number; + user_guid: string; + user_identifier: string; + identity_source: string; + record_end_date: string | null; + role_ids: number[]; + role_names: string[]; +} + +export interface IInsertUser { + system_user_id: number; + user_identity_source_id: number; + user_identifier: number; + record_effective_date: string; + record_end_date: string; +} + +export interface IGetRoles { + system_role_id: number; + name: string; +} + +export class UserRepository extends BaseRepository { + /** + * Get all system roles in db + * + * @return {*} {Promise} + * @memberof UserRepository + */ + async getRoles(): Promise { + const sqlStatement = SQL` + SELECT + sr.system_role_id, + sr.name + FROM + system_role sr + `; + + const response = await this.connection.sql(sqlStatement); + + return response.rows; + } + + /** + * Fetch a single system user by their system user ID. + * + * + * @param {number} systemUserId + * @return {*} {Promise} + * @memberof UserRepository + */ + async getUserById(systemUserId: number): Promise { + const sqlStatement = SQL` + SELECT + su.system_user_id, + su.user_identifier, + su.user_guid, + su.record_end_date, + uis.name AS identity_source, + array_remove(array_agg(sr.system_role_id), NULL) AS role_ids, + array_remove(array_agg(sr.name), NULL) AS role_names + FROM + system_user su + LEFT JOIN + system_user_role sur + ON + su.system_user_id = sur.system_user_id + LEFT JOIN + system_role sr + ON + sur.system_role_id = sr.system_role_id + LEFT JOIN + user_identity_source uis + ON + uis.user_identity_source_id = su.user_identity_source_id + WHERE + su.system_user_id = ${systemUserId} + AND + su.record_end_date IS NULL + GROUP BY + su.system_user_id, + uis.name, + su.user_guid, + su.record_end_date, + su.user_identifier; + `; + + const response = await this.connection.sql(sqlStatement); + + if (response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to get user by id', [ + 'UserRepository->getUserById', + 'rowCount was null or undefined, expected rowCount = 1' + ]); + } + return response.rows[0]; + } + + /** + * Get an existing system user by their GUID. + * + * @param {string} userGuid the user's GUID + * @return {*} {Promise} + * @memberof UserRepository + */ + async getUserByGuid(userGuid: string): Promise { + const sqlStatement = SQL` + SELECT + su.system_user_id, + su.user_identifier, + su.user_guid, + su.record_end_date, + uis.name AS identity_source, + array_remove(array_agg(sr.system_role_id), NULL) AS role_ids, + array_remove(array_agg(sr.name), NULL) AS role_names + FROM + system_user su + LEFT JOIN + system_user_role sur + ON + su.system_user_id = sur.system_user_id + LEFT JOIN + system_role sr + ON + sur.system_role_id = sr.system_role_id + LEFT JOIN + user_identity_source uis + ON + uis.user_identity_source_id = su.user_identity_source_id + WHERE + su.user_guid = ${userGuid} + GROUP BY + su.system_user_id, + su.record_end_date, + su.user_identifier, + su.user_guid, + uis.name; + `; + + const response = await this.connection.sql(sqlStatement); + + return response.rows; + } + + /** + * Get an existing system user by their user identifier and identity source. + * + * @param userIdentifier the user's identifier + * @param identitySource the user's identity source, e.g. `'IDIR'` + * @return {*} {(Promise)} Promise resolving an array containing the user, if they match the + * search criteria. + * @memberof UserService + */ + async getUserByIdentifier(userIdentifier: string, identitySource: string): Promise { + const sqlStatement = SQL` + SELECT + su.system_user_id, + su.user_identifier, + su.user_guid, + su.record_end_date, + uis.name AS identity_source, + array_remove(array_agg(sr.system_role_id), NULL) AS role_ids, + array_remove(array_agg(sr.name), NULL) AS role_names + FROM + system_user su + LEFT JOIN + system_user_role sur + ON + su.system_user_id = sur.system_user_id + LEFT JOIN + system_role sr + ON + sur.system_role_id = sr.system_role_id + LEFT JOIN + user_identity_source uis + ON + uis.user_identity_source_id = su.user_identity_source_id + WHERE + su.user_identifier = ${userIdentifier} + AND + uis.name = ${identitySource} + GROUP BY + su.system_user_id, + su.record_end_date, + su.user_identifier, + su.user_guid, + uis.name; + `; + + const response = await this.connection.sql(sqlStatement); + + return response.rows; + } + + /** + * Adds a new system user. + * + * Note: Will fail if the system user already exists. + * + * @param {string | null} userGuid + * @param {string} userIdentifier + * @param {string} identitySource + * @return {*} {Promise} + * @memberof UserRepository + */ + async addSystemUser(userGuid: string | null, userIdentifier: string, identitySource: string): Promise { + const sqlStatement = SQL` + INSERT INTO + system_user + ( + user_guid, + user_identity_source_id, + user_identifier, + record_effective_date + ) + VALUES ( + ${userGuid ? userGuid.toLowerCase() : null}, + ( + SELECT + user_identity_source_id + FROM + user_identity_source + WHERE + name = ${identitySource.toUpperCase()} + ), + ${userIdentifier}, + now() + ) + RETURNING + system_user_id, + user_identity_source_id, + user_identifier, + record_effective_date, + record_end_date; + `; + const response = await this.connection.sql(sqlStatement); + + if (response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to insert new user', [ + 'UserRepository->addSystemUser', + 'rowCount was null or undefined, expected rowCount = 1' + ]); + } + return response.rows[0]; + } + + /** + * Get a list of all system users. + * + * @return {*} {Promise} + * @memberof UserRepository + */ + async listSystemUsers(): Promise { + const sqlStatement = SQL` + SELECT + su.system_user_id, + su.user_guid, + su.user_identifier, + su.record_end_date, + uis.name AS identity_source, + array_remove(array_agg(sr.system_role_id), NULL) AS role_ids, + array_remove(array_agg(sr.name), NULL) AS role_names + FROM + system_user su + LEFT JOIN + system_user_role sur + ON + su.system_user_id = sur.system_user_id + LEFT JOIN + system_role sr + ON + sur.system_role_id = sr.system_role_id + LEFT JOIN + user_identity_source uis + ON + su.user_identity_source_id = uis.user_identity_source_id + WHERE + su.record_end_date IS NULL AND uis.name not in (${SYSTEM_IDENTITY_SOURCE.DATABASE}, ${SYSTEM_IDENTITY_SOURCE.SYSTEM}) + GROUP BY + su.system_user_id, + su.user_guid, + su.record_end_date, + su.user_identifier, + uis.name; + `; + const response = await this.connection.sql(sqlStatement); + + return response.rows; + } + + /** + * Activates an existing system user that had been deactivated (soft deleted). + * + * @param {number} systemUserId + * @memberof UserRepository + */ + async activateSystemUser(systemUserId: number) { + const sqlStatement = SQL` + UPDATE + system_user + SET + record_end_date = NULL + WHERE + system_user_id = ${systemUserId} + RETURNING + system_user_id, + user_identity_source_id, + user_identifier, + record_effective_date, + record_end_date; + `; + const response = await this.connection.sql(sqlStatement); + + if (response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to activate system user', [ + 'UserRepository->activateSystemUser', + 'rowCount was null or undefined, expected rowCount = 1' + ]); + } + } + + /** + * Deactivates an existing system user (soft delete). + * + * @param {number} systemUserId + * @memberof UserRepository + */ + async deactivateSystemUser(systemUserId: number) { + const sqlStatement = SQL` + UPDATE + system_user + SET + record_end_date = now() + WHERE + system_user_id = ${systemUserId} + RETURNING + *; + `; + + const response = await this.connection.sql(sqlStatement); + + if (response.rowCount !== 1) { + throw new ApiExecuteSQLError('Failed to deactivate system user', [ + 'UserRepository->deactivateSystemUser', + 'rowCount was null or undefined, expected rowCount = 1' + ]); + } + } + + /** + * Delete all system roles for the user. + * + * @param {number} systemUserId + * @memberof UserRepository + */ + async deleteUserSystemRoles(systemUserId: number) { + const sqlStatement = SQL` + DELETE FROM + system_user_role + WHERE + system_user_id = ${systemUserId} + RETURNING + *; + `; + + await this.connection.sql(sqlStatement); + } + + /** + * Adds the specified roleIds to the user. + * + * @param {number} systemUserId + * @param {number[]} roleIds + * @memberof UserRepository + */ + async addUserSystemRoles(systemUserId: number, roleIds: number[]) { + const sqlStatement = SQL` + INSERT INTO system_user_role ( + system_user_id, + system_role_id + ) VALUES `; + + roleIds.forEach((roleId, index) => { + sqlStatement.append(SQL` + (${systemUserId},${roleId}) + `); + + if (index !== roleIds.length - 1) { + sqlStatement.append(','); + } + }); + + sqlStatement.append(';'); + + const response = await this.connection.sql(sqlStatement); + + if (!response.rowCount) { + throw new ApiExecuteSQLError('Failed to insert user system roles', [ + 'UserRepository->addUserSystemRoles', + 'rowCount was null or undefined, expected rowCount = 1' + ]); + } + } +} diff --git a/api/src/repositories/validation-repository.test.ts b/api/src/repositories/validation-repository.test.ts new file mode 100644 index 0000000000..6f5a464543 --- /dev/null +++ b/api/src/repositories/validation-repository.test.ts @@ -0,0 +1,109 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import { QueryResult } from 'pg'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { HTTP400 } from '../errors/http-error'; +import { getMockDBConnection } from '../__mocks__/db'; +import { ValidationRepository } from './validation-repository'; + +chai.use(sinonChai); + +describe('ValidationRepository', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('getTemplateMethodologySpeciesRecord', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should succeed with valid data', async () => { + sinon.stub(ValidationRepository.prototype, 'getTemplateNameVersionId').resolves({ template_id: 1 }); + + const templateName = 'template Name'; + const templateVersion = '1'; + const fieldMethodId = 10; + const surveySpecies = [10]; + + const mockResponse = ({ + rows: [ + { + template_methodology_species_id: 1, + wldtaxonomic_units_id: '10', + validation: '{}', + transform: '{}' + } + ] + } as any) as Promise>; + + const dbConnection = getMockDBConnection({ + knex: async () => mockResponse + }); + + const repo = new ValidationRepository(dbConnection); + const response = await repo.getTemplateMethodologySpeciesRecord( + templateName, + templateVersion, + fieldMethodId, + surveySpecies + ); + expect(response.template_methodology_species_id).to.be.eql(1); + expect(response.validation).to.be.eql('{}'); + expect(response.transform).to.be.eql('{}'); + }); + + it('should throw an error', async () => { + sinon.stub(ValidationRepository.prototype, 'getTemplateNameVersionId').resolves({ template_id: 1 }); + + const mockResponse = ({} as any) as Promise>; + const dbConnection = getMockDBConnection({ + knex: async () => mockResponse + }); + + const repo = new ValidationRepository(dbConnection); + + try { + await repo.getTemplateMethodologySpeciesRecord('name', 'version', 1, [1]); + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.be.eql('Failed to query template methodology species table'); + } + }); + }); + + describe('getTemplateMethodologySpeciesRecord', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should throw an error', async () => { + const mockResponse = (null as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: async () => mockResponse + }); + + const repo = new ValidationRepository(dbConnection); + + try { + await repo.getTemplateNameVersionId('name', 'version'); + expect.fail(); + } catch (error) { + expect((error as HTTP400).message).to.be.eql('Failed to query template table'); + } + }); + + it('should succeed with valid data', async () => { + const mockResponse = ({ rows: [{ template_id: 1 }] } as any) as Promise>; + const dbConnection = getMockDBConnection({ + query: async () => mockResponse + }); + + const repo = new ValidationRepository(dbConnection); + + const response = await repo.getTemplateNameVersionId('name', 'version'); + expect(response.template_id).to.be.eql(1); + }); + }); +}); diff --git a/api/src/repositories/validation-repository.ts b/api/src/repositories/validation-repository.ts new file mode 100644 index 0000000000..dbac8c6e25 --- /dev/null +++ b/api/src/repositories/validation-repository.ts @@ -0,0 +1,120 @@ +import SQL from 'sql-template-strings'; +import { SUBMISSION_MESSAGE_TYPE, SUBMISSION_STATUS_TYPE } from '../constants/status'; +import { getKnex } from '../database/db'; +import { HTTP400 } from '../errors/http-error'; +import { TransformSchema } from '../utils/media/xlsx/transformation/xlsx-transform-schema-parser'; +import { MessageError, SubmissionError } from '../utils/submission-error'; +import { BaseRepository } from './base-repository'; + +export interface ITemplateMethodologyData { + template_methodology_species_id: number; + wldtaxonomic_units_id: string; + validation: string; + transform: TransformSchema; +} + +export class ValidationRepository extends BaseRepository { + /** + * Get a template_methodology_species record from the template_methodologies_species table + * + * @param {string} templateName + * @param {string} templateVersion + * @param {number} surveyFieldMethodId + * @param {number[]} surveySpecies + * @return {*} {Promise} + * @memberof ValidationRepository + */ + async getTemplateMethodologySpeciesRecord( + templateName: string, + templateVersion: string, + surveyFieldMethodId: number, + surveySpecies: number[] + ): Promise { + const templateRow = await this.getTemplateNameVersionId(templateName, templateVersion); + + const failedToFindValidationRulesError = new SubmissionError({ + status: SUBMISSION_STATUS_TYPE.FAILED_VALIDATION, + messages: [ + new MessageError( + SUBMISSION_MESSAGE_TYPE.FAILED_GET_VALIDATION_RULES, + `Could not find any validation schema associated with Template Name "${templateName}" and Template Version "${templateVersion}".` + ) + ] + }); + + // No template validation record is found for the given template name and version + if (!templateRow) { + throw failedToFindValidationRulesError; + } + + const queryBuilder = getKnex() + .select('template_methodology_species_id', 'wldtaxonomic_units_id', 'validation', 'transform') + .from('template_methodology_species') + .where('template_id', templateRow.template_id) + .and.where(function (qb) { + qb.or.where('field_method_id', surveyFieldMethodId); + qb.or.where('field_method_id', null); + }); + + const response = await this.connection.knex(queryBuilder); + + // Querying the template methodology species table fails + if (!response || !response.rows) { + throw new HTTP400('Failed to query template methodology species table'); + } + + // Failure to find a template methodology species record for this template name and verion; Should yield a validation error. + if (response.rows.length === 0) { + throw failedToFindValidationRulesError; + } + + // Some template methodology species records are found for this template name and version, but none are associated with this + // particular surveySpecies, indicating that the wrong focal species was likely selected. + if (!response.rows.some((row) => surveySpecies.includes(Number(row.wldtaxonomic_units_id)))) { + throw new SubmissionError({ + status: SUBMISSION_STATUS_TYPE.FAILED_VALIDATION, + messages: [ + new MessageError( + SUBMISSION_MESSAGE_TYPE.MISMATCHED_TEMPLATE_SURVEY_SPECIES, + 'The focal species imported from this template does not match the focal species selected for this survey.' + ) + ] + }); + } + + // Return the first result among all records that match on the given surveySpecies. + return response.rows.filter((row) => { + return surveySpecies.includes(Number(row.wldtaxonomic_units_id)); + })[0]; + } + + /** + * Get the Template Id from a Template name and Version number + * + * @param {string} templateName + * @param {string} templateVersion + * @return {*} {Promise<{ template_id: number }>} + * @memberof ValidationRepository + */ + async getTemplateNameVersionId(templateName: string, templateVersion: string): Promise<{ template_id: number }> { + const sqlStatement = SQL` + SELECT + t.template_id + FROM + template t + WHERE + t.name = ${templateName} + AND + t.version = ${templateVersion} + ; + `; + + const response = await this.connection.query<{ template_id: number }>(sqlStatement.text, sqlStatement.values); + + if (!response) { + throw new HTTP400('Failed to query template table'); + } + + return response && response.rows && response.rows[0]; + } +} diff --git a/api/src/request-handlers/security/authentication.test.ts b/api/src/request-handlers/security/authentication.test.ts index 1bcbe03e94..80aa956d0c 100644 --- a/api/src/request-handlers/security/authentication.test.ts +++ b/api/src/request-handlers/security/authentication.test.ts @@ -1,7 +1,7 @@ import { expect } from 'chai'; import { Request } from 'express'; import { describe } from 'mocha'; -import { HTTP401 } from '../../errors/custom-error'; +import { HTTP401 } from '../../errors/http-error'; import * as authentication from './authentication'; describe('authenticateRequest', function () { diff --git a/api/src/request-handlers/security/authentication.ts b/api/src/request-handlers/security/authentication.ts index ca49d85fe0..010e501572 100644 --- a/api/src/request-handlers/security/authentication.ts +++ b/api/src/request-handlers/security/authentication.ts @@ -1,22 +1,22 @@ import { Request } from 'express'; -import { decode, GetPublicKeyOrSecret, Secret, verify, VerifyErrors } from 'jsonwebtoken'; +import { decode, verify } from 'jsonwebtoken'; import { JwksClient } from 'jwks-rsa'; -import { HTTP401 } from '../../errors/custom-error'; +import { HTTP401 } from '../../errors/http-error'; import { getLogger } from '../../utils/logger'; const defaultLog = getLogger('request-handlers/security/authentication'); -const KEYCLOAK_URL = - process.env.KEYCLOAK_URL || 'https://dev.oidc.gov.bc.ca/auth/realms/35r1iman/protocol/openid-connect/certs'; +const KEYCLOAK_URL = `${process.env.KEYCLOAK_HOST}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/certs`; +const KEYCLOAK_ISSUER = `${process.env.KEYCLOAK_HOST}/realms/${process.env.KEYCLOAK_REALM}`; /** * Authenticate the request by validating the authorization bearer token (JWT). * * Assign the bearer token to `req.keycloak_token`. * - * @param {*} req + * @param {Request} req * @return {*} {Promise} true if the token is authenticated - * @throws {HTTP401} if the token is not authenticated + * @throws {HTTP401} if the bearer token is missing or invalid */ export const authenticateRequest = async function (req: Request): Promise { try { @@ -72,7 +72,7 @@ export const authenticateRequest = async function (req: Request): Promise const signingKey = key.getPublicKey(); // Verify token using public signing key - const verifiedToken = verifyToken(tokenString, signingKey); + const verifiedToken = verify(tokenString, signingKey, { issuer: [KEYCLOAK_ISSUER] }); if (!verifiedToken) { throw new HTTP401('Access Denied'); @@ -89,43 +89,18 @@ export const authenticateRequest = async function (req: Request): Promise }; /** - * Verify jwt token. + * optionally authenticate the request by validating the authorization bearer token (JWT), if one exists on the request. * - * @param {string} tokenString - * @param {(Secret | GetPublicKeyOrSecret)} secretOrPublicKey - * @return {*} The decoded token, or null. - */ -const verifyToken = function (tokenString: string, secretOrPublicKey: Secret | GetPublicKeyOrSecret): any { - return verify(tokenString, secretOrPublicKey, verifyTokenCallback); -}; - -/** - * Callback that returns the decoded token, or null. + * If a valid token exists, assign the bearer token to `req.keycloak_token`, return true. + * + * If a valid token does not exist, return true. * - * @param {(VerifyErrors | null)} verificationError - * @param {(object | undefined)} verifiedToken - * @return {*} {(object | null | undefined)} + * Why? This authentication method should be used for endpoints where authentication is optional, but the response is + * different based on whether or not the request is authenticated. + * + * @param {Request} req + * @return {*} {Promise} */ -const verifyTokenCallback = function ( - verificationError: VerifyErrors | null, - verifiedToken: object | undefined -): object | null | undefined { - if (verificationError) { - defaultLog.warn({ label: 'verifyToken', message: 'jwt verification error', verificationError }); - return null; - } - - // Verify that the token came from the expected issuer - // Example: when running in prod, only accept tokens from `sso.pathfinder...` and not `sso-dev.pathfinder...`, etc - if (!KEYCLOAK_URL.includes(verifiedToken?.['iss'])) { - defaultLog.warn({ - label: 'verifyToken', - message: 'jwt verification error: issuer mismatch', - 'actual token issuer': verifiedToken?.['iss'], - 'expected to be a substring of': KEYCLOAK_URL - }); - return null; - } - - return verifiedToken; +export const authenticateRequestOptional = async function (req: Request): Promise { + return authenticateRequest(req).catch(() => true); }; diff --git a/api/src/request-handlers/security/authorization.test.ts b/api/src/request-handlers/security/authorization.test.ts index ea0ad4a25b..7a466e7b12 100644 --- a/api/src/request-handlers/security/authorization.test.ts +++ b/api/src/request-handlers/security/authorization.test.ts @@ -4,12 +4,10 @@ import { describe } from 'mocha'; import { QueryResult } from 'pg'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import { PROJECT_ROLE, SYSTEM_ROLE } from '../../constants/roles'; import * as db from '../../database/db'; -import { HTTPError } from '../../errors/custom-error'; +import { HTTPError } from '../../errors/http-error'; import { ProjectUserObject, UserObject } from '../../models/user'; -import project_participation_queries from '../../queries/project-participation'; import { UserService } from '../../services/user-service'; import { getMockDBConnection, getRequestHandlerMocks } from '../../__mocks__/db'; import * as authorization from './authorization'; @@ -755,31 +753,12 @@ describe('getProjectUserWithRoles', function () { expect(result).to.be.null; }); - it('returns null if the get user by id SQL statement is null', async function () { - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1 }); - sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - - const mockUsersByIdSQLResponse = null; - sinon - .stub(project_participation_queries, 'getProjectParticipationBySystemUserSQL') - .returns(mockUsersByIdSQLResponse); - - const result = await authorization.getProjectUserWithRoles(1, mockDBConnection); - - expect(result).to.be.null; - }); - it('returns the first row of the response', async function () { const mockResponseRow = { 'Test Column': 'Test Value' }; const mockQueryResponse = ({ rowCount: 1, rows: [mockResponseRow] } as unknown) as QueryResult; const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); sinon.stub(db, 'getDBConnection').returns(mockDBConnection); - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon - .stub(project_participation_queries, 'getProjectParticipationBySystemUserSQL') - .returns(mockUsersByIdSQLResponse); - const result = await authorization.getProjectUserWithRoles(1, mockDBConnection); expect(result).to.eql(mockResponseRow); diff --git a/api/src/request-handlers/security/authorization.ts b/api/src/request-handlers/security/authorization.ts index f0f2193c58..4f642909d7 100644 --- a/api/src/request-handlers/security/authorization.ts +++ b/api/src/request-handlers/security/authorization.ts @@ -1,10 +1,9 @@ -import { Request } from 'express'; -import { RequestHandler } from 'express-serve-static-core'; +import { Request, RequestHandler } from 'express'; +import SQL from 'sql-template-strings'; import { PROJECT_ROLE, SYSTEM_ROLE } from '../../constants/roles'; import { getDBConnection, IDBConnection } from '../../database/db'; -import { HTTP403, HTTP500 } from '../../errors/custom-error'; +import { HTTP403, HTTP500 } from '../../errors/http-error'; import { ProjectUserObject, UserObject } from '../../models/user'; -import { queries } from '../../queries/queries'; import { UserService } from '../../services/user-service'; import { getLogger } from '../../utils/logger'; @@ -149,13 +148,13 @@ export const executeAuthorizeConfig = async ( for (const authorizeRule of authorizeRules) { switch (authorizeRule.discriminator) { case 'SystemRole': - authorizeResults.push(await authorizeBySystemRole(req, authorizeRule, connection)); + authorizeResults.push(await authorizeBySystemRole(req, authorizeRule, connection).catch(() => false)); break; case 'ProjectRole': - authorizeResults.push(await authorizeByProjectRole(req, authorizeRule, connection)); + authorizeResults.push(await authorizeByProjectRole(req, authorizeRule, connection).catch(() => false)); break; case 'SystemUser': - authorizeResults.push(await authorizeBySystemUser(req, connection)); + authorizeResults.push(await authorizeBySystemUser(req, connection).catch(() => false)); break; } } @@ -378,7 +377,34 @@ export const getProjectUserWithRoles = async function (projectId: number, connec return null; } - const sqlStatement = queries.projectParticipation.getProjectParticipationBySystemUserSQL(projectId, systemUserId); + const sqlStatement = SQL` + SELECT + pp.project_id, + pp.system_user_id, + su.record_end_date, + array_remove(array_agg(pr.project_role_id), NULL) AS project_role_ids, + array_remove(array_agg(pr.name), NULL) AS project_role_names + FROM + project_participation pp + LEFT JOIN + project_role pr + ON + pp.project_role_id = pr.project_role_id + LEFT JOIN + system_user su + ON + pp.system_user_id = su.system_user_id + WHERE + pp.project_id = ${projectId} + AND + pp.system_user_id = ${systemUserId} + AND + su.record_end_date is NULL + GROUP BY + pp.project_id, + pp.system_user_id, + su.record_end_date ; + `; if (!sqlStatement) { return null; diff --git a/api/src/services/attachment-service.test.ts b/api/src/services/attachment-service.test.ts new file mode 100644 index 0000000000..3068ffcbd1 --- /dev/null +++ b/api/src/services/attachment-service.test.ts @@ -0,0 +1,879 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import { QueryResult } from 'pg'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { PostReportAttachmentMetadata, PutReportAttachmentMetadata } from '../models/project-survey-attachments'; +import { + AttachmentRepository, + IProjectAttachment, + IProjectReportAttachment, + IReportAttachmentAuthor, + ISurveyAttachment, + ISurveyReportAttachment +} from '../repositories/attachment-repository'; +import * as file_utils from '../utils/file-utils'; +import { getMockDBConnection } from '../__mocks__/db'; +import { AttachmentService } from './attachment-service'; +chai.use(sinonChai); + +describe('AttachmentService', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('Project', () => { + describe('Attachment', () => { + describe('getProjectAttachments', () => { + it('should return IProjectAttachment[]', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = [({ id: 1 } as unknown) as IProjectAttachment]; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'getProjectAttachments').resolves(data); + + const response = await service.getProjectAttachments(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getProjectAttachmentById', () => { + it('should return IProjectAttachment', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = ({ id: 1 } as unknown) as IProjectAttachment; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'getProjectAttachmentById').resolves(data); + + const response = await service.getProjectAttachmentById(1, 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('insertProjectAttachment', () => { + it('should return { id: number; revision_count: number }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1 }; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'insertProjectAttachment').resolves(data); + + const response = await service.insertProjectAttachment( + ({} as unknown) as Express.Multer.File, + 1, + 'string', + 'string' + ); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('updateProjectAttachment', () => { + it('should return { id: number; revision_count: number }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1 }; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'updateProjectAttachment').resolves(data); + + const response = await service.updateProjectAttachment('string', 1, 'string'); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getProjectAttachmentByFileName', () => { + it('should return QueryResult', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = ({ id: 1 } as unknown) as QueryResult; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'getProjectAttachmentByFileName').resolves(data); + + const response = await service.getProjectAttachmentByFileName('string', 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('upsertProjectAttachment', () => { + it('should update and return { id: number; revision_count: number; key: string }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1, key: 'key' }; + + const fileStub = sinon.stub(file_utils, 'generateS3FileKey').returns('key'); + + const serviceStub1 = sinon + .stub(AttachmentService.prototype, 'getProjectAttachmentByFileName') + .resolves(({ rowCount: 1 } as unknown) as QueryResult); + + const serviceStub2 = sinon + .stub(AttachmentService.prototype, 'updateProjectAttachment') + .resolves({ id: 1, revision_count: 1 }); + + const response = await service.upsertProjectAttachment(({} as unknown) as Express.Multer.File, 1, 'string'); + + expect(serviceStub1).to.be.calledOnce; + expect(serviceStub2).to.be.calledOnce; + expect(fileStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + + it('should insert and return { id: number; revision_count: number; key: string }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1, key: 'key' }; + + const fileStub = sinon.stub(file_utils, 'generateS3FileKey').returns('key'); + + const serviceStub1 = sinon + .stub(AttachmentService.prototype, 'getProjectAttachmentByFileName') + .resolves(({ rowCount: 0 } as unknown) as QueryResult); + + const serviceStub2 = sinon + .stub(AttachmentService.prototype, 'insertProjectAttachment') + .resolves({ id: 1, revision_count: 1 }); + + const response = await service.upsertProjectAttachment(({} as unknown) as Express.Multer.File, 1, 'string'); + + expect(serviceStub1).to.be.calledOnce; + expect(serviceStub2).to.be.calledOnce; + expect(fileStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getProjectAttachmentS3Key', () => { + it('should return s3 key', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = 'key'; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'getProjectAttachmentS3Key').resolves(data); + + const response = await service.getProjectAttachmentS3Key(1, 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('deleteProjectAttachment', () => { + it('should return key string', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { key: 'key' }; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'deleteProjectAttachment').resolves(data); + + const response = await service.deleteProjectAttachment(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + }); + + describe('Report Attachment', () => { + describe('getProjectReportAttachments', () => { + it('should return IProjectReportAttachment[]', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = [({ id: 1 } as unknown) as IProjectReportAttachment]; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'getProjectReportAttachments').resolves(data); + + const response = await service.getProjectReportAttachments(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getProjectReportAttachmentById', () => { + it('should return IProjectReportAttachment', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = ({ id: 1 } as unknown) as IProjectReportAttachment; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'getProjectReportAttachmentById').resolves(data); + + const response = await service.getProjectReportAttachmentById(1, 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getProjectReportAttachmentAuthors', () => { + it('should return IReportAttachmentAuthor[]', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = [({ id: 1 } as unknown) as IReportAttachmentAuthor]; + + const repoStub = sinon + .stub(AttachmentRepository.prototype, 'getProjectReportAttachmentAuthors') + .resolves(data); + + const response = await service.getProjectReportAttachmentAuthors(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('insertProjectReportAttachment', () => { + it('should return { id: number; revision_count: number }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1 }; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'insertProjectReportAttachment').resolves(data); + + const response = await service.insertProjectReportAttachment( + 'string', + 1, + 1, + ({ title: 'string' } as unknown) as PostReportAttachmentMetadata, + 'string' + ); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('updateProjectReportAttachment', () => { + it('should return { id: number; revision_count: number }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1 }; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'updateProjectReportAttachment').resolves(data); + + const response = await service.updateProjectReportAttachment('string', 1, ({ + title: 'string' + } as unknown) as PutReportAttachmentMetadata); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('deleteProjectReportAttachmentAuthors', () => { + it('should call once and return void', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = ({ id: 1 } as unknown) as QueryResult; + + const repoStub = sinon + .stub(AttachmentRepository.prototype, 'deleteProjectReportAttachmentAuthors') + .resolves(data); + + const response = await service.deleteProjectReportAttachmentAuthors(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('insertProjectReportAttachmentAuthor', () => { + it('should call once and return void', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'insertProjectReportAttachmentAuthor').resolves(); + + const response = await service.insertProjectReportAttachmentAuthor(1, { + first_name: 'first', + last_name: 'last' + }); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + }); + + describe('getProjectReportAttachmentByFileName', () => { + it('should return QueryResult', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = ({ id: 1 } as unknown) as QueryResult; + + const repoStub = sinon + .stub(AttachmentRepository.prototype, 'getProjectReportAttachmentByFileName') + .resolves(data); + + const response = await service.getProjectReportAttachmentByFileName(1, 'string'); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('upsertProjectReportAttachment', () => { + it('should update and return { id: number; revision_count: number; key: string }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1, key: 'key' }; + + const fileStub = sinon.stub(file_utils, 'generateS3FileKey').returns('key'); + + const serviceStub1 = sinon + .stub(AttachmentService.prototype, 'getProjectReportAttachmentByFileName') + .resolves(({ rowCount: 1 } as unknown) as QueryResult); + + const serviceStub2 = sinon + .stub(AttachmentService.prototype, 'updateProjectReportAttachment') + .resolves({ id: 1, revision_count: 1 }); + + const serviceStub3 = sinon + .stub(AttachmentService.prototype, 'deleteProjectReportAttachmentAuthors') + .resolves(); + + const serviceStub4 = sinon + .stub(AttachmentService.prototype, 'insertProjectReportAttachmentAuthor') + .resolves(); + + const response = await service.upsertProjectReportAttachment(({} as unknown) as Express.Multer.File, 1, { + title: 'string', + authors: [{ first_name: 'first', last_name: 'last' }] + }); + + expect(serviceStub1).to.be.calledOnce; + expect(serviceStub2).to.be.calledOnce; + expect(serviceStub3).to.be.calledOnce; + expect(serviceStub4).to.be.calledOnce; + expect(fileStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + + it('should insert and return { id: number; revision_count: number; key: string }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1, key: 'key' }; + + const fileStub = sinon.stub(file_utils, 'generateS3FileKey').returns('key'); + + const serviceStub1 = sinon + .stub(AttachmentService.prototype, 'getProjectReportAttachmentByFileName') + .resolves(({ rowCount: 0 } as unknown) as QueryResult); + + const serviceStub2 = sinon + .stub(AttachmentService.prototype, 'insertProjectReportAttachment') + .resolves({ id: 1, revision_count: 1 }); + + const serviceStub3 = sinon + .stub(AttachmentService.prototype, 'deleteProjectReportAttachmentAuthors') + .resolves(); + + const serviceStub4 = sinon + .stub(AttachmentService.prototype, 'insertProjectReportAttachmentAuthor') + .resolves(); + + const response = await service.upsertProjectReportAttachment(({} as unknown) as Express.Multer.File, 1, { + title: 'string', + authors: [{ first_name: 'first', last_name: 'last' }] + }); + + expect(serviceStub1).to.be.calledOnce; + expect(serviceStub2).to.be.calledOnce; + expect(serviceStub3).to.be.calledOnce; + expect(serviceStub4).to.be.calledOnce; + expect(fileStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getProjectReportAttachmentS3Key', () => { + it('should return s3 key', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = 'key'; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'getProjectReportAttachmentS3Key').resolves(data); + + const response = await service.getProjectReportAttachmentS3Key(1, 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('updateProjectReportAttachmentMetadata', () => { + it('should return void', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const repoStub = sinon + .stub(AttachmentRepository.prototype, 'updateProjectReportAttachmentMetadata') + .resolves(); + + const response = await service.updateProjectReportAttachmentMetadata(1, 1, ({ + title: 'string' + } as unknown) as PutReportAttachmentMetadata); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + }); + + describe('deleteProjectReportAttachment', () => { + it('should return key string', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { key: 'key' }; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'deleteProjectReportAttachment').resolves(data); + + const response = await service.deleteProjectReportAttachment(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + }); + }); + + describe('Survey', () => { + describe('Attachment', () => { + describe('getSurveyAttachments', () => { + it('should return ISurveyAttachment[]', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = [({ id: 1 } as unknown) as ISurveyAttachment]; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'getSurveyAttachments').resolves(data); + + const response = await service.getSurveyAttachments(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('deleteSurveyAttachment', () => { + it('should return key string', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { key: 'key' }; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'deleteSurveyAttachment').resolves(data); + + const response = await service.deleteSurveyAttachment(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getSurveyAttachmentS3Key', () => { + it('should return s3 key', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = 'key'; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'getSurveyAttachmentS3Key').resolves(data); + + const response = await service.getSurveyAttachmentS3Key(1, 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('updateSurveyAttachment', () => { + it('should return { id: number; revision_count: number }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1 }; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'updateSurveyAttachment').resolves(data); + + const response = await service.updateSurveyAttachment(1, 'string', 'string'); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('insertSurveyAttachment', () => { + it('should return { id: number; revision_count: number }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1 }; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'insertSurveyAttachment').resolves(data); + + const response = await service.insertSurveyAttachment('string', 1, 'string', 1, 'string'); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getSurveyAttachmentByFileName', () => { + it('should return QueryResult', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = ({ id: 1 } as unknown) as QueryResult; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'getSurveyAttachmentByFileName').resolves(data); + + const response = await service.getSurveyAttachmentByFileName('string', 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('upsertSurveyAttachment', () => { + it('should update and return { id: number; revision_count: number; key: string }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1, key: 'key' }; + + const fileStub = sinon.stub(file_utils, 'generateS3FileKey').returns('key'); + + const serviceStub1 = sinon + .stub(AttachmentService.prototype, 'getSurveyReportAttachmentByFileName') + .resolves(({ rowCount: 1 } as unknown) as QueryResult); + + const serviceStub2 = sinon + .stub(AttachmentService.prototype, 'updateSurveyAttachment') + .resolves({ id: 1, revision_count: 1 }); + + const response = await service.upsertSurveyAttachment(({} as unknown) as Express.Multer.File, 1, 1, 'string'); + + expect(serviceStub1).to.be.calledOnce; + expect(serviceStub2).to.be.calledOnce; + expect(fileStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + + it('should insert and return { id: number; revision_count: number; key: string }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1, key: 'key' }; + + const fileStub = sinon.stub(file_utils, 'generateS3FileKey').returns('key'); + + const serviceStub1 = sinon + .stub(AttachmentService.prototype, 'getSurveyReportAttachmentByFileName') + .resolves(({ rowCount: 0 } as unknown) as QueryResult); + + const serviceStub2 = sinon + .stub(AttachmentService.prototype, 'insertSurveyAttachment') + .resolves({ id: 1, revision_count: 1 }); + + const response = await service.upsertSurveyAttachment(({} as unknown) as Express.Multer.File, 1, 1, 'string'); + + expect(serviceStub1).to.be.calledOnce; + expect(serviceStub2).to.be.calledOnce; + expect(fileStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + }); + + describe('Report Attachment', () => { + describe('getSurveyReportAttachments', () => { + it('should return ISurveyReportAttachment[]', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = [({ id: 1 } as unknown) as ISurveyReportAttachment]; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'getSurveyReportAttachments').resolves(data); + + const response = await service.getSurveyReportAttachments(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getSurveyReportAttachmentById', () => { + it('should return ISurveyReportAttachment', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = ({ id: 1 } as unknown) as ISurveyReportAttachment; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'getSurveyReportAttachmentById').resolves(data); + + const response = await service.getSurveyReportAttachmentById(1, 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getSurveyAttachmentAuthors', () => { + it('should return IReportAttachmentAuthor[]', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = [({ id: 1 } as unknown) as IReportAttachmentAuthor]; + + const repoStub = sinon + .stub(AttachmentRepository.prototype, 'getSurveyReportAttachmentAuthors') + .resolves(data); + + const response = await service.getSurveyAttachmentAuthors(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('insertSurveyReportAttachment', () => { + it('should return { id: number; revision_count: number }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1 }; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'insertSurveyReportAttachment').resolves(data); + + const response = await service.insertSurveyReportAttachment( + 'string', + 1, + 1, + ({ title: 'string' } as unknown) as PostReportAttachmentMetadata, + 'string' + ); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('updateSurveyReportAttachment', () => { + it('should return { id: number; revision_count: number }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1 }; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'updateSurveyReportAttachment').resolves(data); + + const response = await service.updateSurveyReportAttachment('string', 1, ({ + title: 'string' + } as unknown) as PutReportAttachmentMetadata); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('deleteSurveyReportAttachmentAuthors', () => { + it('should call once and return void', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'deleteSurveyReportAttachmentAuthors').resolves(); + + const response = await service.deleteSurveyReportAttachmentAuthors(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + }); + + describe('insertSurveyReportAttachmentAuthor', () => { + it('should call once and return void', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'insertSurveyReportAttachmentAuthor').resolves(); + + const response = await service.insertSurveyReportAttachmentAuthor(1, { + first_name: 'first', + last_name: 'last' + }); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + }); + + describe('getSurveyReportAttachmentByFileName', () => { + it('should return QueryResult', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = ({ id: 1 } as unknown) as QueryResult; + + const repoStub = sinon + .stub(AttachmentRepository.prototype, 'getSurveyReportAttachmentByFileName') + .resolves(data); + + const response = await service.getSurveyReportAttachmentByFileName(1, 'string'); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('upsertSurveyReportAttachment', () => { + it('should update and return { id: number; revision_count: number; key: string }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1, key: 'key' }; + + const fileStub = sinon.stub(file_utils, 'generateS3FileKey').returns('key'); + + const serviceStub1 = sinon + .stub(AttachmentService.prototype, 'getSurveyReportAttachmentByFileName') + .resolves(({ rowCount: 1 } as unknown) as QueryResult); + + const serviceStub2 = sinon + .stub(AttachmentService.prototype, 'updateSurveyReportAttachment') + .resolves({ id: 1, revision_count: 1 }); + + const serviceStub3 = sinon + .stub(AttachmentService.prototype, 'deleteSurveyReportAttachmentAuthors') + .resolves(); + + const serviceStub4 = sinon.stub(AttachmentService.prototype, 'insertSurveyReportAttachmentAuthor').resolves(); + + const response = await service.upsertSurveyReportAttachment(({} as unknown) as Express.Multer.File, 1, 1, { + title: 'string', + authors: [{ first_name: 'first', last_name: 'last' }] + }); + + expect(serviceStub1).to.be.calledOnce; + expect(serviceStub2).to.be.calledOnce; + expect(serviceStub3).to.be.calledOnce; + expect(serviceStub4).to.be.calledOnce; + expect(fileStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + + it('should insert and return { id: number; revision_count: number; key: string }', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { id: 1, revision_count: 1, key: 'key' }; + + const fileStub = sinon.stub(file_utils, 'generateS3FileKey').returns('key'); + + const serviceStub1 = sinon + .stub(AttachmentService.prototype, 'getSurveyReportAttachmentByFileName') + .resolves(({ rowCount: 0 } as unknown) as QueryResult); + + const serviceStub2 = sinon + .stub(AttachmentService.prototype, 'insertSurveyReportAttachment') + .resolves({ id: 1, revision_count: 1 }); + + const serviceStub3 = sinon + .stub(AttachmentService.prototype, 'deleteSurveyReportAttachmentAuthors') + .resolves(); + + const serviceStub4 = sinon.stub(AttachmentService.prototype, 'insertSurveyReportAttachmentAuthor').resolves(); + + const response = await service.upsertSurveyReportAttachment(({} as unknown) as Express.Multer.File, 1, 1, { + title: 'string', + authors: [{ first_name: 'first', last_name: 'last' }] + }); + + expect(serviceStub1).to.be.calledOnce; + expect(serviceStub2).to.be.calledOnce; + expect(serviceStub3).to.be.calledOnce; + expect(serviceStub4).to.be.calledOnce; + expect(fileStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('deleteSurveyReportAttachment', () => { + it('should return key string', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = { key: 'key' }; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'deleteSurveyReportAttachment').resolves(data); + + const response = await service.deleteSurveyReportAttachment(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getSurveyReportAttachmentS3Key', () => { + it('should return s3 key', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const data = 'key'; + + const repoStub = sinon.stub(AttachmentRepository.prototype, 'getSurveyReportAttachmentS3Key').resolves(data); + + const response = await service.getSurveyReportAttachmentS3Key(1, 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('updateSurveyReportAttachmentMetadata', () => { + it('should return void', async () => { + const dbConnection = getMockDBConnection(); + const service = new AttachmentService(dbConnection); + + const repoStub = sinon + .stub(AttachmentRepository.prototype, 'updateSurveyReportAttachmentMetadata') + .resolves(); + + const response = await service.updateSurveyReportAttachmentMetadata(1, 1, ({ + title: 'string' + } as unknown) as PutReportAttachmentMetadata); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + }); + }); + }); +}); diff --git a/api/src/services/attachment-service.ts b/api/src/services/attachment-service.ts new file mode 100644 index 0000000000..20d71cf540 --- /dev/null +++ b/api/src/services/attachment-service.ts @@ -0,0 +1,510 @@ +import { QueryResult } from 'pg'; +import { IDBConnection } from '../database/db'; +import { PostReportAttachmentMetadata, PutReportAttachmentMetadata } from '../models/project-survey-attachments'; +import { + AttachmentRepository, + IProjectAttachment, + IProjectReportAttachment, + IReportAttachmentAuthor, + ISurveyAttachment, + ISurveyReportAttachment +} from '../repositories/attachment-repository'; +import { generateS3FileKey } from '../utils/file-utils'; +import { DBService } from './db-service'; + +export interface IAttachmentType { + id: number; + type: 'Report' | 'Other'; +} + +/** + * A repository class for accessing project and survey attachment data. + * + * @export + * @class AttachmentRepository + * @extends {BaseRepository} + */ +export class AttachmentService extends DBService { + attachmentRepository: AttachmentRepository; + + constructor(connection: IDBConnection) { + super(connection); + + this.attachmentRepository = new AttachmentRepository(connection); + } + + /** + * Finds all of the project attachments for the given project ID. + * @param {number} projectId the ID of the project + * @return {Promise} Promise resolving all project attachments. + * @memberof AttachmentService + */ + async getProjectAttachments(projectId: number): Promise { + return this.attachmentRepository.getProjectAttachments(projectId); + } + + /** + * Finds a project attachment having the given project ID and attachment ID + * @param {number} projectId the ID of the project + * @param {number} attachmentId the ID of the attachment + * @return {Promise} Promise resolving the given project attachment + * @memberof AttachmentService + */ + async getProjectAttachmentById(projectId: number, attachmentId: number): Promise { + return this.attachmentRepository.getProjectAttachmentById(projectId, attachmentId); + } + + /** + * Finds all authors belonging to the given project report attachment + * @param {number} reportAttachmentId the ID of the report attachment + * @return {Promise} Promise resolving all of the report authors + * @memberof AttachmentService + */ + async getProjectReportAttachmentAuthors(reportAttachmentId: number): Promise { + return this.attachmentRepository.getProjectReportAttachmentAuthors(reportAttachmentId); + } + + /** + * Finds all of the project report attachments for the given project ID. + * @param {number} projectId the ID of the project + * @return {Promise} Promise resolving all project report attachments. + * @memberof AttachmentService + */ + async getProjectReportAttachments(projectId: number): Promise { + return this.attachmentRepository.getProjectReportAttachments(projectId); + } + + /** + * Finds a project report attachment having the given project ID and report attachment ID + * @param {number} projectId the ID of the project + * @param {number} reportAttachmentId the ID of the report attachment + * @return {Promise} Promise resolving the given project report attachment + * @memberof AttachmentService + */ + async getProjectReportAttachmentById( + projectId: number, + reportAttachmentId: number + ): Promise { + return this.attachmentRepository.getProjectReportAttachmentById(projectId, reportAttachmentId); + } + + /** + * Finds all of the survey attachments for the given survey ID. + * @param {number} surveyId the ID of the survey + * @return {Promise} Promise resolving all survey attachments. + * @memberof AttachmentService + */ + async getSurveyAttachments(surveyId: number): Promise { + return this.attachmentRepository.getSurveyAttachments(surveyId); + } + + /** + * Finds all of the survey report attachments for the given survey ID. + * @param {number} surveyId the ID of the survey + * @return {Promise} Promise resolving all survey report attachments. + * @memberof AttachmentService + */ + async getSurveyReportAttachments(surveyId: number): Promise { + return this.attachmentRepository.getSurveyReportAttachments(surveyId); + } + + /** + * Finds a survey report attachment having the given survey ID and attachment ID + * @param {number} surveyId the ID of the survey + * @param {number} reportAttachmentId the ID of the survey report attachment + * @return {Promise} Promise resolving the given survey attachment + * @memberof AttachmentService + */ + async getSurveyReportAttachmentById(surveyId: number, reportAttachmentId: number): Promise { + return this.attachmentRepository.getSurveyReportAttachmentById(surveyId, reportAttachmentId); + } + + /** + * Finds all authors belonging to the given survey attachment + * @param {number} reportAttachmentId the ID of the report attachment + * @return {Promise} Promise resolving all of the report authors + * @memberof AttachmentService + */ + async getSurveyAttachmentAuthors(reportAttachmentId: number): Promise { + return this.attachmentRepository.getSurveyReportAttachmentAuthors(reportAttachmentId); + } + + /** + *Insert Project Attachment + * + * @param {Express.Multer.File} file + * @param {number} projectId + * @param {string} attachmentType + * @param {string} key + * @return {*} {Promise<{ id: number; revision_count: number }>} + * @memberof AttachmentService + */ + async insertProjectAttachment( + file: Express.Multer.File, + projectId: number, + attachmentType: string, + key: string + ): Promise<{ id: number; revision_count: number }> { + return this.attachmentRepository.insertProjectAttachment(file, projectId, attachmentType, key); + } + + /** + * Update Project Attachment + * + * @param {string} fileName + * @param {number} projectId + * @param {string} attachmentType + * @return {*} {Promise<{ id: number; revision_count: number }>} + * @memberof AttachmentService + */ + async updateProjectAttachment( + fileName: string, + projectId: number, + attachmentType: string + ): Promise<{ id: number; revision_count: number }> { + return this.attachmentRepository.updateProjectAttachment(fileName, projectId, attachmentType); + } + + /** + * Get Project Attachment by filename + * + * @param {string} fileName + * @param {number} projectId + * @return {*} {Promise} + * @memberof AttachmentService + */ + async getProjectAttachmentByFileName(fileName: string, projectId: number): Promise { + return this.attachmentRepository.getProjectAttachmentByFileName(projectId, fileName); + } + + /** + * Update or Insert Project Attachment + * + * @param {Express.Multer.File} file + * @param {number} projectId + * @param {string} attachmentType + * @return {*} {Promise<{ id: number; revision_count: number; key: string }>} + * @memberof AttachmentService + */ + async upsertProjectAttachment( + file: Express.Multer.File, + projectId: number, + attachmentType: string + ): Promise<{ id: number; revision_count: number; key: string }> { + const key = generateS3FileKey({ projectId: projectId, fileName: file.originalname }); + + const getResponse = await this.getProjectAttachmentByFileName(file.originalname, projectId); + + let attachmentResult: { id: number; revision_count: number }; + + if (getResponse && getResponse.rowCount > 0) { + // Existing attachment with matching name found, update it + attachmentResult = await this.updateProjectAttachment(file.originalname, projectId, attachmentType); + } else { + // No matching attachment found, insert new attachment + attachmentResult = await this.insertProjectAttachment(file, projectId, attachmentType, key); + } + + return { ...attachmentResult, key }; + } + + /** + * Insert Project Report Attachment + * + * @param {string} fileName + * @param {string} fileSize + * @param {number} projectId + * @param {PostReportAttachmentMetadata} attachmentMeta + * @param {string} key + * @return {*} {Promise<{ id: number; revision_count: number }>} + * @memberof AttachmentService + */ + async insertProjectReportAttachment( + fileName: string, + fileSize: number, + projectId: number, + attachmentMeta: PostReportAttachmentMetadata, + key: string + ): Promise<{ id: number; revision_count: number }> { + return this.attachmentRepository.insertProjectReportAttachment(fileName, fileSize, projectId, attachmentMeta, key); + } + + /** + * Update Project Report Attachment + * + * @param {string} fileName + * @param {number} projectId + * @param {PutReportAttachmentMetadata} attachmentMeta + * @return {*} {Promise<{ id: number; revision_count: number }>} + * @memberof AttachmentService + */ + async updateProjectReportAttachment( + fileName: string, + projectId: number, + attachmentMeta: PutReportAttachmentMetadata + ): Promise<{ id: number; revision_count: number }> { + return this.attachmentRepository.updateProjectReportAttachment(fileName, projectId, attachmentMeta); + } + + /** + * Delete Project Report Attachment Authors + * + * @param {number} attachmentId + * @return {*} {Promise} + * @memberof AttachmentService + */ + async deleteProjectReportAttachmentAuthors(attachmentId: number): Promise { + return this.attachmentRepository.deleteProjectReportAttachmentAuthors(attachmentId); + } + + /** + * Insert Project Report Attachment Author + * + * @param {number} attachmentId + * @param {IReportAttachmentAuthor} author + * @return {*} {Promise} + * @memberof AttachmentService + */ + async insertProjectReportAttachmentAuthor( + attachmentId: number, + author: { first_name: string; last_name: string } + ): Promise { + return this.attachmentRepository.insertProjectReportAttachmentAuthor(attachmentId, author); + } + + /** + * Get Project Report Attachment by Filename + * + * @param {number} projectId + * @param {string} fileName + * @return {*} {Promise} + * @memberof AttachmentService + */ + async getProjectReportAttachmentByFileName(projectId: number, fileName: string): Promise { + return this.attachmentRepository.getProjectReportAttachmentByFileName(projectId, fileName); + } + + async upsertProjectReportAttachment( + file: Express.Multer.File, + projectId: number, + attachmentMeta: any + ): Promise<{ id: number; revision_count: number; key: string }> { + const key = generateS3FileKey({ projectId: projectId, fileName: file.originalname, folder: 'reports' }); + + const getResponse = await this.getProjectReportAttachmentByFileName(projectId, file.originalname); + + let metadata: any; + let attachmentResult: { id: number; revision_count: number }; + + if (getResponse && getResponse.rowCount > 0) { + // Existing attachment with matching name found, update it + metadata = new PutReportAttachmentMetadata(attachmentMeta); + attachmentResult = await this.updateProjectReportAttachment(file.originalname, projectId, metadata); + } else { + // No matching attachment found, insert new attachment + metadata = new PostReportAttachmentMetadata(attachmentMeta); + attachmentResult = await this.insertProjectReportAttachment( + file.originalname, + file.size, + projectId, + metadata, + key + ); + } + + // Delete any existing attachment author records + await this.deleteProjectReportAttachmentAuthors(attachmentResult.id); + + const promises = []; + + // Insert any new attachment author records + promises.push( + metadata.authors.map((author: IReportAttachmentAuthor) => + this.insertProjectReportAttachmentAuthor(attachmentResult.id, author) + ) + ); + await Promise.all(promises); + + return { ...attachmentResult, key }; + } + + async getProjectAttachmentS3Key(projectId: number, attachmentId: number): Promise { + return this.attachmentRepository.getProjectAttachmentS3Key(projectId, attachmentId); + } + + async getProjectReportAttachmentS3Key(projectId: number, attachmentId: number): Promise { + return this.attachmentRepository.getProjectReportAttachmentS3Key(projectId, attachmentId); + } + + async updateProjectReportAttachmentMetadata( + projectId: number, + attachmentId: number, + metadata: PutReportAttachmentMetadata + ): Promise { + return this.attachmentRepository.updateProjectReportAttachmentMetadata(projectId, attachmentId, metadata); + } + + async deleteProjectAttachment(attachmentId: number): Promise<{ key: string }> { + return this.attachmentRepository.deleteProjectAttachment(attachmentId); + } + + async deleteProjectReportAttachment(attachmentId: number): Promise<{ key: string }> { + return this.attachmentRepository.deleteProjectReportAttachment(attachmentId); + } + + async insertSurveyReportAttachment( + fileName: string, + fileSize: number, + surveyId: number, + attachmentMeta: PostReportAttachmentMetadata, + key: string + ): Promise<{ id: number; revision_count: number }> { + return this.attachmentRepository.insertSurveyReportAttachment(fileName, fileSize, surveyId, attachmentMeta, key); + } + + async updateSurveyReportAttachment( + fileName: string, + surveyId: number, + attachmentMeta: PutReportAttachmentMetadata + ): Promise<{ id: number; revision_count: number }> { + return this.attachmentRepository.updateSurveyReportAttachment(fileName, surveyId, attachmentMeta); + } + + async deleteSurveyReportAttachmentAuthors(attachmentId: number): Promise { + return this.attachmentRepository.deleteSurveyReportAttachmentAuthors(attachmentId); + } + + async insertSurveyReportAttachmentAuthor( + attachmentId: number, + author: { first_name: string; last_name: string } + ): Promise { + return this.attachmentRepository.insertSurveyReportAttachmentAuthor(attachmentId, author); + } + + async getSurveyReportAttachmentByFileName(surveyId: number, fileName: string): Promise { + return this.attachmentRepository.getSurveyReportAttachmentByFileName(surveyId, fileName); + } + + async upsertSurveyReportAttachment( + file: Express.Multer.File, + projectId: number, + surveyId: number, + attachmentMeta: any + ): Promise<{ id: number; revision_count: number; key: string }> { + const key = generateS3FileKey({ + projectId: projectId, + surveyId: surveyId, + fileName: file.originalname, + folder: 'reports' + }); + + const getResponse = await this.getSurveyReportAttachmentByFileName(surveyId, file.originalname); + + let metadata; + let attachmentResult: { id: number; revision_count: number }; + + if (getResponse && getResponse.rowCount > 0) { + // Existing attachment with matching name found, update it + metadata = new PutReportAttachmentMetadata(attachmentMeta); + attachmentResult = await this.updateSurveyReportAttachment(file.originalname, surveyId, metadata); + } else { + // No matching attachment found, insert new attachment + metadata = new PostReportAttachmentMetadata(attachmentMeta); + attachmentResult = await this.insertSurveyReportAttachment( + file.originalname, + file.size, + surveyId, + new PostReportAttachmentMetadata(attachmentMeta), + key + ); + } + + // Delete any existing attachment author records + await this.deleteSurveyReportAttachmentAuthors(attachmentResult.id); + + const promises = []; + + // Insert any new attachment author records + promises.push( + metadata.authors.map((author) => this.insertSurveyReportAttachmentAuthor(attachmentResult.id, author)) + ); + + await Promise.all(promises); + + return { ...attachmentResult, key }; + } + + async deleteSurveyReportAttachment(attachmentId: number): Promise<{ key: string }> { + return this.attachmentRepository.deleteSurveyReportAttachment(attachmentId); + } + + async deleteSurveyAttachment(attachmentId: number): Promise<{ key: string }> { + return this.attachmentRepository.deleteSurveyAttachment(attachmentId); + } + + async getSurveyAttachmentS3Key(surveyId: number, attachmentId: number): Promise { + return this.attachmentRepository.getSurveyAttachmentS3Key(surveyId, attachmentId); + } + + async getSurveyReportAttachmentS3Key(surveyId: number, attachmentId: number): Promise { + return this.attachmentRepository.getSurveyReportAttachmentS3Key(surveyId, attachmentId); + } + + async updateSurveyReportAttachmentMetadata( + surveyId: number, + attachmentId: number, + metadata: PutReportAttachmentMetadata + ): Promise { + return this.attachmentRepository.updateSurveyReportAttachmentMetadata(surveyId, attachmentId, metadata); + } + + async updateSurveyAttachment( + surveyId: number, + fileName: string, + fileType: string + ): Promise<{ id: number; revision_count: number }> { + return this.attachmentRepository.updateSurveyAttachment(surveyId, fileName, fileType); + } + + async insertSurveyAttachment( + fileName: string, + fileSize: number, + fileType: string, + surveyId: number, + key: string + ): Promise<{ id: number; revision_count: number }> { + return this.attachmentRepository.insertSurveyAttachment(fileName, fileSize, fileType, surveyId, key); + } + + async getSurveyAttachmentByFileName(fileName: string, surveyId: number): Promise { + return this.attachmentRepository.getSurveyAttachmentByFileName(fileName, surveyId); + } + + async upsertSurveyAttachment( + file: Express.Multer.File, + projectId: number, + surveyId: number, + attachmentType: string + ): Promise<{ id: number; revision_count: number; key: string }> { + const key = generateS3FileKey({ + projectId: projectId, + surveyId: surveyId, + fileName: file.originalname, + folder: 'reports' + }); + + const getResponse = await this.getSurveyReportAttachmentByFileName(surveyId, file.originalname); + + let attachmentResult: { id: number; revision_count: number }; + + if (getResponse && getResponse.rowCount > 0) { + // Existing attachment with matching name found, update it + attachmentResult = await this.updateSurveyAttachment(surveyId, file.originalname, attachmentType); + } else { + // No matching attachment found, insert new attachment + attachmentResult = await this.insertSurveyAttachment(file.originalname, file.size, attachmentType, surveyId, key); + } + + return { ...attachmentResult, key }; + } +} diff --git a/api/src/services/base-repository.ts b/api/src/services/base-repository.ts new file mode 100644 index 0000000000..bde6105710 --- /dev/null +++ b/api/src/services/base-repository.ts @@ -0,0 +1,15 @@ +import { IDBConnection } from '../database/db'; + +/** + * Base class for repositories. + * + * @export + * @class BaseRepository + */ +export class BaseRepository { + connection: IDBConnection; + + constructor(connection: IDBConnection) { + this.connection = connection; + } +} diff --git a/api/src/services/code-service.ts b/api/src/services/code-service.ts index a58385a1ef..61468d00d8 100644 --- a/api/src/services/code-service.ts +++ b/api/src/services/code-service.ts @@ -1,7 +1,7 @@ import { coordinator_agency, region, regional_offices } from '../constants/codes'; import { queries } from '../queries/queries'; import { getLogger } from '../utils/logger'; -import { DBService } from './service'; +import { DBService } from './db-service'; const defaultLog = getLogger('queries/code-queries'); diff --git a/api/src/services/service.ts b/api/src/services/db-service.ts similarity index 100% rename from api/src/services/service.ts rename to api/src/services/db-service.ts diff --git a/api/src/services/dwc-service.test.ts b/api/src/services/dwc-service.test.ts new file mode 100644 index 0000000000..6508e1b0ce --- /dev/null +++ b/api/src/services/dwc-service.test.ts @@ -0,0 +1,172 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import * as spatial_utils from '../utils/spatial-utils'; +import { getMockDBConnection } from '../__mocks__/db'; +import { DwCService } from './dwc-service'; +import { TaxonomyService } from './taxonomy-service'; +chai.use(sinonChai); + +describe('DwCService', () => { + it('constructs', () => { + const dbConnectionObj = getMockDBConnection(); + + const dwcService = new DwCService(dbConnectionObj); + + expect(dwcService).to.be.instanceof(DwCService); + }); + + describe('decorateTaxonIDs', () => { + afterEach(() => { + sinon.restore(); + }); + + it('does not enrich the jsonObject if no taxonIDs exists', async () => { + const dbConnectionObj = getMockDBConnection(); + + const dwcService = new DwCService(dbConnectionObj); + + const jsonObject = { id: 1, some_text: 'abcd' }; + + const enrichedJSON = await dwcService.decorateTaxonIDs(jsonObject); + + expect(enrichedJSON).to.be.eql(jsonObject); + expect(enrichedJSON).not.to.be.eql({ id: 1 }); + }); + + it('enriches the jsonObject when it has one taxonID', async () => { + const dbConnectionObj = getMockDBConnection(); + + const dwcService = new DwCService(dbConnectionObj); + + const getEnrichedDataForSpeciesCodeStub = sinon + .stub(TaxonomyService.prototype, 'getEnrichedDataForSpeciesCode') + .resolves({ scientificName: 'some scientific name', englishName: 'some common name' }); + + const jsonObject = { + item_with_depth_1: { + item_with_depth_2: { taxonID: 'M-OVCA' } + } + }; + + const enrichedJSON = await dwcService.decorateTaxonIDs(jsonObject); + + expect(getEnrichedDataForSpeciesCodeStub).to.have.been.called; + expect(getEnrichedDataForSpeciesCodeStub).to.have.been.calledWith('M-OVCA'); + expect(enrichedJSON.item_with_depth_1.item_with_depth_2.scientificName).to.equal('some scientific name'); + expect(enrichedJSON.item_with_depth_1.item_with_depth_2.taxonID).to.equal('M-OVCA'); + expect(enrichedJSON.item_with_depth_1.item_with_depth_2.vernacularName).to.equal('some common name'); + }); + + it('enriches the jsonObject when it has multiple taxonIDs at different depths', async () => { + const dbConnectionObj = getMockDBConnection(); + + const dwcService = new DwCService(dbConnectionObj); + + const getEnrichedDataForSpeciesCodeStub = sinon + .stub(TaxonomyService.prototype, 'getEnrichedDataForSpeciesCode') + .resolves({ scientificName: 'some scientific name', englishName: 'some common name' }); + + const jsonObject = { + item_with_depth_1: { + taxonID: 'M_ALAM', + item_with_depth_2: { taxonID: 'M-OVCA', something: 'abcd' } + } + }; + + const enrichedJSON = await dwcService.decorateTaxonIDs(jsonObject); + + expect(getEnrichedDataForSpeciesCodeStub).to.have.been.calledTwice; + expect(enrichedJSON.item_with_depth_1).to.eql({ + item_with_depth_2: { + taxonID: 'M-OVCA', + scientificName: 'some scientific name', + vernacularName: 'some common name', + something: 'abcd' + }, + scientificName: 'some scientific name', + taxonID: 'M_ALAM', + vernacularName: 'some common name' + }); + expect(enrichedJSON.item_with_depth_1.item_with_depth_2.taxonID).to.equal('M-OVCA'); + expect(enrichedJSON.item_with_depth_1.item_with_depth_2.vernacularName).to.equal('some common name'); + }); + }); + + describe('decorateLatLong', () => { + afterEach(() => { + sinon.restore(); + }); + + it('returns if decimalLatitude and decimalLongitude are filled ', async () => { + const dbConnectionObj = getMockDBConnection(); + + const dwcService = new DwCService(dbConnectionObj); + const jsonObject = { + item_with_depth_1: { + item_with_depth_2: [{ verbatimCoordinates: '', decimalLatitude: 123, decimalLongitude: 123 }] + } + }; + + const newJson = await dwcService.decorateLatLong(jsonObject); + + expect(newJson).to.eql(jsonObject); + }); + + it('succeeds and decorates Lat Long', async () => { + const dbConnectionObj = getMockDBConnection(); + + const dwcService = new DwCService(dbConnectionObj); + const jsonObject = { + item_with_depth_1: { + item_with_depth_2: [{ verbatimCoordinates: '12 12314 12241' }] + } + }; + + sinon.stub(spatial_utils, 'parseUTMString').returns({ + easting: 1, + northing: 2, + zone_letter: 'a', + zone_number: 3, + zone_srid: 4 + }); + + sinon.stub(spatial_utils, 'utmToLatLng').returns({ latitude: 1, longitude: 2 }); + + const response = await await dwcService.decorateLatLong(jsonObject); + + expect(response).to.eql({ + item_with_depth_1: { + item_with_depth_2: [{ verbatimCoordinates: '12 12314 12241', decimalLatitude: 1, decimalLongitude: 2 }] + } + }); + }); + }); + + //TODO: this needs to be examined thoroughly + describe('decorateDwCJSON', () => { + afterEach(() => { + sinon.restore(); + }); + + it('runs decoration and saves data', async () => { + const dbConnectionObj = getMockDBConnection(); + + const dwcService = new DwCService(dbConnectionObj); + + const decorateLatLongStub = sinon + .stub(DwCService.prototype, 'decorateLatLong') + .resolves({ id: 2, lat: 1, long: 2 }); + const decorateTaxonIDsStub = sinon + .stub(DwCService.prototype, 'decorateTaxonIDs') + .resolves({ id: 2, lat: 1, long: 2, taxonID: 3 }); + + const response = await dwcService.decorateDwCJSON({}); + + expect(response).to.eql({ id: 2, lat: 1, long: 2, taxonID: 3 }); + expect(decorateLatLongStub).to.be.calledOnce; + expect(decorateTaxonIDsStub).to.be.calledOnce; + }); + }); +}); diff --git a/api/src/services/dwc-service.ts b/api/src/services/dwc-service.ts new file mode 100644 index 0000000000..ed0c8f4d60 --- /dev/null +++ b/api/src/services/dwc-service.ts @@ -0,0 +1,138 @@ +import jsonpatch, { Operation } from 'fast-json-patch'; +import { JSONPath } from 'jsonpath-plus'; +import { IDBConnection } from '../database/db'; +import { parseUTMString, utmToLatLng } from '../utils/spatial-utils'; +import { DBService } from './db-service'; +import { TaxonomyService } from './taxonomy-service'; +/** + * Service to produce DWC data for a project. + * + * @see https://eml.ecoinformatics.org for EML specification + * @see https://knb.ecoinformatics.org/emlparser/ for an online EML validator. + * @export + * @class EmlService + * @extends {DBService} + */ +export class DwCService extends DBService { + constructor(connection: IDBConnection) { + super(connection); + } + + /** + * Find all nodes that contain `taxonID` and update them to include additional taxonomic information. + * + * @param {string} jsonObject + * @return {*} {Promise} + * @memberof DwCService + */ + async decorateTaxonIDs(jsonObject: Record): Promise> { + const taxonomyService = new TaxonomyService(); + + // Find and return all nodes that contain `taxonID` + const pathsToPatch = JSONPath({ path: '$..[taxonID]^', json: jsonObject, resultType: 'all' }); + + const patchOperations: Operation[] = []; + + // Build patch operations + await Promise.all( + pathsToPatch.map(async (item: any) => { + const enrichedData = await taxonomyService.getEnrichedDataForSpeciesCode(item.value['taxonID']); + + if (!enrichedData) { + // No matching taxon information found for provided taxonID code + return; + } + + const taxonIdPatch: Operation = { + op: 'replace', + path: item.pointer + '/taxonID', + value: item.value['taxonID'] + }; + + const scientificNamePatch: Operation = { + op: 'add', + path: item.pointer + '/scientificName', + value: enrichedData?.scientificName + }; + + const vernacularNamePatch: Operation = { + op: 'add', + path: item.pointer + '/vernacularName', + value: enrichedData?.englishName + }; + + patchOperations.push(taxonIdPatch, scientificNamePatch, vernacularNamePatch); + }) + ); + + // Apply patch operations + return jsonpatch.applyPatch(jsonObject, patchOperations).newDocument; + } + + /** + * Decorates the DwC json object + * + * @param {Record} jsonObject + * @return {*} {Promise>} + * @memberof DwCService + */ + async decorateDwCJSON(jsonObject: Record): Promise> { + const latlongDec = await this.decorateLatLong(jsonObject); + + const taxonDec = await this.decorateTaxonIDs(latlongDec); + + return taxonDec; + } + + /** + * Decorate Lat Long details for Location data + * + * @param {string} jsonObject + * @return {*} {Promise} + * @memberof DwCService + */ + async decorateLatLong(jsonObject: Record): Promise> { + const pathsToPatch = JSONPath({ + path: '$..[verbatimCoordinates]^', + json: jsonObject, + resultType: 'all' + }); + + const patchOperations: Operation[] = []; + + pathsToPatch.forEach(async (item: any) => { + if ( + Object.prototype.hasOwnProperty.call(item.value, 'decimalLatitude') && + Object.prototype.hasOwnProperty.call(item.value, 'decimalLongitude') + ) { + if (!!item.value['decimalLatitude'] && !!item.value['decimalLongitude']) { + return jsonObject; + } + } + + const verbatimCoordinates = parseUTMString(item.value['verbatimCoordinates']); + + if (!verbatimCoordinates) { + return; + } + + const latLongValues = utmToLatLng(verbatimCoordinates); + + const decimalLatitudePatch: Operation = { + op: 'add', + path: item.pointer + '/decimalLatitude', + value: latLongValues.latitude + }; + + const decimalLongitudePatch: Operation = { + op: 'add', + path: item.pointer + '/decimalLongitude', + value: latLongValues.longitude + }; + + patchOperations.push(decimalLatitudePatch, decimalLongitudePatch); + }); + + return jsonpatch.applyPatch(jsonObject, patchOperations).newDocument; + } +} diff --git a/api/src/services/eml-service.ts b/api/src/services/eml-service.ts index 23f8ed77ca..716f4f12f5 100644 --- a/api/src/services/eml-service.ts +++ b/api/src/services/eml-service.ts @@ -5,12 +5,20 @@ import { coordEach } from '@turf/meta'; import jsonpatch from 'fast-json-patch'; import xml2js from 'xml2js'; import { IDBConnection } from '../database/db'; -import { IGetProject } from '../models/project-view'; -import { SurveyObject } from '../models/survey-view'; +import { + GetAttachmentsData as GetProjectAttachmentsData, + GetReportAttachmentsData as GetProjectReportAttachmentsData, + IGetProject +} from '../models/project-view'; +import { + GetAttachmentsData as GetSurveyAttachmentsData, + GetReportAttachmentsData as GetSurveyReportAttachmentsData, + SurveyObject +} from '../models/survey-view'; import { getDbCharacterSystemMetaDataConstantSQL } from '../queries/codes/db-constant-queries'; import { CodeService, IAllCodeSets } from './code-service'; +import { DBService } from './db-service'; import { ProjectService } from './project-service'; -import { DBService } from './service'; import { SurveyService } from './survey-service'; import { TaxonomyService } from './taxonomy-service'; @@ -36,13 +44,20 @@ type EMLDBConstants = { EML_INTELLECTUAL_RIGHTS: string; }; +type SurveyObjectWithAttachments = SurveyObject & { + attachments?: GetSurveyAttachmentsData; + report_attachments?: GetSurveyReportAttachmentsData; +}; + type Cache = { projectData?: IGetProject; - surveyData?: SurveyObject[]; + surveyData?: SurveyObjectWithAttachments[]; + projectAttachmentData?: GetProjectAttachmentsData; + projectReportAttachmentData?: GetProjectReportAttachmentsData; codes?: IAllCodeSets; }; -type BuildProjectEMLOptions = { +export type BuildProjectEMLOptions = { /** * Whether or not to include typically non-public data in the EML. Defaults to `false`. * @@ -108,14 +123,14 @@ export class EmlService extends DBService { /** * Compiles and returns the project metadata as an Ecological Metadata Language (EML) compliant XML string. * - * @param {BuildProjectEMLOptions} options - * @return {*} + * @param {BuildProjectEMLOptions} [options] + * @return {*} {Promise} * @memberof EmlService */ - async buildProjectEml(options: BuildProjectEMLOptions) { - this.includeSensitiveData = options.includeSensitiveData || false; + async buildProjectEml(options?: BuildProjectEMLOptions): Promise { + this.includeSensitiveData = options?.includeSensitiveData || false; - this.surveyIds = options.surveyIds; + this.surveyIds = options?.surveyIds || []; await this.loadProjectData(); await this.loadSurveyData(); @@ -188,13 +203,25 @@ export class EmlService extends DBService { return this.cache.projectData; } + get projectAttachmentData(): GetProjectAttachmentsData | undefined { + return this.cache.projectAttachmentData; + } + + get projectReportAttachmentData(): GetProjectReportAttachmentsData | undefined { + return this.cache.projectReportAttachmentData; + } + async loadProjectData() { const projectData = await this.projectService.getProjectById(this.projectId); + const attachmentData = await this.projectService.getAttachmentsData(this.projectId); + const attachmentReportData = await this.projectService.getReportAttachmentsData(this.projectId); this.cache.projectData = projectData; + this.cache.projectAttachmentData = attachmentData; + this.cache.projectReportAttachmentData = attachmentReportData; } - get surveyData(): SurveyObject[] { + get surveyData(): SurveyObjectWithAttachments[] { if (!this.cache.surveyData) { throw Error('Survey data was not loaded'); } @@ -208,11 +235,19 @@ export class EmlService extends DBService { const allSurveyIds = response.map((item) => item.id); // if `BuildProjectEMLOptions.surveyIds` was provided then filter out any ids not in the list - const includedSurveyIds = allSurveyIds.filter((item) => !this.surveyIds || this.surveyIds?.includes(item)); + const includedSurveyIds = allSurveyIds.filter((item) => !this.surveyIds?.length || this.surveyIds?.includes(item)); const surveyData = await this.surveyService.getSurveysByIds(includedSurveyIds); this.cache.surveyData = surveyData; + + this.cache.surveyData.forEach( + async (item) => (item.attachments = await this.surveyService.getAttachmentsData(item.survey_details.id)) + ); + this.cache.surveyData.forEach( + async (item) => + (item.report_attachments = await this.surveyService.getReportAttachmentsData(item.survey_details.id)) + ); } buildEMLSection() { @@ -251,11 +286,12 @@ export class EmlService extends DBService { $: { system: this.constants.EML_PROVIDER_URL, id: this.packageId }, title: options?.datasetTitle || this.projectData.project.project_name, creator: this.getDatasetCreator(), - ...(this.projectData.project.publish_date && { pubDate: this.projectData.project.publish_date }), metadataProvider: { organizationName: this.constants.EML_ORGANIZATION_NAME, onlineUrl: this.constants.EML_ORGANIZATION_URL }, + //EML specification expects short ISO format + pubDate: new Date().toISOString().substring(0, 10), language: 'English', contact: this.getProjectContact(), project: { @@ -373,37 +409,39 @@ export class EmlService extends DBService { }); } - if (this.includeSensitiveData) { - // only include permits if sensitive data is enabled - if (this.projectData.permit.permits?.length) { - data.push({ - describes: this.projectData.project.uuid, - metadata: { - permits: { - permit: this.projectData.permit.permits.map((item) => { - return { permitType: item.permit_type, permitNumber: item.permit_number }; - }) - } - } - }); - } - } - - if (this.includeSensitiveData) { - // only include permits if sensitive data is enabled - this.surveyData.forEach((item) => { - if (item.permit.permit_number && item.permit.permit_type) { - data.push({ - describes: item.survey_details.uuid, - metadata: { - permits: { - permit: { permitType: item.permit.permit_type, permitNumber: item.permit.permit_number } - } - } - }); - } - }); - } + // TODO add back when survey supports permits + // if (this.includeSensitiveData) { + // // only include permits if sensitive data is enabled + // if (this.projectData.permit.permits?.length) { + // data.push({ + // describes: this.projectData.project.uuid, + // metadata: { + // permits: { + // permit: this.projectData.permit.permits.map((item) => { + // return { permitType: item.permit_type, permitNumber: item.permit_number }; + // }) + // } + // } + // }); + // } + // } + + // TODO add back when survey supports permits + // if (this.includeSensitiveData) { + // // only include permits if sensitive data is enabled + // this.surveyData.forEach((item) => { + // if (item.permit.permit_number && item.permit.permit_type) { + // data.push({ + // describes: item.survey_details.uuid, + // metadata: { + // permits: { + // permit: { permitType: item.permit.permit_type, permitNumber: item.permit.permit_number } + // } + // } + // }); + // } + // }); + // } this.surveyData.forEach((item) => { if (item.proprietor) { @@ -437,13 +475,60 @@ export class EmlService extends DBService { } }); - this.surveyData.forEach((item) => { + if (this.projectAttachmentData?.attachmentDetails.length) { + data.push({ + describes: this.projectData.project.uuid, + metadata: { + projectAttachments: { + projectAttachment: this.projectAttachmentData.attachmentDetails.map((item) => { + return item; + }) + } + } + }); + } + + if (this.projectReportAttachmentData?.attachmentDetails.length) { data.push({ - describes: item.survey_details.uuid, + describes: this.projectData.project.uuid, metadata: { - surveyedAllAreas: item.purpose_and_methodology.surveyed_all_areas === 'true' || false + projectReportAttachments: { + projectReportAttachment: this.projectReportAttachmentData.attachmentDetails.map((item) => { + return item; + }) + } } }); + } + + this.surveyData.forEach((item) => { + if (item.attachments?.attachmentDetails.length) { + data.push({ + describes: item.survey_details.uuid, + metadata: { + surveyAttachments: { + surveyAttachment: item.attachments?.attachmentDetails.map((item) => { + return item; + }) + } + } + }); + } + }); + + this.surveyData.forEach((item) => { + if (item.report_attachments?.attachmentDetails.length) { + data.push({ + describes: item.survey_details.uuid, + metadata: { + surveyReportAttachments: { + surveyReportAttachment: item.report_attachments?.attachmentDetails.map((item) => { + return item; + }) + } + } + }); + } }); jsonpatch.applyOperation(this.data, { @@ -521,11 +606,11 @@ export class EmlService extends DBService { * Get all contacts for the survey. * * @ - * @param {SurveyObject} surveyData + * @param {SurveyObjectWithAttachments} surveyData * @return {*} {Record[]} * @memberof EmlService */ - getSurveyPersonnel(surveyData: SurveyObject): Record[] { + getSurveyPersonnel(surveyData: SurveyObjectWithAttachments): Record[] { return [ { individualName: { @@ -561,7 +646,7 @@ export class EmlService extends DBService { }; } - getSurveyFundingSources(surveyData: SurveyObject): Record { + getSurveyFundingSources(surveyData: SurveyObjectWithAttachments): Record { if (!surveyData.funding.funding_sources.length) { return {}; } @@ -603,7 +688,7 @@ export class EmlService extends DBService { }; } - getSurveyTemporalCoverageEML(surveyData: SurveyObject): Record { + getSurveyTemporalCoverageEML(surveyData: SurveyObjectWithAttachments): Record { if (!surveyData.survey_details.end_date) { // no end date return { @@ -670,7 +755,7 @@ export class EmlService extends DBService { }; } - getSurveyGeographicCoverageEML(surveyData: SurveyObject): Record { + getSurveyGeographicCoverageEML(surveyData: SurveyObjectWithAttachments): Record { if (!surveyData.location.geometry?.length) { return {}; } @@ -719,7 +804,7 @@ export class EmlService extends DBService { }; } - async getSurveyFocalTaxonomicCoverage(surveyData: SurveyObject): Promise> { + async getSurveyFocalTaxonomicCoverage(surveyData: SurveyObjectWithAttachments): Promise> { const taxonomySearchService = new TaxonomyService(); // TODO include ancillary_species alongside focal_species? @@ -741,7 +826,7 @@ export class EmlService extends DBService { return { taxonomicClassification: taxonomicClassifications }; } - async getSurveyDesignDescription(surveyData: SurveyObject): Promise> { + async getSurveyDesignDescription(surveyData: SurveyObjectWithAttachments): Promise> { return { description: { section: [ @@ -784,7 +869,7 @@ export class EmlService extends DBService { return Promise.all(promises); } - async getSurveyEML(surveyData: SurveyObject): Promise> { + async getSurveyEML(surveyData: SurveyObjectWithAttachments): Promise> { return { $: { id: surveyData.survey_details.uuid, system: this.constants.EML_PROVIDER_URL }, title: surveyData.survey_details.survey_name, diff --git a/api/src/services/error-service.test.ts b/api/src/services/error-service.test.ts new file mode 100644 index 0000000000..b69944f3c5 --- /dev/null +++ b/api/src/services/error-service.test.ts @@ -0,0 +1,121 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { SUBMISSION_MESSAGE_TYPE, SUBMISSION_STATUS_TYPE } from '../constants/status'; +import { ErrorRepository } from '../repositories/error-repository'; +import { SubmissionError } from '../utils/submission-error'; +import { getMockDBConnection } from '../__mocks__/db'; +import { ErrorService } from './error-service'; + +chai.use(sinonChai); + +describe('ErrorService', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('insertSubmissionStatus', () => { + it('should return submission_id and submission_status_type_id on insert', async () => { + const mockDBConnection = getMockDBConnection(); + const errorService = new ErrorService(mockDBConnection); + + const repo = sinon + .stub(ErrorRepository.prototype, 'insertSubmissionStatus') + .resolves({ submission_status_id: 1, submission_status_type_id: 1 }); + + const response = await errorService.insertSubmissionStatus(1, SUBMISSION_STATUS_TYPE.DARWIN_CORE_VALIDATED); + + expect(repo).to.be.calledOnce; + expect(response).to.be.eql({ submission_status_id: 1, submission_status_type_id: 1 }); + }); + }); + + describe('insertSubmissionMessage', () => { + it('should return submission message id and submission_message_type_id', async () => { + const mockDBConnection = getMockDBConnection(); + const errorService = new ErrorService(mockDBConnection); + + const repo = sinon + .stub(ErrorRepository.prototype, 'insertSubmissionMessage') + .resolves({ submission_message_id: 1, submission_message_type_id: 1 }); + + const response = await errorService.insertSubmissionMessage( + 1, + SUBMISSION_MESSAGE_TYPE.FAILED_GET_OCCURRENCE, + 'some message' + ); + + expect(repo).to.be.calledOnce; + expect(response).to.be.eql({ submission_message_id: 1, submission_message_type_id: 1 }); + }); + }); + + describe('insertSubmissionStatusAndMessage', () => { + it('should return submission status id and message id', async () => { + const mockDBConnection = getMockDBConnection(); + const errorService = new ErrorService(mockDBConnection); + + const mockMessageResponse = { submission_message_id: 1, submission_message_type_id: 1 }; + const mockStatusResponse = { submission_status_id: 2, submission_status_type_id: 2 }; + + const repoStatus = sinon.stub(ErrorRepository.prototype, 'insertSubmissionStatus').resolves(mockStatusResponse); + + const repoMessage = sinon + .stub(ErrorRepository.prototype, 'insertSubmissionMessage') + .resolves(mockMessageResponse); + + const response = await errorService.insertSubmissionStatusAndMessage( + 1, + SUBMISSION_STATUS_TYPE.FAILED_VALIDATION, + SUBMISSION_MESSAGE_TYPE.FAILED_PARSE_SUBMISSION, + 'message' + ); + expect(repoStatus).to.be.calledOnce; + expect(repoMessage).to.be.calledOnce; + expect(response).to.be.eql({ + submission_status_id: 2, + submission_message_id: 1 + }); + }); + }); + + describe('insertSubmissionError', () => { + it('should insert a submission status id and an array of submission messages', async () => { + const mockDBConnection = getMockDBConnection(); + const errorService = new ErrorService(mockDBConnection); + + const mockMessageResponse = { submission_message_id: 1, submission_message_type_id: 1 }; + const mockStatusResponse = { submission_status_id: 2, submission_status_type_id: 2 }; + + const repoStatusStub = sinon + .stub(ErrorRepository.prototype, 'insertSubmissionStatus') + .resolves(mockStatusResponse); + + const repoMessageStub = sinon + .stub(ErrorRepository.prototype, 'insertSubmissionMessage') + .resolves(mockMessageResponse); + + const submissionError = { + status: SUBMISSION_STATUS_TYPE.INVALID_MEDIA, + submissionMessages: [ + { + type: SUBMISSION_MESSAGE_TYPE.FAILED_PARSE_SUBMISSION, + description: 'there is a problem in row 10', + errorCode: 'some error code' + } + ] + }; + + await errorService.insertSubmissionError(1, submissionError as SubmissionError); + + expect(repoStatusStub).to.be.calledOnce; + expect(repoMessageStub).to.be.calledOnce; + expect(repoMessageStub).to.have.been.calledWith( + mockStatusResponse.submission_status_id, + submissionError.submissionMessages[0].type, + submissionError.submissionMessages[0].description + ); + }); + }); +}); diff --git a/api/src/services/error-service.ts b/api/src/services/error-service.ts new file mode 100644 index 0000000000..2a99f9d8ad --- /dev/null +++ b/api/src/services/error-service.ts @@ -0,0 +1,103 @@ +import { SUBMISSION_MESSAGE_TYPE, SUBMISSION_STATUS_TYPE } from '../constants/status'; +import { IDBConnection } from '../database/db'; +import { ErrorRepository } from '../repositories/error-repository'; +import { SubmissionError } from '../utils/submission-error'; +import { DBService } from './db-service'; + +export class ErrorService extends DBService { + errorRepository: ErrorRepository; + + constructor(connection: IDBConnection) { + super(connection); + + this.errorRepository = new ErrorRepository(connection); + } + + /** + * Inserts both the status and message of a submission + * + * @param {number} submissionId + * @param {SUBMISSION_STATUS_TYPE} submissionStatusType + * @param {SUBMISSION_MESSAGE_TYPE} submissionMessageType + * @param {string} submissionMessage + * @return {*} {Promise<{ + * submission_status_id: number; + * submission_message_id: number; + * }>} + * @memberof SubmissionService + */ + async insertSubmissionStatusAndMessage( + submissionId: number, + submissionStatusType: SUBMISSION_STATUS_TYPE, + submissionMessageType: SUBMISSION_MESSAGE_TYPE, + submissionMessage: string + ): Promise<{ + submission_status_id: number; + submission_message_id: number; + }> { + const submission_status_id = (await this.errorRepository.insertSubmissionStatus(submissionId, submissionStatusType)) + .submission_status_id; + + const submission_message_id = ( + await this.errorRepository.insertSubmissionMessage(submission_status_id, submissionMessageType, submissionMessage) + ).submission_message_id; + + return { + submission_status_id, + submission_message_id + }; + } + + /** + * Insert a submission status record. + * + * @param {number} submissionId + * @param {SUBMISSION_STATUS_TYPE} submissionStatusType + * @return {*} {Promise<{ + * submission_status_id: number; + * submission_status_type_id: number; + * }>} + * @memberof SubmissionService + */ + async insertSubmissionStatus( + submissionId: number, + submissionStatusType: SUBMISSION_STATUS_TYPE + ): Promise<{ + submission_status_id: number; + submission_status_type_id: number; + }> { + return this.errorRepository.insertSubmissionStatus(submissionId, submissionStatusType); + } + + /** + * Insert a submission m record. + * + * @param {number} submissionId + * @param {SUBMISSION_STATUS_TYPE} submissionStatusType + * @return {*} {Promise<{ + * submission_status_id: number; + * submission_status_type_id: number; + * }>} + * @memberof SubmissionService + */ + async insertSubmissionMessage( + submissionStatusId: number, + submissionMessageType: SUBMISSION_MESSAGE_TYPE, + submissionMessage: string + ): Promise<{ + submission_message_id: number; + submission_message_type_id: number; + }> { + return this.errorRepository.insertSubmissionMessage(submissionStatusId, submissionMessageType, submissionMessage); + } + + async insertSubmissionError(submissionId: number, error: SubmissionError) { + const submission_status_id = (await this.errorRepository.insertSubmissionStatus(submissionId, error.status)) + .submission_status_id; + const promises = error.submissionMessages.map((message) => { + return this.errorRepository.insertSubmissionMessage(submission_status_id, message.type, message.description); + }); + + await Promise.all(promises); + } +} diff --git a/api/src/services/gcnotify-service.test.ts b/api/src/services/gcnotify-service.test.ts index cc035361df..0fab746ffd 100644 --- a/api/src/services/gcnotify-service.test.ts +++ b/api/src/services/gcnotify-service.test.ts @@ -3,9 +3,8 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import { ApiError } from '../errors/custom-error'; -import { IgcNotifyGenericMessage } from '../models/gcnotify'; -import { GCNotifyService } from './gcnotify-service'; +import { ApiError } from '../errors/api-error'; +import { GCNotifyService, IgcNotifyGenericMessage } from './gcnotify-service'; chai.use(sinonChai); diff --git a/api/src/services/gcnotify-service.ts b/api/src/services/gcnotify-service.ts index 6ec439d21b..6a72e2e511 100644 --- a/api/src/services/gcnotify-service.ts +++ b/api/src/services/gcnotify-service.ts @@ -1,6 +1,45 @@ import axios from 'axios'; -import { ApiError, ApiErrorType } from '../errors/custom-error'; -import { IgcNotifyGenericMessage, IgcNotifyPostReturn } from '../models/gcnotify'; +import { ApiError, ApiErrorType } from '../errors/api-error'; + +export interface IgcNotifyPostReturn { + content: object; + id: string; + reference: string; + scheduled_for: string; + template: object; + uri: string; +} + +export interface IgcNotifyGenericMessage { + subject: string; + header: string; + body1: string; + body2: string; + footer: string; +} + +export interface ISendGCNotifyEmailMessage { + email_address: string; + template_id: string; + personalisation: { + subject: string; + header: string; + main_body1: string; + main_body2: string; + footer: string; + }; +} + +export interface ISendGCNotifySMSMessage { + phone_number: string; + template_id: string; + personalisation: { + header: string; + main_body1: string; + main_body2: string; + footer: string; + }; +} const EMAIL_TEMPLATE = process.env.GCNOTIFY_ONBOARDING_REQUEST_EMAIL_TEMPLATE || ''; const SMS_TEMPLATE = process.env.GCNOTIFY_ONBOARDING_REQUEST_SMS_TEMPLATE || ''; @@ -24,7 +63,7 @@ export class GCNotifyService { * @returns {IgcNotifyPostReturn} */ async sendEmailGCNotification(emailAddress: string, message: IgcNotifyGenericMessage): Promise { - const data = { + const data: ISendGCNotifyEmailMessage = { email_address: emailAddress, template_id: EMAIL_TEMPLATE, personalisation: { @@ -56,7 +95,7 @@ export class GCNotifyService { * @returns {IgcNotifyPostReturn} */ async sendPhoneNumberGCNotification(sms: string, message: IgcNotifyGenericMessage): Promise { - const data = { + const data: ISendGCNotifySMSMessage = { phone_number: sms, template_id: SMS_TEMPLATE, personalisation: { diff --git a/api/src/services/keycloak-service.test.ts b/api/src/services/keycloak-service.test.ts index 8d3a9f0822..78556887df 100644 --- a/api/src/services/keycloak-service.test.ts +++ b/api/src/services/keycloak-service.test.ts @@ -2,23 +2,28 @@ import axios from 'axios'; import chai, { expect } from 'chai'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import { ApiGeneralError } from '../errors/custom-error'; +import { ApiGeneralError } from '../errors/api-error'; import { KeycloakService } from './keycloak-service'; chai.use(sinonChai); describe('KeycloakService', () => { + beforeEach(() => { + process.env.KEYCLOAK_ADMIN_HOST = 'host'; + process.env.KEYCLOAK_REALM = 'realm'; + process.env.KEYCLOAK_API_HOST = 'api-host'; + process.env.KEYCLOAK_ADMIN_USERNAME = 'admin'; + process.env.KEYCLOAK_ADMIN_PASSWORD = 'password'; + process.env.KEYCLOAK_INTEGRATION_ID = '1234'; + process.env.KEYCLOAK_ENVIRONMENT = 'test-env'; + }); + afterEach(() => { sinon.restore(); }); describe('getKeycloakToken', async () => { it('authenticates with keycloak and returns an access token', async () => { - process.env.KEYCLOAK_HOST = 'host'; - process.env.KEYCLOAK_REALM = 'realm'; - process.env.KEYCLOAK_ADMIN_USERNAME = 'admin'; - process.env.KEYCLOAK_ADMIN_PASSWORD = 'password'; - const mockAxiosResponse = { data: { access_token: 'token' } }; const axiosStub = sinon.stub(axios, 'post').resolves(mockAxiosResponse); @@ -30,11 +35,10 @@ describe('KeycloakService', () => { expect(response).to.eql('token'); expect(axiosStub).to.have.been.calledWith( - 'host/auth/realms/realm/protocol/openid-connect/token', - 'grant_type=client_credentials', + `${'host'}/realms/${'realm'}/protocol/openid-connect/token`, + `${'grant_type=client_credentials'}&${'client_id=admin'}&${'client_secret=password'}`, { - headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, - auth: { username: 'admin', password: 'password' } + headers: { 'Content-Type': 'application/x-www-form-urlencoded' } } ); }); @@ -60,22 +64,23 @@ describe('KeycloakService', () => { sinon.stub(KeycloakService.prototype, 'getKeycloakToken').resolves('token'); const mockAxiosResponse = { - data: [ - { - id: 123, - firstName: 'firstName', - lastName: 'lastName', - email: 'email', - enabled: true, - username: 'username', - attributes: { - idir_user_guid: ['string1'], - idir_userid: ['string2'], - idir_guid: ['string3'], - displayName: ['string4'] + data: { + users: [ + { + username: 'username', + email: 'email', + firstName: 'firstName', + lastName: 'lastName', + attributes: { + idir_user_guid: ['string1'], + idir_userid: ['string2'], + idir_guid: ['string3'], + displayName: ['string4'] + } } - } - ] + ], + roles: [] + } }; const axiosStub = sinon.stub(axios, 'get').resolves(mockAxiosResponse); @@ -85,12 +90,10 @@ describe('KeycloakService', () => { const response = await keycloakService.getUserByUsername('test@idir'); expect(response).to.eql({ - id: 123, + username: 'username', + email: 'email', firstName: 'firstName', lastName: 'lastName', - email: 'email', - enabled: true, - username: 'username', attributes: { idir_user_guid: ['string1'], idir_userid: ['string2'], @@ -99,15 +102,18 @@ describe('KeycloakService', () => { } }); - expect(axiosStub).to.have.been.calledWith('host/auth/admin/realms/realm/users/?username=test%40idir', { - headers: { authorization: 'Bearer token' } - }); + expect(axiosStub).to.have.been.calledWith( + `${'api-host'}/integrations/${'1234'}/test-env/user-role-mappings?${'username=test%40idir'}`, + { + headers: { authorization: 'Bearer token' } + } + ); }); it('throws an error if no users are found', async () => { sinon.stub(KeycloakService.prototype, 'getKeycloakToken').resolves('token'); - sinon.stub(axios, 'get').resolves({ data: [] }); + sinon.stub(axios, 'get').resolves({ data: { users: [], roles: [] } }); const keycloakService = new KeycloakService(); @@ -124,7 +130,19 @@ describe('KeycloakService', () => { it('throws an error if more than 1 user is found', async () => { sinon.stub(KeycloakService.prototype, 'getKeycloakToken').resolves('token'); - sinon.stub(axios, 'get').resolves({ data: [{}, {}, {}] }); + sinon.stub(axios, 'get').resolves({ + data: { + users: [ + { + username: 'user1' + }, + { + username: 'user2' + } + ], + roles: [] + } + }); const keycloakService = new KeycloakService(); diff --git a/api/src/services/keycloak-service.ts b/api/src/services/keycloak-service.ts index b30ff93e51..95702f393c 100644 --- a/api/src/services/keycloak-service.ts +++ b/api/src/services/keycloak-service.ts @@ -1,52 +1,42 @@ import axios from 'axios'; import qs from 'qs'; -import { ApiGeneralError } from '../errors/custom-error'; - -type KeycloakUserData = { - id: string; - createdTimestamp: number; - username: string; - enabled: boolean; - totp: boolean; - emailVerified: boolean; - firstName: string; - lastName: string; - email: string; - attributes: IDIRAttributes | BCEIDAttributes; - disableableCredentialTypes: []; - requiredActions: []; - notBefore: number; - access: { - manageGroupMembership: boolean; - view: boolean; - mapRoles: boolean; - impersonate: boolean; - manage: boolean; - }; -}; +import { ApiGeneralError } from '../errors/api-error'; +import { getLogger } from '../utils/logger'; type IDIRAttributes = { idir_user_guid: [string]; - idir_userid: [string]; - idir_guid: [string]; - displayName: [string]; + idir_username: [string]; + display_name: [string]; + given_name: [string]; + family_name: [string]; }; -type BCEIDAttributes = { - bceid_userid: [string]; - displayName: [string]; +interface BCEIDBasicAttributes { + bceid_user_guid: [string]; + bceid_username: [string]; +} + +type BCEIDBusinessAttributes = BCEIDBasicAttributes & { + bceid_business_guid: [string]; + bceid_business_name: [string]; + display_name: [string]; }; +interface KeycloakGetUserResponse { + users: KeycloakUser[]; + roles: Record[]; +} + export type KeycloakUser = { - id: string; username: string; + email: string; firstName: string; lastName: string; - email: string; - enabled: boolean; - attributes: IDIRAttributes | BCEIDAttributes; + attributes: IDIRAttributes | BCEIDBusinessAttributes; }; +const defaultLog = getLogger('services/keycloak-service'); + /** * Service for calling the keycloak admin API. * @@ -57,12 +47,16 @@ export type KeycloakUser = { * @class KeycloakService */ export class KeycloakService { - keycloakRealmUrl: string; - keycloakAdminUrl: string; + keycloakTokenHost: string; + keycloakApiHost: string; + keycloakIntegrationId: string; + keycloakEnvironment: string; constructor() { - this.keycloakRealmUrl = `${process.env.KEYCLOAK_HOST}/auth/realms/${process.env.KEYCLOAK_REALM}`; - this.keycloakAdminUrl = `${process.env.KEYCLOAK_HOST}/auth/admin/realms/${process.env.KEYCLOAK_REALM}`; + this.keycloakTokenHost = `${process.env.KEYCLOAK_ADMIN_HOST}/realms/${process.env.KEYCLOAK_REALM}/protocol/openid-connect/token`; + this.keycloakApiHost = `${process.env.KEYCLOAK_API_HOST}`; + this.keycloakIntegrationId = `${process.env.KEYCLOAK_INTEGRATION_ID}`; + this.keycloakEnvironment = `${process.env.KEYCLOAK_ENVIRONMENT}`; } /** @@ -72,17 +66,19 @@ export class KeycloakService { * @memberof KeycloakService */ async getKeycloakToken(): Promise { + defaultLog.debug({ label: 'getKeycloakToken', keycloakTokenHost: this.keycloakTokenHost }); + try { const { data } = await axios.post( - `${this.keycloakRealmUrl}/protocol/openid-connect/token`, - qs.stringify({ grant_type: 'client_credentials' }), + this.keycloakTokenHost, + qs.stringify({ + grant_type: 'client_credentials', + client_id: process.env.KEYCLOAK_ADMIN_USERNAME, + client_secret: process.env.KEYCLOAK_ADMIN_PASSWORD + }), { headers: { 'Content-Type': 'application/x-www-form-urlencoded' - }, - auth: { - username: process.env.KEYCLOAK_ADMIN_USERNAME as string, - password: process.env.KEYCLOAK_ADMIN_PASSWORD as string } } ); @@ -107,31 +103,29 @@ export class KeycloakService { const token = await this.getKeycloakToken(); try { - const { data } = await axios.get( - `${this.keycloakAdminUrl}/users/?${qs.stringify({ username: username })}`, + const { data } = await axios.get( + `${this.keycloakApiHost}/integrations/${this.keycloakIntegrationId}/${ + this.keycloakEnvironment + }/user-role-mappings?${qs.stringify({ username })}`, { - headers: { - authorization: `Bearer ${token}` - } + headers: { authorization: `Bearer ${token}` } } ); - if (!data.length) { + if (!data.users.length) { throw new ApiGeneralError('Found no matching keycloak users'); } - if (data.length !== 1) { + if (data.users.length !== 1) { throw new ApiGeneralError('Found too many matching keycloak users'); } return { - id: data[0].id, - firstName: data[0].firstName, - lastName: data[0].lastName, - email: data[0].email, - enabled: data[0].enabled, - username: data[0].username, - attributes: data[0].attributes + username: data.users[0].username, + email: data.users[0].email, + firstName: data.users[0].firstName, + lastName: data.users[0].lastName, + attributes: data.users[0].attributes }; } catch (error) { throw new ApiGeneralError('Failed to get user info from keycloak', [(error as Error).message]); diff --git a/api/src/services/occurrence-service.test.ts b/api/src/services/occurrence-service.test.ts new file mode 100644 index 0000000000..37ce3ce5cd --- /dev/null +++ b/api/src/services/occurrence-service.test.ts @@ -0,0 +1,125 @@ +import chai, { expect } from 'chai'; +import { Feature, FeatureCollection } from 'geojson'; +import { describe } from 'mocha'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { ISpatialComponentFeaturePropertiesRow, OccurrenceRepository } from '../repositories/occurrence-repository'; +import { getMockDBConnection } from '../__mocks__/db'; +import { OccurrenceService } from './occurrence-service'; + +chai.use(sinonChai); + +describe('OccurrenceService', () => { + afterEach(() => { + sinon.restore(); + }); + + const mockService = () => { + const dbConnection = getMockDBConnection(); + return new OccurrenceService(dbConnection); + }; + + describe('getOccurrenceSubmission', () => { + it('should return a post occurrence', async () => { + const submissionId = 1; + const repo = sinon.stub(OccurrenceRepository.prototype, 'getOccurrenceSubmission').resolves({ + occurrence_submission_id: 1, + survey_id: 1, + template_methodology_species_id: 1, + source: '', + input_key: '', + input_file_name: '', + output_key: '', + output_file_name: '', + darwin_core_source: {} + }); + const dbConnection = getMockDBConnection(); + const service = new OccurrenceService(dbConnection); + const response = await service.getOccurrenceSubmission(submissionId); + + expect(repo).to.be.calledOnce; + expect(response?.occurrence_submission_id).to.be.eql(submissionId); + }); + }); + + describe('getOccurrences', () => { + it('should return a post occurrence', async () => { + const submissionId = 1; + const repo = sinon.stub(OccurrenceRepository.prototype, 'getOccurrencesForView').resolves([ + { + taxa_data: [{ associated_taxa: 'string;', vernacular_name: 'string;', submission_spatial_component_id: 1 }], + spatial_component: { + spatial_data: ({ features: [({ id: 1 } as unknown) as Feature] } as unknown) as FeatureCollection + } + } + ]); + + const dbConnection = getMockDBConnection(); + const service = new OccurrenceService(dbConnection); + const response = await service.getOccurrences(submissionId); + + expect(repo).to.be.calledOnce; + expect(response).to.be.eql([ + { + taxa_data: [{ associated_taxa: 'string;', vernacular_name: 'string;', submission_spatial_component_id: 1 }], + spatial_data: { features: [({ id: 1 } as unknown) as Feature] } + } + ]); + }); + }); + + describe('updateSurveyOccurrenceSubmissionWithOutputKey', () => { + it('should return a submission id', async () => { + const service = mockService(); + sinon.stub(OccurrenceRepository.prototype, 'updateSurveyOccurrenceSubmissionWithOutputKey').resolves({}); + + const result = await service.updateSurveyOccurrenceSubmissionWithOutputKey(1, 'file name', 'key'); + expect(result).to.be.eql({}); + }); + }); + + describe('updateDWCSourceForOccurrenceSubmission', () => { + it('should return a submission id', async () => { + const service = mockService(); + sinon.stub(OccurrenceRepository.prototype, 'updateDWCSourceForOccurrenceSubmission').resolves(1); + + const id = await service.updateDWCSourceForOccurrenceSubmission(1, '{}'); + expect(id).to.be.eql(1); + }); + }); + + describe('findSpatialMetadataBySubmissionSpatialComponentIds', () => { + it('should return spatial components', async () => { + const service = mockService(); + sinon + .stub(OccurrenceRepository.prototype, 'findSpatialMetadataBySubmissionSpatialComponentIds') + .resolves([({ spatial_component_properties: { id: 1 } } as unknown) as ISpatialComponentFeaturePropertiesRow]); + + const id = await service.findSpatialMetadataBySubmissionSpatialComponentIds([1]); + expect(id).to.be.eql([{ id: 1 }]); + }); + }); + + describe('deleteOccurrenceSubmission', () => { + it('should delete all occurrence data by id', async () => { + const service = mockService(); + + const softDeleteOccurrenceSubmissionStub = sinon + .stub(OccurrenceRepository.prototype, 'softDeleteOccurrenceSubmission') + .resolves(); + const deleteSpatialTransformSubmissionStub = sinon + .stub(OccurrenceRepository.prototype, 'deleteSpatialTransformSubmission') + .resolves(); + const deleteSubmissionSpatialComponentStub = sinon + .stub(OccurrenceRepository.prototype, 'deleteSubmissionSpatialComponent') + .resolves([{ submission_spatial_component_id: 1 }]); + + const id = await service.deleteOccurrenceSubmission(1); + + expect(softDeleteOccurrenceSubmissionStub).to.be.calledOnce; + expect(deleteSpatialTransformSubmissionStub).to.be.calledOnce; + expect(deleteSubmissionSpatialComponentStub).to.be.calledOnce; + expect(id).to.be.eql([{ submission_spatial_component_id: 1 }]); + }); + }); +}); diff --git a/api/src/services/occurrence-service.ts b/api/src/services/occurrence-service.ts new file mode 100644 index 0000000000..929696fa1a --- /dev/null +++ b/api/src/services/occurrence-service.ts @@ -0,0 +1,116 @@ +import { GeoJsonProperties } from 'geojson'; +import { IDBConnection } from '../database/db'; +import { IOccurrenceSubmission, OccurrenceRepository } from '../repositories/occurrence-repository'; +import { DBService } from './db-service'; + +export class OccurrenceService extends DBService { + occurrenceRepository: OccurrenceRepository; + + constructor(connection: IDBConnection) { + super(connection); + this.occurrenceRepository = new OccurrenceRepository(connection); + } + + /** + * Gets a `occurrence_submission` for an id. + * + * @param {number} submissionId + * @return {*} {Promise} + */ + async getOccurrenceSubmission(submissionId: number): Promise { + return this.occurrenceRepository.getOccurrenceSubmission(submissionId); + } + + /** + * Gets list `occurrence` and maps them for use on a map + * + * @param {number} submissionId + * @return {*} {Promise} + */ + async getOccurrences(submissionId: number): Promise { + const response = await this.occurrenceRepository.getOccurrencesForView(submissionId); + + const occurrenceData = response.map((row) => { + const { spatial_component, taxa_data } = row; + const { spatial_data, ...rest } = spatial_component; + return { + taxa_data, + ...rest, + spatial_data: { + ...spatial_data, + features: spatial_data.features.map((feature) => { + delete feature?.properties?.dwc; + return feature; + }) + } + }; + }); + + return occurrenceData; + } + + /** + * Updates `occurrence_submission` output key field. + * + * @param {number} submissionId + * @param {string} fileName + * @param {string} key + * @return {*} {Promise} + */ + async updateSurveyOccurrenceSubmissionWithOutputKey( + submissionId: number, + fileName: string, + key: string + ): Promise { + return this.occurrenceRepository.updateSurveyOccurrenceSubmissionWithOutputKey(submissionId, fileName, key); + } + + /** + * Updates `darwin_core_source` with passed a stringified json object. + * + * @param {number} submissionId + * @param {string} jsonData + * @return {*} {Promise} + */ + async updateDWCSourceForOccurrenceSubmission(submissionId: number, jsonData: string): Promise { + return await this.occurrenceRepository.updateDWCSourceForOccurrenceSubmission(submissionId, jsonData); + } + + /** + * Query builder to find spatial component by given criteria + * + * @param {ISpatialComponentsSearchCriteria} criteria + * @return {*} {Promise} + * @memberof SpatialService + */ + async findSpatialMetadataBySubmissionSpatialComponentIds( + submissionSpatialComponentIds: number[] + ): Promise { + const response = await this.occurrenceRepository.findSpatialMetadataBySubmissionSpatialComponentIds( + submissionSpatialComponentIds + ); + + return response.map((row) => row.spatial_component_properties); + } + + /** + * Soft delete Occurrence Submission + * + * @param {number} occurrenceSubmissionId + * @return {*} + * @memberof OccurrenceService + */ + async deleteOccurrenceSubmission( + occurrenceSubmissionId: number + ): Promise< + { + submission_spatial_component_id: number; + }[] + > { + await this.occurrenceRepository.softDeleteOccurrenceSubmission(occurrenceSubmissionId); + + await this.occurrenceRepository.deleteSpatialTransformSubmission(occurrenceSubmissionId); + + return this.occurrenceRepository.deleteSubmissionSpatialComponent(occurrenceSubmissionId); + } +} diff --git a/api/src/services/permit-service.test.ts b/api/src/services/permit-service.test.ts index 5319b43d09..8a41ca9592 100644 --- a/api/src/services/permit-service.test.ts +++ b/api/src/services/permit-service.test.ts @@ -1,313 +1,251 @@ import chai, { expect } from 'chai'; -import { QueryResult } from 'pg'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import { HTTPError } from '../errors/custom-error'; -import permit_queries from '../queries/permit'; +import { SYSTEM_ROLE } from '../constants/roles'; +import { ApiGeneralError } from '../errors/api-error'; +import { UserObject } from '../models/user'; +import { IPermitModel, PermitRepository } from '../repositories/permit-repository'; import { getMockDBConnection } from '../__mocks__/db'; import { PermitService } from './permit-service'; +import { UserService } from './user-service'; chai.use(sinonChai); describe('PermitService', () => { - describe('getAllPermits', () => { + it('constructs', () => { + const mockDBConnection = getMockDBConnection(); + + const permitService = new PermitService(mockDBConnection); + + expect(permitService).to.be.instanceof(PermitService); + }); + + describe('getPermitByUser', () => { afterEach(() => { sinon.restore(); }); - it('should throw a 400 error when no sql statement returned for permits', async () => { - const mockDBConnection = getMockDBConnection(); - const systemUserId = 22; + it('Gets permit by admin user id', async () => { + const mockPermitResponse: IPermitModel[] = [ + { + permit_id: 1, + survey_id: 1, + number: 'permit number', + type: 'permit type', + create_date: new Date().toISOString(), + create_user: 1, + update_date: null, + update_user: null, + revision_count: 0 + } + ]; - sinon.stub(permit_queries, 'getAllPermitsSQL').returns(null); + const mockUserObject: UserObject = { + id: 1, + user_identifier: 'test_user', + user_guid: 'aaaa', + identity_source: 'idir', + record_end_date: '', + role_ids: [], + role_names: [SYSTEM_ROLE.SYSTEM_ADMIN] + }; + const mockDBConnection = getMockDBConnection(); const permitService = new PermitService(mockDBConnection); - try { - await permitService.getAllPermits(systemUserId); + const getAllPermits = sinon.stub(PermitRepository.prototype, 'getAllPermits').resolves(mockPermitResponse); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - } - }); + const getUserByIdStub = sinon.stub(UserService.prototype, 'getUserById').resolves(mockUserObject); - it('should return null when permits response has no rows', async () => { - const mockQueryResponse = (null as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); + const response = await permitService.getPermitByUser(mockUserObject.id); - const systemUserId = 22; - - sinon.stub(permit_queries, 'getAllPermitsSQL').returns(SQL`some query`); - - const permitService = new PermitService(mockDBConnection); - - try { - await permitService.getAllPermits(systemUserId); - - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to get all user permits'); - } + expect(getAllPermits).to.be.calledOnce; + expect(getUserByIdStub).to.be.calledOnceWith(mockUserObject.id); + expect(response).to.eql(mockPermitResponse); }); - it('should return all permits on success', async () => { - const allPermits = [ + it('Gets permit by data admin user id', async () => { + const mockPermitResponse: IPermitModel[] = [ { - id: 1, - number: '123', - type: 'scientific', - coordinator_agency: 'agency', - project_name: 'project 1' - }, - { - id: 2, - number: '12345', - type: 'wildlife', - coordinator_agency: 'agency 2', - project_name: null + permit_id: 1, + survey_id: 1, + number: 'permit number', + type: 'permit type', + create_date: new Date().toISOString(), + create_user: 1, + update_date: null, + update_user: null, + revision_count: 0 } ]; - const mockQueryResponse = ({ rows: allPermits } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); + const mockUserObject: UserObject = { + id: 1, + user_identifier: 'test_user', + user_guid: 'aaaa', + identity_source: 'idir', + record_end_date: '', + role_ids: [], + role_names: [SYSTEM_ROLE.DATA_ADMINISTRATOR] + }; - const systemUserId = 22; + const mockDBConnection = getMockDBConnection(); + const permitService = new PermitService(mockDBConnection); - sinon.stub(permit_queries, 'getAllPermitsSQL').returns(SQL`some query`); + const getAllPermits = sinon.stub(PermitRepository.prototype, 'getAllPermits').resolves(mockPermitResponse); - const permitService = new PermitService(mockDBConnection); - const result = await permitService.getAllPermits(systemUserId); + const getUserByIdStub = sinon.stub(UserService.prototype, 'getUserById').resolves(mockUserObject); - expect(result).to.eql(allPermits); - }); - }); + const response = await permitService.getPermitByUser(mockUserObject.id); - describe('getNonSamplingPermits', () => { - afterEach(() => { - sinon.restore(); + expect(getAllPermits).to.be.calledOnce; + expect(getUserByIdStub).to.be.calledOnceWith(mockUserObject.id); + expect(response).to.eql(mockPermitResponse); }); - it('should throw a 400 error when no sql statement returned for non-sampling permits', async () => { - const mockDBConnection = getMockDBConnection(); - const systemUserId = 22; + it('Gets permit by non-admin user id', async () => { + const mockPermitResponse: IPermitModel[] = [ + { + permit_id: 1, + survey_id: 1, + number: 'permit number', + type: 'permit type', + create_date: new Date().toISOString(), + create_user: 1, + update_date: null, + update_user: null, + revision_count: 0 + } + ]; - sinon.stub(permit_queries, 'getNonSamplingPermitsSQL').returns(null); + const mockUserObject: UserObject = { + id: 1, + user_identifier: 'test_user', + user_guid: 'aaaa', + identity_source: 'idir', + record_end_date: '', + role_ids: [], + role_names: [] + }; + const mockDBConnection = getMockDBConnection(); const permitService = new PermitService(mockDBConnection); - try { - await permitService.getNonSamplingPermits(systemUserId); + const getPermitByUser = sinon.stub(PermitRepository.prototype, 'getPermitByUser').resolves(mockPermitResponse); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - } - }); - - it('should throw a 400 error when permits response has no rows', async () => { - const mockQueryResponse = (null as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); + const getUserByIdStub = sinon.stub(UserService.prototype, 'getUserById').resolves(mockUserObject); - const systemUserId = 22; + const response = await permitService.getPermitByUser(mockUserObject.id); - sinon.stub(permit_queries, 'getNonSamplingPermitsSQL').returns(SQL`some query`); + expect(getPermitByUser).to.be.calledOnce; + expect(getUserByIdStub).to.be.calledOnceWith(mockUserObject.id); + expect(response).to.eql(mockPermitResponse); + }); + it('throws api error if user not found', async () => { + const mockDBConnection = getMockDBConnection(); const permitService = new PermitService(mockDBConnection); - try { - await permitService.getNonSamplingPermits(systemUserId); + sinon.stub(UserService.prototype, 'getUserById').resolves(); + try { + await permitService.getPermitByUser(1); expect.fail(); } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to get all user permits'); + expect((actualError as ApiGeneralError).message).to.equal('Failed to acquire user'); } }); + }); - it('should return non-sampling permits on success', async () => { - const nonSamplingPermits = [ - { - permit_id: 1, - number: '123', - type: 'scientific' - }, + describe('getPermitBySurveyId', () => { + it('fetches permits by survey id', async () => { + const mockDBConnection = getMockDBConnection(); + + const mockResponse = [ { permit_id: 2, + survey_id: 1, number: '12345', - type: 'wildlife' + type: 'permit type', + create_date: new Date().toISOString(), + create_user: 3, + update_date: null, + update_user: null, + revision_count: 0 } ]; - const mockQueryResponse = ({ rows: nonSamplingPermits } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); - - const systemUserId = 22; - - sinon.stub(permit_queries, 'getNonSamplingPermitsSQL').returns(SQL`some query`); + const getPermitBySurveyIdStub = sinon + .stub(PermitRepository.prototype, 'getPermitBySurveyId') + .resolves(mockResponse); const permitService = new PermitService(mockDBConnection); - const result = await permitService.getNonSamplingPermits(systemUserId); - expect(result).to.eql(nonSamplingPermits); - }); - }); + const response = await permitService.getPermitBySurveyId(1); - describe('createNoSamplePermits', () => { - const sampleReq = { - keycloak_token: {}, - body: { - coordinator: { - first_name: 'first', - last_name: 'last', - email_address: 'email@example.com', - coordinator_agency: 'agency', - share_contact_details: true - }, - permit: { - permits: [ - { - permit_number: 'number', - permit_type: 'type' - } - ] - } - } - } as any; + expect(getPermitBySurveyIdStub).to.have.been.calledOnceWith(1); - afterEach(() => { - sinon.restore(); + expect(response).to.equal(mockResponse); }); + }); - it('should throw a 400 error when no permit passed in request body', async () => { + describe('createSurveyPermit', () => { + it('creates a new surevy permit', async () => { const mockDBConnection = getMockDBConnection(); - const permitService = new PermitService(mockDBConnection); + const mockResponse = 2; - try { - await permitService.createNoSamplePermits({ ...sampleReq.body, permit: null }); - - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing request body param `permit`'); - } - }); - - it('should throw a 400 error when no coordinator passed in request body', async () => { - const mockDBConnection = getMockDBConnection(); + const createSurveyPermitStub = sinon + .stub(PermitRepository.prototype, 'createSurveyPermit') + .resolves(mockResponse); const permitService = new PermitService(mockDBConnection); - try { - await permitService.createNoSamplePermits({ ...sampleReq.body, coordinator: null }); + const response = await permitService.createSurveyPermit(1, '12345', 'permit type'); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Missing request body param `coordinator`'); - } + expect(createSurveyPermitStub).to.have.been.calledOnceWith(1, '12345', 'permit type'); + + expect(response).to.equal(mockResponse); }); + }); - it('should return the inserted ids on success', async () => { + describe('updateSurveyPermit', () => { + it('updates an existing survey permit', async () => { const mockDBConnection = getMockDBConnection(); - const permitService = new PermitService(mockDBConnection); - - sinon.stub(PermitService.prototype, 'insertNoSamplePermit').resolves(20); - - const result = await permitService.createNoSamplePermits(sampleReq.body); - - expect(result).to.eql([20]); - }); + const mockResponse = 2; - it('should throw an error when a failure occurs', async () => { - const expectedError = new Error('cannot process request'); - - const mockDBConnection = getMockDBConnection(); + const updateSurveyPermitStub = sinon + .stub(PermitRepository.prototype, 'updateSurveyPermit') + .resolves(mockResponse); const permitService = new PermitService(mockDBConnection); - sinon.stub(PermitService.prototype, 'insertNoSamplePermit').rejects(expectedError); + const response = await permitService.updateSurveyPermit(1, 2, '12345', 'permit type'); - try { - await permitService.createNoSamplePermits(sampleReq.body); + expect(updateSurveyPermitStub).to.have.been.calledOnceWith(1, 2, '12345', 'permit type'); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal(expectedError.message); - } + expect(response).to.equal(mockResponse); }); }); - describe('insertNoSamplePermit', () => { - afterEach(() => { - sinon.restore(); - }); - - const permitData = { - permit_number: 'number', - permit_type: 'type' - }; - - const coordinatorData = { - first_name: 'first', - last_name: 'last', - email_address: 'email@example.com', - coordinator_agency: 'agency', - share_contact_details: true - }; - - it('should throw an error when cannot generate post sql statement', async () => { + describe('deleteSurveyPermit', () => { + it('deletes an existing survey permit', async () => { const mockDBConnection = getMockDBConnection(); - sinon.stub(permit_queries, 'postPermitNoSamplingSQL').returns(null); - - const permitService = new PermitService(mockDBConnection); + const mockResponse = 2; - try { - await permitService.insertNoSamplePermit(permitData, coordinatorData); - - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to build SQL insert statement'); - } - }); - - it('should throw a HTTP 400 error when failed to insert non-sampling permits cause result is null', async () => { - const mockQueryResponse = (null as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); - - sinon.stub(permit_queries, 'postPermitNoSamplingSQL').returns(SQL`some`); + const deleteSurveyPermitStub = sinon + .stub(PermitRepository.prototype, 'deleteSurveyPermit') + .resolves(mockResponse); const permitService = new PermitService(mockDBConnection); - try { - await permitService.insertNoSamplePermit(permitData, coordinatorData); - - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).status).to.equal(400); - expect((actualError as HTTPError).message).to.equal('Failed to insert non-sampling permit data'); - } - }); - - it('should return the result id on success', async () => { - const mockQueryResponse = ({ rows: [{ id: 12 }] } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); - - sinon.stub(permit_queries, 'postPermitNoSamplingSQL').returns(SQL`some`); - - const permitService = new PermitService(mockDBConnection); + const response = await permitService.deleteSurveyPermit(1, 2); - const res = await permitService.insertNoSamplePermit(permitData, coordinatorData); + expect(deleteSurveyPermitStub).to.have.been.calledOnceWith(1, 2); - expect(res).to.equal(12); + expect(response).to.equal(mockResponse); }); }); }); diff --git a/api/src/services/permit-service.ts b/api/src/services/permit-service.ts index abec156a6b..dcdb8607ab 100644 --- a/api/src/services/permit-service.ts +++ b/api/src/services/permit-service.ts @@ -1,124 +1,95 @@ -import { HTTP400 } from '../errors/custom-error'; -import { IPostPermitNoSampling, PostPermitNoSamplingObject } from '../models/permit-no-sampling'; -import { PostCoordinatorData } from '../models/project-create'; -import { PutCoordinatorData } from '../models/project-update'; -import { queries } from '../queries/queries'; -import { DBService } from './service'; +import { SYSTEM_ROLE } from '../constants/roles'; +import { IDBConnection } from '../database/db'; +import { ApiGeneralError } from '../errors/api-error'; +import { IPermitModel, PermitRepository } from '../repositories/permit-repository'; +import { DBService } from './db-service'; +import { UserService } from './user-service'; +export class PermitService extends DBService { + permitRepository: PermitRepository; -interface IGetAllPermits { - id: string; - number: string; - type: string; - coordinator_agency: string; - project_name: string; -} + constructor(connection: IDBConnection) { + super(connection); -interface IGetNonSamplingPermits { - permit_id: string; - number: string; - type: string; -} + this.permitRepository = new PermitRepository(connection); + } -export class PermitService extends DBService { /** - * get all non-sampling permits + * Get permit by id. * - * @param {(number | null)} systemUserId - * @return {*} {Promise} + * @param {number} surveyId + * @return {*} {IPermitModel[]} * @memberof PermitService */ - async getAllPermits(systemUserId: number | null): Promise { - const sqlStatement = queries.permit.getAllPermitsSQL(systemUserId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rows) { - throw new HTTP400('Failed to get all user permits'); - } - - return response.rows; + async getPermitBySurveyId(surveyId: number): Promise { + return this.permitRepository.getPermitBySurveyId(surveyId); } /** - * get all non-sampling permits + * Get permit by user. * - * @param {(number | null)} systemUserId - * @return {*} {Promise} + * @param + * @return {*} {IPermitModel[]} * @memberof PermitService */ - async getNonSamplingPermits(systemUserId: number | null): Promise { - const sqlStatement = queries.permit.getNonSamplingPermitsSQL(systemUserId); + async getPermitByUser(systemUserId: number): Promise { + const userService = new UserService(this.connection); + const user = await userService.getUserById(systemUserId); - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); + if (!user) { + throw new ApiGeneralError('Failed to acquire user'); } - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rows) { - throw new HTTP400('Failed to get all user permits'); + if ( + user.role_names.includes(SYSTEM_ROLE.SYSTEM_ADMIN) || + user.role_names.includes(SYSTEM_ROLE.DATA_ADMINISTRATOR) + ) { + return this.permitRepository.getAllPermits(); } - return response.rows; + return this.permitRepository.getPermitByUser(systemUserId); } /** - * Creates new no sample permit objects and insert all + * Create and associate permit for survey. * - * @param {object} permitRequestBody - * @return {*} {Promise} + * @param {number} surveyId + * @param {string} permitNumber + * @param {string} permitType + * @return {*} {IPermitModel[]} * @memberof PermitService */ - async createNoSamplePermits(permitRequestBody: object): Promise { - const sanitizedNoSamplePermitPostData = new PostPermitNoSamplingObject(permitRequestBody); - - if (!sanitizedNoSamplePermitPostData.permit || !sanitizedNoSamplePermitPostData.permit.permits.length) { - throw new HTTP400('Missing request body param `permit`'); - } - - if (!sanitizedNoSamplePermitPostData.coordinator) { - throw new HTTP400('Missing request body param `coordinator`'); - } - - return Promise.all( - sanitizedNoSamplePermitPostData.permit.permits.map(async (permit: IPostPermitNoSampling) => - this.insertNoSamplePermit(permit, sanitizedNoSamplePermitPostData.coordinator) - ) - ); + async createSurveyPermit(surveyId: number, permitNumber: string, permitType: string): Promise { + return this.permitRepository.createSurveyPermit(surveyId, permitNumber, permitType); } /** - * insert a no sample permit row. + * Update a survey permit. * - * @param {IPostPermitNoSampling} permit - * @param {(PostCoordinatorData | PutCoordinatorData)} coordinator - * @return {*} {Promise} + * @param {number} surveyId + * @param {number} permitId + * @param {string} permitNumber + * @param {string} permitType + * @return {*} {IPermitModel[]} * @memberof PermitService */ - async insertNoSamplePermit( - permit: IPostPermitNoSampling, - coordinator: PostCoordinatorData | PutCoordinatorData + async updateSurveyPermit( + surveyId: number, + permitId: number, + permitNumber: string, + permitType: string ): Promise { - const systemUserId = this.connection.systemUserId(); - - const sqlStatement = queries.permit.postPermitNoSamplingSQL({ ...permit, ...coordinator }, systemUserId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new HTTP400('Failed to insert non-sampling permit data'); - } + return this.permitRepository.updateSurveyPermit(surveyId, permitId, permitNumber, permitType); + } - return result.id; + /** + * Delete a survey permit. + * + * @param {number} surveyId + * @param {number} permitId + * @return {*} QueryResult + * @memberof PermitService + */ + async deleteSurveyPermit(surveyId: number, permitId: number): Promise { + return this.permitRepository.deleteSurveyPermit(surveyId, permitId); } } diff --git a/api/src/services/platform-service.test.ts b/api/src/services/platform-service.test.ts index a483279598..0601b3c9db 100644 --- a/api/src/services/platform-service.test.ts +++ b/api/src/services/platform-service.test.ts @@ -1,21 +1,125 @@ +import AdmZip from 'adm-zip'; +import { S3 } from 'aws-sdk'; +import { GetObjectOutput } from 'aws-sdk/clients/s3'; import axios from 'axios'; import chai, { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; +import { HTTP400 } from '../errors/http-error'; +import { IGetLatestSurveyOccurrenceSubmission } from '../repositories/survey-repository'; +import * as file_utils from '../utils/file-utils'; +import { getMockDBConnection } from '../__mocks__/db'; +import { EmlService } from './eml-service'; import { KeycloakService } from './keycloak-service'; import { IDwCADataset, PlatformService } from './platform-service'; +import { SurveyService } from './survey-service'; chai.use(sinonChai); describe('PlatformService', () => { - describe('submitNewDataPackage', () => { + describe('submitDwCAMetadataPackage', () => { + afterEach(() => { + sinon.restore(); + }); + it('returns if intake Disabled', async () => { + const mockDBConnection = getMockDBConnection(); + + process.env.BACKBONE_INTAKE_ENABLED = 'false'; + + const platformService = new PlatformService(mockDBConnection); + + const response = await platformService.submitDwCAMetadataPackage(1); + + expect(response).to.eql(undefined); + }); + + it('fetches project EML and submits to the backbone', async () => { + const mockDBConnection = getMockDBConnection(); + + process.env.BACKBONE_INTAKE_ENABLED = 'true'; + + const buildProjectEmlStub = sinon.stub(EmlService.prototype, 'buildProjectEml').resolves('xml data'); + + sinon.stub(EmlService.prototype, 'packageId').get(() => '123-456-789'); + + const _submitDwCADatasetToBioHubBackboneStub = sinon + .stub(PlatformService.prototype, '_submitDwCADatasetToBioHubBackbone') + .resolves({ data_package_id: '123-456-789' }); + + const platformService = new PlatformService(mockDBConnection); + + await platformService.submitDwCAMetadataPackage(1); + + expect(buildProjectEmlStub).to.have.been.calledOnce; + expect(_submitDwCADatasetToBioHubBackboneStub).to.have.been.calledOnceWith({ + archiveFile: { + data: sinon.match.any, + fileName: 'DwCA.zip', + mimeType: 'application/zip' + }, + dataPackageId: '123-456-789' + }); + }); + }); + + describe('submitDwCADataPackage', () => { + afterEach(() => { + sinon.restore(); + }); + + it('returns if intake Disabled', async () => { + const mockDBConnection = getMockDBConnection(); + + process.env.BACKBONE_INTAKE_ENABLED = 'false'; + + const platformService = new PlatformService(mockDBConnection); + + const response = await platformService.submitDwCADataPackage(1); + + expect(response).to.eql(undefined); + }); + + it('fetches project EML and occurrence data and submits to the backbone', async () => { + const mockDBConnection = getMockDBConnection(); + + process.env.BACKBONE_INTAKE_ENABLED = 'true'; + + const buildProjectEmlStub = sinon.stub(EmlService.prototype, 'buildProjectEml').resolves('xml data'); + + sinon.stub(EmlService.prototype, 'packageId').get(() => '123-456-789'); + + const _submitDwCADatasetToBioHubBackboneStub = sinon + .stub(PlatformService.prototype, '_submitDwCADatasetToBioHubBackbone') + .resolves({ data_package_id: '123-456-789' }); + + const platformService = new PlatformService(mockDBConnection); + + await platformService.submitDwCADataPackage(1); + + expect(buildProjectEmlStub).to.have.been.calledOnce; + expect(_submitDwCADatasetToBioHubBackboneStub).to.have.been.calledOnceWith({ + archiveFile: { + data: sinon.match.any, + fileName: 'DwCA.zip', + mimeType: 'application/zip' + }, + dataPackageId: '123-456-789' + }); + }); + }); + + describe('_submitDwCADatasetToBioHubBackbone', () => { afterEach(() => { sinon.restore(); }); it('makes an axios post to the BioHub Platform Backbone API', async () => { - process.env.BACKBONE_API_HOST = 'backbone.com'; + const mockDBConnection = getMockDBConnection(); + + process.env.BACKBONE_API_HOST = 'http://backbone.com'; + process.env.BACKBONE_INTAKE_PATH = 'api/intake'; + process.env.BACKBONE_INTAKE_ENABLED = 'true'; const keycloakServiceStub = sinon.stub(KeycloakService.prototype, 'getKeycloakToken').resolves('token'); @@ -30,22 +134,159 @@ describe('PlatformService', () => { dataPackageId: '123-456-789' }; - const platformService = new PlatformService(); + const platformService = new PlatformService(mockDBConnection); - await platformService.submitNewDataPackage(dwcaDataset); + await platformService._submitDwCADatasetToBioHubBackbone(dwcaDataset); expect(keycloakServiceStub).to.have.been.calledOnce; - expect(axiosStub).to.have.been.calledOnceWith( - 'backbone.com/api/dwc/submission/create', - sinon.match.instanceOf(Buffer), - { - headers: { - authorization: `Bearer token`, - 'content-type': sinon.match(new RegExp(/^multipart\/form-data; boundary=[-]*[0-9]*$/)) - } + expect(axiosStub).to.have.been.calledOnceWith('http://backbone.com/api/intake', sinon.match.instanceOf(Buffer), { + headers: { + authorization: `Bearer token`, + 'content-type': sinon.match(new RegExp(/^multipart\/form-data; boundary=[-]*[0-9]*$/)) } - ); + }); + }); + }); + + describe('uploadSurveyDataToBioHub', () => { + afterEach(() => { + sinon.restore(); + }); + + it('returns if intake Disabled', async () => { + const mockDBConnection = getMockDBConnection(); + + process.env.BACKBONE_INTAKE_ENABLED = 'false'; + + const platformService = new PlatformService(mockDBConnection); + + const response = await platformService.uploadSurveyDataToBioHub(1, 1); + + expect(response).to.eql(undefined); + }); + + it('Throw error if no s3 key found', async () => { + const mockDBConnection = getMockDBConnection(); + + process.env.BACKBONE_INTAKE_ENABLED = 'true'; + + const getLatestSurveyOccurrenceSubmissionStub = sinon + .stub(SurveyService.prototype, 'getLatestSurveyOccurrenceSubmission') + .resolves(); + + const platformService = new PlatformService(mockDBConnection); + + try { + await platformService.uploadSurveyDataToBioHub(1, 1); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTP400).message).to.equal('no s3Key found'); + expect(getLatestSurveyOccurrenceSubmissionStub).to.have.been.calledOnce; + } + }); + + it('Throw error if no s3 file found', async () => { + const mockDBConnection = getMockDBConnection(); + + process.env.BACKBONE_INTAKE_ENABLED = 'true'; + + const getLatestSurveyOccurrenceSubmissionStub = sinon + .stub(SurveyService.prototype, 'getLatestSurveyOccurrenceSubmission') + .resolves(({ output_key: 'key' } as unknown) as IGetLatestSurveyOccurrenceSubmission); + + const getFileFromS3Stub = sinon + .stub(file_utils, 'getFileFromS3') + .resolves((false as unknown) as S3.GetObjectOutput); + + const platformService = new PlatformService(mockDBConnection); + + try { + await platformService.uploadSurveyDataToBioHub(1, 1); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTP400).message).to.equal('no s3File found'); + expect(getLatestSurveyOccurrenceSubmissionStub).to.have.been.calledOnce; + expect(getFileFromS3Stub).to.have.been.calledOnce; + } + }); + + it('Throw error if eml string failed to build', async () => { + const mockDBConnection = getMockDBConnection(); + + process.env.BACKBONE_INTAKE_ENABLED = 'true'; + + const zipFile = new AdmZip(); + + zipFile.addFile('file1.txt', Buffer.from('file1data')); + zipFile.addFile('folder2/', Buffer.from('')); // add folder + zipFile.addFile('folder2/file2.csv', Buffer.from('file2data')); + + const s3File = ({ + Metadata: { filename: 'zipFile.zip' }, + ContentType: 'application/zip', + Body: zipFile.toBuffer() + } as unknown) as GetObjectOutput; + + const getLatestSurveyOccurrenceSubmissionStub = sinon + .stub(SurveyService.prototype, 'getLatestSurveyOccurrenceSubmission') + .resolves(({ output_key: 'key' } as unknown) as IGetLatestSurveyOccurrenceSubmission); + + const getFileFromS3Stub = sinon.stub(file_utils, 'getFileFromS3').resolves(s3File); + + const buildProjectEmlStub = sinon.stub(EmlService.prototype, 'buildProjectEml').resolves(); + + const platformService = new PlatformService(mockDBConnection); + + try { + await platformService.uploadSurveyDataToBioHub(1, 1); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTP400).message).to.equal('emlString failed to build'); + expect(getLatestSurveyOccurrenceSubmissionStub).to.have.been.calledOnce; + expect(buildProjectEmlStub).to.have.been.calledOnce; + expect(getFileFromS3Stub).to.have.been.calledOnce; + } + }); + + it('Should succeed with valid data', async () => { + const mockDBConnection = getMockDBConnection(); + + process.env.BACKBONE_INTAKE_ENABLED = 'true'; + + const zipFile = new AdmZip(); + + zipFile.addFile('file1.txt', Buffer.from('file1data')); + zipFile.addFile('folder2/', Buffer.from('')); // add folder + zipFile.addFile('folder2/file2.csv', Buffer.from('file2data')); + + const s3File = ({ + Metadata: { filename: 'zipFile.zip' }, + ContentType: 'application/zip', + Body: zipFile.toBuffer() + } as unknown) as GetObjectOutput; + + const getLatestSurveyOccurrenceSubmissionStub = sinon + .stub(SurveyService.prototype, 'getLatestSurveyOccurrenceSubmission') + .resolves(({ output_key: 'key' } as unknown) as IGetLatestSurveyOccurrenceSubmission); + + const getFileFromS3Stub = sinon.stub(file_utils, 'getFileFromS3').resolves(s3File); + + const buildProjectEmlStub = sinon.stub(EmlService.prototype, 'buildProjectEml').resolves('string'); + sinon.stub(EmlService.prototype, 'packageId').get(() => 1); + + const _submitDwCADatasetToBioHubBackboneStub = sinon + .stub(PlatformService.prototype, '_submitDwCADatasetToBioHubBackbone') + .resolves({ data_package_id: '123-456-789' }); + + const platformService = new PlatformService(mockDBConnection); + + await platformService.uploadSurveyDataToBioHub(1, 1); + + expect(buildProjectEmlStub).to.have.been.calledOnce; + expect(getLatestSurveyOccurrenceSubmissionStub).to.have.been.calledOnce; + expect(getFileFromS3Stub).to.have.been.calledOnce; + expect(_submitDwCADatasetToBioHubBackboneStub).to.have.been.calledOnce; }); }); }); diff --git a/api/src/services/platform-service.ts b/api/src/services/platform-service.ts index c28fbeb6f8..4f9fcd6088 100644 --- a/api/src/services/platform-service.ts +++ b/api/src/services/platform-service.ts @@ -1,6 +1,13 @@ +import AdmZip from 'adm-zip'; import axios from 'axios'; import FormData from 'form-data'; +import { URL } from 'url'; +import { HTTP400 } from '../errors/http-error'; +import { getFileFromS3 } from '../utils/file-utils'; +import { DBService } from './db-service'; +import { EmlService } from './eml-service'; import { KeycloakService } from './keycloak-service'; +import { SurveyService } from './survey-service'; export interface IDwCADataset { archiveFile: { @@ -8,7 +15,13 @@ export interface IDwCADataset { * A Darwin Core Archive (DwCA) zip file. */ data: Buffer; + /** + * The name of the archive file. + */ fileName: string; + /** + * The mime type, should be `application/zip` or similar. + */ mimeType: string; }; /** @@ -17,9 +30,83 @@ export interface IDwCADataset { dataPackageId: string; } -export class PlatformService { +export class PlatformService extends DBService { + BACKBONE_INTAKE_ENABLED = process.env.BACKBONE_INTAKE_ENABLED === 'true' || false; BACKBONE_API_HOST = process.env.BACKBONE_API_HOST; - BACKBONE_API_INGEST_PATH = '/api/dwc/submission/create'; + BACKBONE_INTAKE_PATH = process.env.BACKBONE_INTAKE_PATH || '/api/dwc/submission/intake'; + + /** + * Submit a Darwin Core Archive (DwCA) data package, that only contains the project/survey metadata, to the BioHub + * Platform Backbone. + * + * Why submit only metadata? It is beneficial to submit the metadata as early as possible, so that the project/survey + * is discoverable by users of BioHub, even if the project/survey has not yet completed or not all inventory data has + * been submitted. + * + * Note: Does nothing if `process.env.BACKBONE_INTAKE_ENABLED` is not `true`. + * + * @param {number} projectId + * @return {*} + * @memberof PlatformService + */ + async submitDwCAMetadataPackage(projectId: number) { + if (!this.BACKBONE_INTAKE_ENABLED) { + return; + } + + const emlService = new EmlService({ projectId: projectId }, this.connection); + + const emlString = await emlService.buildProjectEml(); + + const dwcArchiveZip = new AdmZip(); + dwcArchiveZip.addFile('eml.xml', Buffer.from(emlString)); + + const dwCADataset = { + archiveFile: { + data: dwcArchiveZip.toBuffer(), + fileName: 'DwCA.zip', + mimeType: 'application/zip' + }, + dataPackageId: emlService.packageId + }; + + return this._submitDwCADatasetToBioHubBackbone(dwCADataset); + } + + /** + * Submit a Darwin Core Archive (DwCA) data package, that contains both project/survey metadata and survey occurrence + * data, to the BioHub Platform Backbone. + * + * Note: Does nothing if `process.env.BACKBONE_INTAKE_ENABLED` is not `true`. + * + * @param {number} projectId + * @return {*} + * @memberof PlatformService + */ + async submitDwCADataPackage(projectId: number) { + if (!this.BACKBONE_INTAKE_ENABLED) { + return; + } + + const emlService = new EmlService({ projectId: projectId }, this.connection); + + const emlString = await emlService.buildProjectEml(); + + const dwcArchiveZip = new AdmZip(); + dwcArchiveZip.addFile('eml.xml', Buffer.from(emlString)); + // TODO fetch and add DwCA data files to archive + + const dwCADataset = { + archiveFile: { + data: dwcArchiveZip.toBuffer(), + fileName: 'DwCA.zip', + mimeType: 'application/zip' + }, + dataPackageId: emlService.packageId + }; + + return this._submitDwCADatasetToBioHubBackbone(dwCADataset); + } /** * Submit a new Darwin Core Archive (DwCA) data package to the BioHub Platform Backbone. @@ -28,7 +115,7 @@ export class PlatformService { * @return {*} {Promise<{ data_package_id: string }>} * @memberof PlatformService */ - async submitNewDataPackage(dwcaDataset: IDwCADataset): Promise<{ data_package_id: string }> { + async _submitDwCADatasetToBioHubBackbone(dwcaDataset: IDwCADataset): Promise<{ data_package_id: string }> { const keycloakService = new KeycloakService(); const token = await keycloakService.getKeycloakToken(); @@ -42,17 +129,64 @@ export class PlatformService { formData.append('data_package_id', dwcaDataset.dataPackageId); - const { data } = await axios.post<{ data_package_id: string }>( - `${this.BACKBONE_API_HOST}${this.BACKBONE_API_INGEST_PATH}`, - formData.getBuffer(), - { - headers: { - authorization: `Bearer ${token}`, - ...formData.getHeaders() - } + const backboneIntakeUrl = new URL(this.BACKBONE_INTAKE_PATH, this.BACKBONE_API_HOST).href; + + const { data } = await axios.post<{ data_package_id: string }>(backboneIntakeUrl, formData.getBuffer(), { + headers: { + authorization: `Bearer ${token}`, + ...formData.getHeaders() } - ); + }); return data; } + + /** + * Upload Survey/Project/Observation data to Backbone + * + * @param {number} projectId + * @param {number} surveyId + * @return {*} + * @memberof PlatformService + */ + async uploadSurveyDataToBioHub(projectId: number, surveyId: number) { + if (!this.BACKBONE_INTAKE_ENABLED) { + return; + } + + const surveyService = new SurveyService(this.connection); + const surveyData = await surveyService.getLatestSurveyOccurrenceSubmission(surveyId); + + if (!surveyData || !surveyData.output_key) { + throw new HTTP400('no s3Key found'); + } + + const s3File = await getFileFromS3(surveyData.output_key); + + if (!s3File) { + throw new HTTP400('no s3File found'); + } + + const dwcArchiveZip = new AdmZip(s3File.Body as Buffer); + + const emlService = new EmlService({ projectId: projectId }, this.connection); + const emlString = await emlService.buildProjectEml(); + + if (!emlString) { + throw new HTTP400('emlString failed to build'); + } + + dwcArchiveZip.addFile('eml.xml', Buffer.from(emlString)); + + const dwCADataset = { + archiveFile: { + data: dwcArchiveZip.toBuffer(), + fileName: 'DwCA.zip', + mimeType: 'application/zip' + }, + dataPackageId: emlService.packageId + }; + + return this._submitDwCADatasetToBioHubBackbone(dwCADataset); + } } diff --git a/api/src/services/project-service.test.ts b/api/src/services/project-service.test.ts index db5f7b8a6f..a440d6d4a9 100644 --- a/api/src/services/project-service.test.ts +++ b/api/src/services/project-service.test.ts @@ -1,10 +1,7 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; -import { QueryResult } from 'pg'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; -import { HTTPError } from '../errors/custom-error'; import { GetCoordinatorData, GetFundingData, @@ -12,10 +9,9 @@ import { GetLocationData, GetObjectivesData, GetPartnershipsData, - GetPermitData, GetProjectData } from '../models/project-view'; -import { queries } from '../queries/queries'; +import { ProjectRepository } from '../repositories/project-repository'; import { getMockDBConnection } from '../__mocks__/db'; import { ProjectService } from './project-service'; @@ -77,268 +73,63 @@ describe('ProjectService', () => { }); describe('getProjectParticipant', () => { - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 400 error when no sql statement produced', async () => { - const mockDBConnection = getMockDBConnection(); - - sinon.stub(queries.projectParticipation, 'getProjectParticipationBySystemUserSQL').returns(null); - - const projectId = 1; - const systemUserId = 1; - - const projectService = new ProjectService(mockDBConnection); - - try { - await projectService.getProjectParticipant(projectId, systemUserId); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to build SQL select statement'); - expect((actualError as HTTPError).status).to.equal(400); - } - }); - - it('should throw a 400 response when response has no rowCount', async () => { - const mockQueryResponse = (null as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); - - sinon.stub(queries.projectParticipation, 'getProjectParticipationBySystemUserSQL').returns(SQL`valid sql`); - - const projectId = 1; - const systemUserId = 1; - - const projectService = new ProjectService(mockDBConnection); - - try { - await projectService.getProjectParticipant(projectId, systemUserId); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to get project team members'); - expect((actualError as HTTPError).status).to.equal(400); - } - }); - - it('returns null if there are no rows', async () => { - const mockQueryResponse = ({ rows: [] } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); - - sinon.stub(queries.projectParticipation, 'getProjectParticipationBySystemUserSQL').returns(SQL`valid sql`); - - const projectId = 1; - const systemUserId = 1; - - const projectService = new ProjectService(mockDBConnection); - - const result = await projectService.getProjectParticipant(projectId, systemUserId); - - expect(result).to.equal(null); - }); - it('returns the first row on success', async () => { - const mockRowObj = { id: 123 }; - const mockQueryResponse = ({ rows: [mockRowObj] } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); - - sinon.stub(queries.projectParticipation, 'getProjectParticipationBySystemUserSQL').returns(SQL`valid sql`); + const dbConnection = getMockDBConnection(); + const service = new ProjectService(dbConnection); - const projectId = 1; - const systemUserId = 1; + const data = { id: 1 }; - const projectService = new ProjectService(mockDBConnection); + const repoStub = sinon.stub(ProjectRepository.prototype, 'getProjectParticipant').resolves(data); - const result = await projectService.getProjectParticipant(projectId, systemUserId); + const response = await service.getProjectParticipant(1, 1); - expect(result).to.equal(mockRowObj); + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); }); }); describe('getProjectParticipants', () => { - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 400 error when no sql statement produced', async () => { - const mockDBConnection = getMockDBConnection(); - - sinon.stub(queries.projectParticipation, 'getAllProjectParticipantsSQL').returns(null); - - const projectId = 1; - - const projectService = new ProjectService(mockDBConnection); - - try { - await projectService.getProjectParticipants(projectId); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to build SQL select statement'); - expect((actualError as HTTPError).status).to.equal(400); - } - }); - - it('should throw a 400 response when response has no rowCount', async () => { - const mockQueryResponse = (null as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); - - sinon.stub(queries.projectParticipation, 'getAllProjectParticipantsSQL').returns(SQL`valid sql`); - - const projectId = 1; - - const projectService = new ProjectService(mockDBConnection); - - try { - await projectService.getProjectParticipants(projectId); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to get project team members'); - expect((actualError as HTTPError).status).to.equal(400); - } - }); - - it('returns empty array if there are no rows', async () => { - const mockQueryResponse = ({ rows: [] } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); - - sinon.stub(queries.projectParticipation, 'getAllProjectParticipantsSQL').returns(SQL`valid sql`); - - const projectId = 1; - - const projectService = new ProjectService(mockDBConnection); - - const result = await projectService.getProjectParticipants(projectId); - - expect(result).to.eql([]); - }); - - it('returns rows on success', async () => { - const mockRowObj = [{ id: 123 }]; - const mockQueryResponse = ({ rows: mockRowObj } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); - - sinon.stub(queries.projectParticipation, 'getAllProjectParticipantsSQL').returns(SQL`valid sql`); + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new ProjectService(dbConnection); - const projectId = 1; + const data = [{ id: 1 }]; - const projectService = new ProjectService(mockDBConnection); + const repoStub = sinon.stub(ProjectRepository.prototype, 'getProjectParticipants').resolves(data); - const result = await projectService.getProjectParticipants(projectId); + const response = await service.getProjectParticipants(1); - expect(result).to.equal(mockRowObj); + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); }); }); describe('addProjectParticipant', () => { - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 400 error when no sql statement produced', async () => { - const mockDBConnection = getMockDBConnection(); - - sinon.stub(queries.projectParticipation, 'addProjectRoleByRoleIdSQL').returns(null); - - const projectId = 1; - const systemUserId = 1; - const projectParticipantRoleId = 1; - - const projectService = new ProjectService(mockDBConnection); - - try { - await projectService.addProjectParticipant(projectId, systemUserId, projectParticipantRoleId); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to build SQL insert statement'); - expect((actualError as HTTPError).status).to.equal(400); - } - }); - - it('should throw a 400 response when response has no rowCount', async () => { - const mockQueryResponse = ({ rowCount: 0 } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); - - sinon.stub(queries.projectParticipation, 'addProjectRoleByRoleIdSQL').returns(SQL`valid sql`); - - const projectId = 1; - const systemUserId = 1; - const projectParticipantRoleId = 1; - - const projectService = new ProjectService(mockDBConnection); - - try { - await projectService.addProjectParticipant(projectId, systemUserId, projectParticipantRoleId); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to insert project team member'); - expect((actualError as HTTPError).status).to.equal(400); - } - }); - - it('should not throw an error on success', async () => { - const mockQueryResponse = ({ rowCount: 1 } as unknown) as QueryResult; - const mockQuery = sinon.fake.resolves(mockQueryResponse); - const mockDBConnection = getMockDBConnection({ query: mockQuery }); - - const addProjectRoleByRoleIdSQLStub = sinon - .stub(queries.projectParticipation, 'addProjectRoleByRoleIdSQL') - .returns(SQL`valid sql`); - - const projectId = 1; - const systemUserId = 1; - const projectParticipantRoleId = 1; + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new ProjectService(dbConnection); - const projectService = new ProjectService(mockDBConnection); + const repoStub = sinon.stub(ProjectRepository.prototype, 'addProjectParticipant').resolves(); - await projectService.addProjectParticipant(projectId, systemUserId, projectParticipantRoleId); + const response = await service.addProjectParticipant(1, 1, 1); - expect(addProjectRoleByRoleIdSQLStub).to.have.been.calledOnce; - expect(mockQuery).to.have.been.calledOnce; + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(undefined); }); }); - describe('getPublicProjectsList', () => { - afterEach(() => { - sinon.restore(); - }); - - it('should throw a 400 error when no sql statement produced', async () => { - const mockDBConnection = getMockDBConnection(); - - sinon.stub(queries.public, 'getPublicProjectListSQL').returns(null); - - const projectService = new ProjectService(mockDBConnection); - - try { - await projectService.getPublicProjectsList(); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - expect((actualError as HTTPError).status).to.equal(400); - } - }); - - it('returns empty array if there are no rows', async () => { - const mockQueryResponse = ({ rows: [] } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); - - sinon.stub(queries.public, 'getPublicProjectListSQL').returns(SQL`valid sql`); - - const projectService = new ProjectService(mockDBConnection); - - const result = await projectService.getPublicProjectsList(); - - expect(result).to.eql([]); - }); - + describe('getProjectList', () => { it('returns rows on success', async () => { - const mockRowObj = [ + const dbConnection = getMockDBConnection(); + const service = new ProjectService(dbConnection); + + const data = [ { id: 123, name: 'Project 1', start_date: '1900-01-01', - end_date: '2000-10-10', + end_date: '2200-10-10', coordinator_agency: 'Agency 1', - permits_list: '3, 100', project_type: 'Aquatic Habitat' }, { @@ -347,163 +138,136 @@ describe('ProjectService', () => { start_date: '1900-01-01', end_date: '2000-12-31', coordinator_agency: 'Agency 2', - permits_list: '1, 4', project_type: 'Terrestrial Habitat' } ]; - const mockQueryResponse = ({ rows: mockRowObj } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); - sinon.stub(queries.public, 'getPublicProjectListSQL').returns(SQL`valid sql`); + const repoStub = sinon.stub(ProjectRepository.prototype, 'getProjectList').resolves(data); - const projectService = new ProjectService(mockDBConnection); - - const result = await projectService.getPublicProjectsList(); + const response = await service.getProjectList(true, 1, 1); - expect(result[0].id).to.equal(123); - expect(result[0].name).to.equal('Project 1'); - expect(result[0].completion_status).to.equal('Completed'); + expect(repoStub).to.be.calledOnce; + expect(response[0].id).to.equal(123); + expect(response[0].name).to.equal('Project 1'); + expect(response[0].completion_status).to.equal('Active'); - expect(result[1].id).to.equal(456); - expect(result[1].name).to.equal('Project 2'); - expect(result[1].completion_status).to.equal('Completed'); + expect(response[1].id).to.equal(456); + expect(response[1].name).to.equal('Project 2'); + expect(response[1].completion_status).to.equal('Completed'); }); }); +}); - describe('getProjectList', () => { - afterEach(() => { - sinon.restore(); - }); +describe('getProjectData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new ProjectService(dbConnection); - it('should throw a 400 error when no sql statement produced', async () => { - const mockDBConnection = getMockDBConnection(); + const data = new GetProjectData({ id: 1 }); - sinon.stub(queries.project, 'getProjectListSQL').returns(null); + const repoStub = sinon.stub(ProjectRepository.prototype, 'getProjectData').resolves(data); - const projectService = new ProjectService(mockDBConnection); + const response = await service.getProjectData(1); - try { - await projectService.getProjectList(true, 1, {}); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to build SQL select statement'); - expect((actualError as HTTPError).status).to.equal(400); - } - }); + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); +}); - it('returns empty array if there are no rows', async () => { - const mockQueryResponse = ({ rows: [] } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); +describe('getObjectivesData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new ProjectService(dbConnection); - sinon.stub(queries.project, 'getProjectListSQL').returns(SQL`valid sql`); + const data = new GetObjectivesData({ id: 1 }); - const projectService = new ProjectService(mockDBConnection); + const repoStub = sinon.stub(ProjectRepository.prototype, 'getObjectivesData').resolves(data); - const result = await projectService.getProjectList(true, 1, {}); + const response = await service.getObjectivesData(1); - expect(result).to.eql([]); - }); + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); +}); - it('returns rows on success', async () => { - const mockRowObj = [ - { - id: 123, - name: 'Project 1', - start_date: '1900-01-01', - end_date: '2200-10-10', - coordinator_agency: 'Agency 1', - publish_timestamp: '2010-01-01', - permits_list: '3, 100', - project_type: 'Aquatic Habitat' - }, - { - id: 456, - name: 'Project 2', - start_date: '1900-01-01', - end_date: '2000-12-31', - coordinator_agency: 'Agency 2', - publish_timestamp: '', - permits_list: '1, 4', - project_type: 'Terrestrial Habitat' - } - ]; - const mockQueryResponse = ({ rows: mockRowObj } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); +describe('getCoordinatorData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new ProjectService(dbConnection); - sinon.stub(queries.project, 'getProjectListSQL').returns(SQL`valid sql`); + const data = new GetCoordinatorData({ id: 1 }); - const projectService = new ProjectService(mockDBConnection); + const repoStub = sinon.stub(ProjectRepository.prototype, 'getCoordinatorData').resolves(data); - const result = await projectService.getProjectList(true, 1, {}); + const response = await service.getCoordinatorData(1); - expect(result[0].id).to.equal(123); - expect(result[0].name).to.equal('Project 1'); - expect(result[0].completion_status).to.equal('Active'); - expect(result[0].publish_status).to.equal('Published'); + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); +}); - expect(result[1].id).to.equal(456); - expect(result[1].name).to.equal('Project 2'); - expect(result[1].completion_status).to.equal('Completed'); - expect(result[1].publish_status).to.equal('Unpublished'); - }); +describe('getLocationData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new ProjectService(dbConnection); + + const data = new GetLocationData({ id: 1 }); + + const repoStub = sinon.stub(ProjectRepository.prototype, 'getLocationData').resolves(data); + + const response = await service.getLocationData(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); }); +}); - describe('getPublicProjectById', () => { - afterEach(() => { - sinon.restore(); - }); +describe('getIUCNClassificationData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new ProjectService(dbConnection); - it('should throw a 400 error when no sql statement produced for getPublicProjectSQL', async () => { - const mockDBConnection = getMockDBConnection(); + const data = new GetIUCNClassificationData([{ id: 1 }]); - sinon.stub(queries.public, 'getPublicProjectSQL').returns(null); + const repoStub = sinon.stub(ProjectRepository.prototype, 'getIUCNClassificationData').resolves(data); - const projectService = new ProjectService(mockDBConnection); + const response = await service.getIUCNClassificationData(1); - try { - await projectService.getPublicProjectById(1); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - expect((actualError as HTTPError).status).to.equal(400); - } - }); + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); +}); - it('should throw a 400 error when no sql statement produced', async () => { - const mockDBConnection = getMockDBConnection(); +describe('getFundingData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new ProjectService(dbConnection); - sinon.stub(queries.public, 'getPublicProjectSQL').returns(null); - sinon.stub(queries.public, 'getActivitiesByPublicProjectSQL').returns(null); + const data = new GetFundingData([{ id: 1 }]); - const projectService = new ProjectService(mockDBConnection); + const repoStub = sinon.stub(ProjectRepository.prototype, 'getFundingData').resolves(data); - try { - await projectService.getPublicProjectById(1); - expect.fail(); - } catch (actualError) { - expect((actualError as HTTPError).message).to.equal('Failed to build SQL get statement'); - expect((actualError as HTTPError).status).to.equal(400); - } - }); + const response = await service.getFundingData(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); }); +}); - it('returns rows on success', async () => { - const mockQueryResponse = ({ rows: [{ id: 1 }] } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); +describe('getPartnershipsData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new ProjectService(dbConnection); - sinon.stub(ProjectService.prototype, 'getPublicProjectData').resolves(new GetProjectData()); - sinon.stub(ProjectService.prototype, 'getObjectivesData').resolves(new GetObjectivesData()); - sinon.stub(ProjectService.prototype, 'getCoordinatorData').resolves(new GetCoordinatorData()); - sinon.stub(ProjectService.prototype, 'getPermitData').resolves(new GetPermitData()); - sinon.stub(ProjectService.prototype, 'getLocationData').resolves(new GetLocationData()); - sinon.stub(ProjectService.prototype, 'getPartnershipsData').resolves(new GetPartnershipsData()); - sinon.stub(ProjectService.prototype, 'getIUCNClassificationData').resolves(new GetIUCNClassificationData()); - sinon.stub(ProjectService.prototype, 'getFundingData').resolves(new GetFundingData()); + const data = new GetPartnershipsData([{ id: 1 }], [{ id: 1 }]); - const projectService = new ProjectService(mockDBConnection); + const repoStub1 = sinon.stub(ProjectRepository.prototype, 'getIndigenousPartnershipsRows').resolves([{ id: 1 }]); + const repoStub2 = sinon.stub(ProjectRepository.prototype, 'getStakeholderPartnershipsRows').resolves([{ id: 1 }]); - const result = await projectService.getPublicProjectById(1); + const response = await service.getPartnershipsData(1); - expect(result.id).to.equal(1); + expect(repoStub1).to.be.calledOnce; + expect(repoStub2).to.be.calledOnce; + expect(response).to.eql(data); }); }); diff --git a/api/src/services/project-service.ts b/api/src/services/project-service.ts index 0e42c6bf9a..84f00728ac 100644 --- a/api/src/services/project-service.ts +++ b/api/src/services/project-service.ts @@ -1,19 +1,14 @@ import moment from 'moment'; -import { PROJECT_ROLE, SYSTEM_ROLE } from '../constants/roles'; +import { QueryResult } from 'pg'; +import { PROJECT_ROLE } from '../constants/roles'; import { COMPLETION_STATUS } from '../constants/status'; -import { HTTP400, HTTP409, HTTP500 } from '../errors/custom-error'; -import { - IPostExistingPermit, - IPostIUCN, - IPostPermit, - PostFundingSource, - PostPermitData, - PostProjectObject -} from '../models/project-create'; +import { IDBConnection } from '../database/db'; +import { HTTP400 } from '../errors/http-error'; +import { IPostIUCN, PostFundingSource, PostProjectObject } from '../models/project-create'; import { IPutIUCN, PutCoordinatorData, - PutFundingSource, + PutFundingData, PutIUCNData, PutLocationData, PutObjectivesData, @@ -21,24 +16,34 @@ import { PutProjectData } from '../models/project-update'; import { + GetAttachmentsData, GetCoordinatorData, GetFundingData, GetIUCNClassificationData, GetLocationData, GetObjectivesData, GetPartnershipsData, - GetPermitData, GetProjectData, + GetReportAttachmentsData, IGetProject } from '../models/project-view'; -import { getSurveyAttachmentS3Keys } from '../paths/project/{projectId}/survey/{surveyId}/delete'; import { GET_ENTITIES, IUpdateProject } from '../paths/project/{projectId}/update'; -import { queries } from '../queries/queries'; -import { userHasValidRole } from '../request-handlers/security/authorization'; +import { ProjectRepository } from '../repositories/project-repository'; import { deleteFileFromS3 } from '../utils/file-utils'; -import { DBService } from './service'; +import { AttachmentService } from './attachment-service'; +import { DBService } from './db-service'; +import { SurveyService } from './survey-service'; export class ProjectService extends DBService { + attachmentService: AttachmentService; + projectRepository: ProjectRepository; + + constructor(connection: IDBConnection) { + super(connection); + this.attachmentService = new AttachmentService(connection); + this.projectRepository = new ProjectRepository(connection); + } + /** * Gets the project participant, adding them if they do not already exist. * @@ -72,19 +77,7 @@ export class ProjectService extends DBService { * @memberof ProjectService */ async getProjectParticipant(projectId: number, systemUserId: number): Promise { - const sqlStatement = queries.projectParticipation.getProjectParticipationBySystemUserSQL(projectId, systemUserId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL select statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - if (!response) { - throw new HTTP400('Failed to get project team members'); - } - - return response?.rows?.[0] || null; + return this.projectRepository.getProjectParticipant(projectId, systemUserId); } /** @@ -95,19 +88,7 @@ export class ProjectService extends DBService { * @memberof ProjectService */ async getProjectParticipants(projectId: number): Promise { - const sqlStatement = queries.projectParticipation.getAllProjectParticipantsSQL(projectId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL select statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rows) { - throw new HTTP400('Failed to get project team members'); - } - - return (response && response.rows) || []; + return this.projectRepository.getProjectParticipants(projectId); } /** @@ -126,109 +107,22 @@ export class ProjectService extends DBService { systemUserId: number, projectParticipantRoleId: number ): Promise { - const sqlStatement = queries.projectParticipation.addProjectRoleByRoleIdSQL( - projectId, - systemUserId, - projectParticipantRoleId - ); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to insert project team member'); - } - } - - async getPublicProjectsList(): Promise { - const getProjectListSQLStatement = queries.public.getPublicProjectListSQL(); - - if (!getProjectListSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const response = await this.connection.query(getProjectListSQLStatement.text, getProjectListSQLStatement.values); - - if (!response || !response.rows || !response.rows.length) { - return []; - } - - return response.rows.map((row) => ({ - id: row.id, - name: row.name, - start_date: row.start_date, - end_date: row.end_date, - coordinator_agency: row.coordinator_agency, - completion_status: - (row.end_date && moment(row.end_date).endOf('day').isBefore(moment()) && COMPLETION_STATUS.COMPLETED) || - COMPLETION_STATUS.ACTIVE, - project_type: row.project_type, - permits_list: row.permits_list - })); - } - - async getPublicProjectById(projectId: number): Promise { - const [ - projectData, - objectiveData, - coordinatorData, - permitData, - locationData, - iucnData, - fundingData, - partnershipsData - ] = await Promise.all([ - this.getPublicProjectData(projectId), - this.getObjectivesData(projectId), - this.getCoordinatorData(projectId), - this.getPermitData(projectId), - this.getLocationData(projectId), - this.getIUCNClassificationData(projectId), - this.getFundingData(projectId), - this.getPartnershipsData(projectId) - ]); - - return { - id: projectId, - project: projectData, - objectives: objectiveData, - coordinator: coordinatorData, - permit: permitData, - location: locationData, - iucn: iucnData, - funding: fundingData, - partnerships: partnershipsData - }; + return this.projectRepository.addProjectParticipant(projectId, systemUserId, projectParticipantRoleId); } async getProjectList(isUserAdmin: boolean, systemUserId: number | null, filterFields: any): Promise { - const sqlStatement = queries.project.getProjectListSQL(isUserAdmin, systemUserId, filterFields); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL select statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - if (!response.rows) { - return []; - } + const response = await this.projectRepository.getProjectList(isUserAdmin, systemUserId, filterFields); - return response.rows.map((row) => ({ + return response.map((row) => ({ id: row.id, name: row.name, start_date: row.start_date, end_date: row.end_date, coordinator_agency: row.coordinator_agency_name, - publish_status: row.publish_timestamp ? 'Published' : 'Unpublished', completion_status: (row.end_date && moment(row.end_date).endOf('day').isBefore(moment()) && COMPLETION_STATUS.COMPLETED) || COMPLETION_STATUS.ACTIVE, - project_type: row.project_type, - permits_list: row.permits_list + project_type: row.project_type })); } @@ -237,7 +131,6 @@ export class ProjectService extends DBService { projectData, objectiveData, coordinatorData, - permitData, locationData, iucnData, fundingData, @@ -246,7 +139,6 @@ export class ProjectService extends DBService { this.getProjectData(projectId), this.getObjectivesData(projectId), this.getCoordinatorData(projectId), - this.getPermitData(projectId), this.getLocationData(projectId), this.getIUCNClassificationData(projectId), this.getFundingData(projectId), @@ -258,7 +150,6 @@ export class ProjectService extends DBService { project: projectData, objectives: objectiveData, coordinator: coordinatorData, - permit: permitData, location: locationData, iucn: iucnData, funding: fundingData, @@ -273,7 +164,6 @@ export class ProjectService extends DBService { const results: Pick & Partial> = { id: projectId, coordinator: undefined, - permit: undefined, project: undefined, objectives: undefined, location: undefined, @@ -292,14 +182,6 @@ export class ProjectService extends DBService { ); } - if (entities.includes(GET_ENTITIES.permit)) { - promises.push( - this.getPermitData(projectId).then((value) => { - results.permit = value; - }) - ); - } - if (entities.includes(GET_ENTITIES.partnerships)) { promises.push( this.getPartnershipsData(projectId).then((value) => { @@ -341,8 +223,8 @@ export class ProjectService extends DBService { } if (entities.includes(GET_ENTITIES.funding)) { promises.push( - this.getProjectData(projectId).then((value) => { - results.project = value; + this.getFundingData(projectId).then((value) => { + results.funding = value; }) ); } @@ -353,175 +235,52 @@ export class ProjectService extends DBService { } async getProjectData(projectId: number): Promise { - const getProjectSqlStatement = queries.project.getProjectSQL(projectId); - const getProjectActivitiesSQLStatement = queries.project.getActivitiesByProjectSQL(projectId); - - if (!getProjectSqlStatement || !getProjectActivitiesSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const [project, activity] = await Promise.all([ - this.connection.query(getProjectSqlStatement.text, getProjectSqlStatement.values), - this.connection.query(getProjectActivitiesSQLStatement.text, getProjectActivitiesSQLStatement.values) - ]); - - const projectResult = (project && project.rows && project.rows[0]) || null; - const activityResult = (activity && activity.rows) || null; - - if (!projectResult || !activityResult) { - throw new HTTP400('Failed to get project data'); - } - - return new GetProjectData(projectResult, activityResult); + return this.projectRepository.getProjectData(projectId); } async getObjectivesData(projectId: number): Promise { - const sqlStatement = queries.project.getObjectivesByProjectSQL(projectId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows && response.rows[0]) || null; - - if (!result) { - throw new HTTP400('Failed to get project objectives data'); - } - - return new GetObjectivesData(result); + return this.projectRepository.getObjectivesData(projectId); } async getCoordinatorData(projectId: number): Promise { - const sqlStatement = queries.project.getCoordinatorByProjectSQL(projectId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows && response.rows[0]) || null; - - if (!result) { - throw new HTTP400('Failed to get project contact data'); - } - - return new GetCoordinatorData(result); - } - - async getPermitData(projectId: number): Promise { - const sqlStatement = queries.project.getProjectPermitsSQL(projectId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL select statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows) || null; - - if (!result) { - throw new HTTP400('Failed to get project permit data'); - } - - return new GetPermitData(result); + return this.projectRepository.getCoordinatorData(projectId); } async getLocationData(projectId: number): Promise { - const sqlStatement = queries.project.getLocationByProjectSQL(projectId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows) || null; - - if (!result) { - throw new HTTP400('Failed to get project data'); - } - - return new GetLocationData(result); + return this.projectRepository.getLocationData(projectId); } async getIUCNClassificationData(projectId: number): Promise { - const sqlStatement = queries.project.getIUCNActionClassificationByProjectSQL(projectId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows) || null; - - if (!result) { - throw new HTTP400('Failed to get project data'); - } - - return new GetIUCNClassificationData(result); + return this.projectRepository.getIUCNClassificationData(projectId); } async getFundingData(projectId: number): Promise { - const sqlStatement = queries.project.getFundingSourceByProjectSQL(projectId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows) || null; - - if (!result) { - throw new HTTP400('Failed to get project data'); - } - - return new GetFundingData(result); + return this.projectRepository.getFundingData(projectId); } async getPartnershipsData(projectId: number): Promise { const [indigenousPartnershipsRows, stakegholderPartnershipsRows] = await Promise.all([ - this.getIndigenousPartnershipsRows(projectId), - this.getStakeholderPartnershipsRows(projectId) + this.projectRepository.getIndigenousPartnershipsRows(projectId), + this.projectRepository.getStakeholderPartnershipsRows(projectId) ]); - if (!indigenousPartnershipsRows) { - throw new HTTP400('Failed to get indigenous partnership data'); - } - - if (!stakegholderPartnershipsRows) { - throw new HTTP400('Failed to get stakeholder partnership data'); - } - return new GetPartnershipsData(indigenousPartnershipsRows, stakegholderPartnershipsRows); } async getIndigenousPartnershipsRows(projectId: number): Promise { - const sqlStatement = queries.project.getIndigenousPartnershipsByProjectSQL(projectId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - return (response && response.rows) || null; + return this.projectRepository.getIndigenousPartnershipsRows(projectId); } async getStakeholderPartnershipsRows(projectId: number): Promise { - const sqlStatement = queries.project.getStakeholderPartnershipsByProjectSQL(projectId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } + return this.projectRepository.getStakeholderPartnershipsRows(projectId); + } - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + async getAttachmentsData(projectId: number): Promise { + return this.projectRepository.getAttachmentsData(projectId); + } - return (response && response.rows) || null; + async getReportAttachmentsData(projectId: number): Promise { + return this.projectRepository.getReportAttachmentsData(projectId); } async createProject(postProjectData: PostProjectObject): Promise { @@ -532,7 +291,7 @@ export class ProjectService extends DBService { // Handle funding sources promises.push( Promise.all( - postProjectData.funding.funding_sources.map((fundingSource: PostFundingSource) => + postProjectData.funding.fundingSources.map((fundingSource: PostFundingSource) => this.insertFundingSource(fundingSource, projectId) ) ) @@ -556,24 +315,6 @@ export class ProjectService extends DBService { ) ); - // Handle new project permits - promises.push( - Promise.all( - postProjectData.permit.permits.map((permit: IPostPermit) => - this.insertPermit(permit.permit_number, permit.permit_type, projectId) - ) - ) - ); - - // Handle existing non-sampling permits which are now being associated to a project - promises.push( - Promise.all( - postProjectData.permit.existing_permits.map((existing_permit: IPostExistingPermit) => - this.associateExistingPermitToProject(existing_permit.permit_id, projectId) - ) - ) - ); - // Handle project IUCN classifications promises.push( Promise.all( @@ -601,180 +342,31 @@ export class ProjectService extends DBService { } async insertProject(postProjectData: PostProjectObject): Promise { - const sqlStatement = queries.project.postProjectSQL({ - ...postProjectData.project, - ...postProjectData.location, - ...postProjectData.objectives, - ...postProjectData.coordinator - }); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new HTTP400('Failed to insert project boundary data'); - } - - return result.id; + return this.projectRepository.insertProject(postProjectData); } async insertFundingSource(fundingSource: PostFundingSource, project_id: number): Promise { - const sqlStatement = queries.project.postProjectFundingSourceSQL(fundingSource, project_id); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new HTTP400('Failed to insert project funding data'); - } - - return result.id; + return this.projectRepository.insertFundingSource(fundingSource, project_id); } async insertIndigenousNation(indigenousNationsId: number, project_id: number): Promise { - const sqlStatement = queries.project.postProjectIndigenousNationSQL(indigenousNationsId, project_id); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new HTTP400('Failed to insert project first nations partnership data'); - } - - return result.id; + return this.projectRepository.insertIndigenousNation(indigenousNationsId, project_id); } async insertStakeholderPartnership(stakeholderPartner: string, project_id: number): Promise { - const sqlStatement = queries.project.postProjectStakeholderPartnershipSQL(stakeholderPartner, project_id); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new HTTP400('Failed to insert project stakeholder partnership data'); - } - - return result.id; - } - - async insertPermit(permitNumber: string, permitType: string, projectId: number): Promise { - const systemUserId = this.connection.systemUserId(); - - if (!systemUserId) { - throw new HTTP400('Failed to identify system user ID'); - } - - const sqlStatement = queries.permit.postProjectPermitSQL(permitNumber, permitType, projectId, systemUserId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new HTTP400('Failed to insert project permit data'); - } - - return result.id; - } - - async associateExistingPermitToProject(permitId: number, projectId: number): Promise { - const sqlStatement = queries.permit.associatePermitToProjectSQL(permitId, projectId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL update statement for associatePermitToProjectSQL'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rowCount) || null; - - if (!result) { - throw new HTTP400('Failed to associate existing permit to project'); - } + return this.projectRepository.insertStakeholderPartnership(stakeholderPartner, project_id); } async insertClassificationDetail(iucn3_id: number, project_id: number): Promise { - const sqlStatement = queries.project.postProjectIUCNSQL(iucn3_id, project_id); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new HTTP400('Failed to insert project IUCN data'); - } - - return result.id; + return this.projectRepository.insertClassificationDetail(iucn3_id, project_id); } async insertActivity(activityId: number, projectId: number): Promise { - const sqlStatement = queries.project.postProjectActivitySQL(activityId, projectId); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new HTTP400('Failed to insert project activity data'); - } - - return result.id; + return this.projectRepository.insertActivity(activityId, projectId); } async insertParticipantRole(projectId: number, projectParticipantRole: string): Promise { - const systemUserId = this.connection.systemUserId(); - - if (!systemUserId) { - throw new HTTP400('Failed to identify system user ID'); - } - - const sqlStatement = queries.projectParticipation.addProjectRoleByRoleNameSQL( - projectId, - systemUserId, - projectParticipantRole - ); - - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - if (!response || !response.rowCount) { - throw new HTTP400('Failed to insert project team member'); - } + return this.projectRepository.insertParticipantRole(projectId, projectParticipantRole); } async updateProject(projectId: number, entities: IUpdateProject) { @@ -788,10 +380,6 @@ export class ProjectService extends DBService { promises.push(this.updateProjectData(projectId, entities)); } - if (entities?.permit && entities?.coordinator) { - promises.push(this.updatePermitData(projectId, entities)); - } - if (entities?.iucn) { promises.push(this.updateIUCNData(projectId, entities)); } @@ -803,53 +391,10 @@ export class ProjectService extends DBService { await Promise.all(promises); } - async updatePermitData(projectId: number, entities: IUpdateProject): Promise { - if (!entities.permit) { - throw new HTTP400('Missing request body entity `permit`'); - } - - const putPermitData = new PostPermitData(entities.permit); - - const sqlDeleteStatement = queries.project.deletePermitSQL(projectId); - - if (!sqlDeleteStatement) { - throw new HTTP400('Failed to build SQL delete statement'); - } - - const deleteResult = await this.connection.query(sqlDeleteStatement.text, sqlDeleteStatement.values); - - if (!deleteResult) { - throw new HTTP409('Failed to delete project permit data'); - } - - const insertPermitPromises = - putPermitData?.permits?.map((permit: IPostPermit) => { - return this.insertPermit(permit.permit_number, permit.permit_type, projectId); - }) || []; - - // Handle existing non-sampling permits which are now being associated to a project - const updateExistingPermitPromises = - putPermitData?.existing_permits?.map((existing_permit: IPostExistingPermit) => { - return this.associateExistingPermitToProject(existing_permit.permit_id, projectId); - }) || []; - - await Promise.all([insertPermitPromises, updateExistingPermitPromises]); - } - async updateIUCNData(projectId: number, entities: IUpdateProject): Promise { const putIUCNData = (entities?.iucn && new PutIUCNData(entities.iucn)) || null; - const sqlDeleteStatement = queries.project.deleteIUCNSQL(projectId); - - if (!sqlDeleteStatement) { - throw new HTTP400('Failed to build SQL delete statement'); - } - - const deleteResult = await this.connection.query(sqlDeleteStatement.text, sqlDeleteStatement.values); - - if (!deleteResult) { - throw new HTTP409('Failed to delete project IUCN data'); - } + await this.projectRepository.deleteIUCNData(projectId); const insertIUCNPromises = putIUCNData?.classificationDetails?.map((iucnClassification: IPutIUCN) => @@ -862,35 +407,8 @@ export class ProjectService extends DBService { async updatePartnershipsData(projectId: number, entities: IUpdateProject): Promise { const putPartnershipsData = (entities?.partnerships && new PutPartnershipsData(entities.partnerships)) || null; - const sqlDeleteIndigenousPartnershipsStatement = queries.project.deleteIndigenousPartnershipsSQL(projectId); - const sqlDeleteStakeholderPartnershipsStatement = queries.project.deleteStakeholderPartnershipsSQL(projectId); - - if (!sqlDeleteIndigenousPartnershipsStatement || !sqlDeleteStakeholderPartnershipsStatement) { - throw new HTTP400('Failed to build SQL delete statement'); - } - - const deleteIndigenousPartnershipsPromises = this.connection.query( - sqlDeleteIndigenousPartnershipsStatement.text, - sqlDeleteIndigenousPartnershipsStatement.values - ); - - const deleteStakeholderPartnershipsPromises = this.connection.query( - sqlDeleteStakeholderPartnershipsStatement.text, - sqlDeleteStakeholderPartnershipsStatement.values - ); - - const [deleteIndigenousPartnershipsResult, deleteStakeholderPartnershipsResult] = await Promise.all([ - deleteIndigenousPartnershipsPromises, - deleteStakeholderPartnershipsPromises - ]); - - if (!deleteIndigenousPartnershipsResult) { - throw new HTTP409('Failed to delete project indigenous partnerships data'); - } - - if (!deleteStakeholderPartnershipsResult) { - throw new HTTP409('Failed to delete project stakeholder partnerships data'); - } + await this.projectRepository.deleteIndigenousPartnershipsData(projectId); + await this.projectRepository.deleteStakeholderPartnershipsData(projectId); const insertIndigenousPartnershipsPromises = putPartnershipsData?.indigenous_partnerships?.map((indigenousPartnership: number) => @@ -923,7 +441,7 @@ export class ProjectService extends DBService { throw new HTTP400('Failed to parse request body'); } - const sqlUpdateProject = queries.project.putProjectSQL( + await this.projectRepository.updateProjectData( projectId, putProjectData, putLocationData, @@ -932,35 +450,13 @@ export class ProjectService extends DBService { revision_count ); - if (!sqlUpdateProject) { - throw new HTTP400('Failed to build SQL update statement'); - } - - const result = await this.connection.query(sqlUpdateProject.text, sqlUpdateProject.values); - - if (!result || !result.rowCount) { - // TODO if revision count is bad, it is supposed to raise an exception? - // It currently does skip the update as expected, but it just returns 0 rows updated, and doesn't result in any errors - throw new HTTP409('Failed to update stale project data'); - } - if (putProjectData?.project_activities.length) { await this.updateActivityData(projectId, putProjectData); } } async updateActivityData(projectId: number, projectData: PutProjectData) { - const sqlDeleteActivities = queries.project.deleteActivitiesSQL(projectId); - - if (!sqlDeleteActivities) { - throw new HTTP400('Failed to build SQL delete statement'); - } - - const deleteActivitiesResult = await this.connection.query(sqlDeleteActivities.text, sqlDeleteActivities.values); - - if (!deleteActivitiesResult) { - throw new HTTP409('Failed to update project activity data'); - } + await this.projectRepository.deleteActivityData(projectId); const insertActivityPromises = projectData?.project_activities?.map((activityId: number) => this.insertActivity(activityId, projectId)) || []; @@ -968,132 +464,102 @@ export class ProjectService extends DBService { await Promise.all([...insertActivityPromises]); } + /** + * Compares incoming project funding data against the existing funding data, if any, and determines which need to be + * deleted, added, or updated. + * + * @param {number} projectId + * @param {IUpdateProject} entities + * @return {*} {Promise} + * @memberof ProjectService + */ async updateFundingData(projectId: number, entities: IUpdateProject): Promise { - const putFundingSource = entities?.funding && new PutFundingSource(entities.funding); - - const surveyFundingSourceDeleteStatement = queries.survey.deleteSurveyFundingSourceByProjectFundingSourceIdSQL( - putFundingSource?.id - ); - const projectFundingSourceDeleteStatement = queries.project.deleteProjectFundingSourceSQL( - projectId, - putFundingSource?.id - ); - - if (!projectFundingSourceDeleteStatement || !surveyFundingSourceDeleteStatement) { - throw new HTTP400('Failed to build SQL delete statement'); - } - - const surveyFundingSourceDeleteResult = await this.connection.query( - surveyFundingSourceDeleteStatement.text, - surveyFundingSourceDeleteStatement.values - ); + const projectRepository = new ProjectRepository(this.connection); - if (!surveyFundingSourceDeleteResult) { - throw new HTTP409('Failed to delete survey funding source'); + const putFundingData = entities?.funding && new PutFundingData(entities.funding); + if (!putFundingData) { + throw new HTTP400('Failed to create funding data object'); } + // Get any existing funding for this project + const existingProjectFundingSources = await projectRepository.getProjectFundingSourceIds(projectId); - const projectFundingSourceDeleteResult = await this.connection.query( - projectFundingSourceDeleteStatement.text, - projectFundingSourceDeleteStatement.values - ); - - if (!projectFundingSourceDeleteResult) { - throw new HTTP409('Failed to delete project funding source'); - } - - const sqlInsertStatement = queries.project.putProjectFundingSourceSQL(putFundingSource, projectId); - - if (!sqlInsertStatement) { - throw new HTTP400('Failed to build SQL insert statement'); - } - - const insertResult = await this.connection.query(sqlInsertStatement.text, sqlInsertStatement.values); + // Compare the array of existing funding to the array of incoming funding (by project_funding_source_id) and collect any + // existing funding that are not in the incoming funding array. + const existingFundingSourcesToDelete = existingProjectFundingSources.filter((existingFunding) => { + // Find all existing funding (by project_funding_source_id) that have no matching incoming project_funding_source_id + return !putFundingData.fundingSources.find( + (incomingFunding) => incomingFunding.id === existingFunding.project_funding_source_id + ); + }); - if (!insertResult) { - throw new HTTP409('Failed to put (insert) project funding source with incremented revision count'); - } - } + // Delete from the database all existing project and survey funding that have been removed + if (existingFundingSourcesToDelete.length) { + const promises: Promise[] = []; - async updatePublishStatus(projectId: number, publish: boolean): Promise { - const sqlStatement = queries.project.updateProjectPublishStatusSQL(projectId, publish); + existingFundingSourcesToDelete.forEach((funding) => { + // Delete funding connection to survey first + promises.push( + projectRepository.deleteSurveyFundingSourceConnectionToProject(funding.project_funding_source_id) + ); + // Delete project funding after + promises.push(projectRepository.deleteProjectFundingSource(funding.project_funding_source_id)); + }); - if (!sqlStatement) { - throw new HTTP400('Failed to build SQL statement'); + await Promise.all(promises); } - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - const result = (response && response.rows && response.rows[0]) || null; + // The remaining funding are either new, and can be created, or updates to existing funding + const promises: Promise[] = []; - if (!response || !result) { - throw new HTTP500('Failed to update project publish status'); - } + putFundingData.fundingSources.forEach((funding) => { + if (funding.id) { + // Has a project_funding_source_id, indicating this is an update to an existing funding + promises.push(projectRepository.updateProjectFundingSource(funding, projectId)); + } else { + // No project_funding_source_id, indicating this is a new funding which needs to be created + promises.push(projectRepository.insertProjectFundingSource(funding, projectId)); + } + }); - return result.id; + await Promise.all(promises); } - async deleteProject(projectId: number, userRoles: string | string[]): Promise { + async deleteProject(projectId: number): Promise { /** * PART 1 - * Check that user is a system administrator - can delete a project (published or not) - * Check that user is a project administrator - can delete a project (unpublished only) + * Check that user is a system administrator - can delete a project * */ - const getProjectSQLStatement = queries.project.getProjectSQL(projectId); - if (!getProjectSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const projectData = await this.connection.query(getProjectSQLStatement.text, getProjectSQLStatement.values); - - const projectResult = (projectData && projectData.rows && projectData.rows[0]) || null; + const projectResult = await this.getProjectData(projectId); - if (!projectResult || !projectResult.id) { + if (!projectResult || !projectResult.uuid) { throw new HTTP400('Failed to get the project'); } - if (projectResult.publish_date && userHasValidRole([SYSTEM_ROLE.PROJECT_CREATOR], userRoles)) { - throw new HTTP400('Cannot delete a published project if you are not a system administrator.'); - } - /** * PART 2 * Get the attachment S3 keys for all attachments associated to this project and surveys under this project * Used to delete them from S3 separately later */ - const getProjectAttachmentSQLStatement = queries.project.getProjectAttachmentsSQL(projectId); - const getSurveyIdsSQLStatement = queries.survey.getSurveyIdsSQL(projectId); - if (!getProjectAttachmentSQLStatement || !getSurveyIdsSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const getProjectAttachmentsResult = await this.connection.query( - getProjectAttachmentSQLStatement.text, - getProjectAttachmentSQLStatement.values - ); - - if (!getProjectAttachmentsResult || !getProjectAttachmentsResult.rows) { - throw new HTTP400('Failed to get project attachments'); - } + const surveyService = new SurveyService(this.connection); - const getSurveyIdsResult = await this.connection.query( - getSurveyIdsSQLStatement.text, - getSurveyIdsSQLStatement.values - ); - - if (!getSurveyIdsResult || !getSurveyIdsResult.rows) { - throw new HTTP400('Failed to get survey ids associated to project'); - } + const getSurveyIdsResult = await surveyService.getSurveyIdsByProjectId(projectId); const surveyAttachmentS3Keys: string[] = Array.prototype.concat.apply( [], await Promise.all( - getSurveyIdsResult.rows.map((survey: any) => getSurveyAttachmentS3Keys(survey.id, this.connection)) + getSurveyIdsResult.map(async (survey: any) => { + const surveyAttachments = await this.attachmentService.getSurveyAttachments(survey.id); + return surveyAttachments.map((attachment) => attachment.key); + }) ) ); - const projectAttachmentS3Keys: string[] = getProjectAttachmentsResult.rows.map((attachment: any) => { + const getProjectAttachments = await this.attachmentService.getProjectAttachments(projectId); + + const projectAttachmentS3Keys: string[] = getProjectAttachments.map((attachment: any) => { return attachment.key; }); @@ -1101,13 +567,8 @@ export class ProjectService extends DBService { * PART 3 * Delete the project and all associated records/resources from our DB */ - const deleteProjectSQLStatement = queries.project.deleteProjectSQL(projectId); - if (!deleteProjectSQLStatement) { - throw new HTTP400('Failed to build SQL delete statement'); - } - - await this.connection.query(deleteProjectSQLStatement.text, deleteProjectSQLStatement.values); + await this.projectRepository.deleteProject(projectId); /** * PART 4 @@ -1125,26 +586,15 @@ export class ProjectService extends DBService { return true; } - async getPublicProjectData(projectId: number): Promise { - const getProjectSqlStatement = queries.public.getPublicProjectSQL(projectId); - const getProjectActivitiesSQLStatement = queries.public.getActivitiesByPublicProjectSQL(projectId); - - if (!getProjectSqlStatement || !getProjectActivitiesSQLStatement) { - throw new HTTP400('Failed to build SQL get statement'); - } - - const [project, activity] = await Promise.all([ - this.connection.query(getProjectSqlStatement.text, getProjectSqlStatement.values), - this.connection.query(getProjectActivitiesSQLStatement.text, getProjectActivitiesSQLStatement.values) - ]); - - const projectResult = (project && project.rows && project.rows[0]) || null; - const activityResult = (activity && activity.rows) || null; + async deleteDraft(draftId: number): Promise { + return this.projectRepository.deleteDraft(draftId); + } - if (!projectResult || !activityResult) { - throw new HTTP400('Failed to get project data'); - } + async getSingleDraft(draftId: number): Promise<{ id: number; name: string; data: any }> { + return this.projectRepository.getSingleDraft(draftId); + } - return new GetProjectData(projectResult, activityResult); + async deleteProjectParticipationRecord(projectParticipationId: number): Promise { + return this.projectRepository.deleteProjectParticipationRecord(projectParticipationId); } } diff --git a/api/src/services/spatial-service.test.ts b/api/src/services/spatial-service.test.ts new file mode 100644 index 0000000000..1d4ca92df3 --- /dev/null +++ b/api/src/services/spatial-service.test.ts @@ -0,0 +1,153 @@ +import chai, { expect } from 'chai'; +import { FeatureCollection } from 'geojson'; +import { describe } from 'mocha'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import { IGetSpatialTransformRecord, SpatialRepository } from '../repositories/spatial-repository'; +import { getMockDBConnection } from '../__mocks__/db'; +import { SpatialService } from './spatial-service'; + +chai.use(sinonChai); + +describe('SpatialService', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('getSpatialTransformRecords', () => { + it('should return IGetSpatialTransformRecord on get', async () => { + const mockDBConnection = getMockDBConnection(); + const spatialService = new SpatialService(mockDBConnection); + + const repo = sinon + .stub(SpatialRepository.prototype, 'getSpatialTransformRecords') + .resolves(([{ name: 'name' }] as unknown) as IGetSpatialTransformRecord[]); + + const response = await spatialService.getSpatialTransformRecords(); + + expect(repo).to.be.calledOnce; + expect(response).to.be.eql([{ name: 'name' }]); + }); + }); + + describe('insertSpatialTransformSubmissionRecord', () => { + it('should return spatial_transform_submission_id after insert', async () => { + const mockDBConnection = getMockDBConnection(); + const spatialService = new SpatialService(mockDBConnection); + + const repo = sinon + .stub(SpatialRepository.prototype, 'insertSpatialTransformSubmissionRecord') + .resolves({ spatial_transform_submission_id: 1 }); + + const response = await spatialService.insertSpatialTransformSubmissionRecord(1, 1); + + expect(repo).to.be.calledOnce; + expect(response).to.be.eql({ spatial_transform_submission_id: 1 }); + }); + }); + + describe('runSpatialTransforms', () => { + it('should return submission_spatial_component_id after running transform and inserting data', async () => { + const mockDBConnection = getMockDBConnection(); + const spatialService = new SpatialService(mockDBConnection); + + const getSpatialTransformRecordsStub = sinon + .stub(SpatialService.prototype, 'getSpatialTransformRecords') + .resolves([ + { + spatial_transform_id: 1, + name: 'name1', + description: null, + notes: null, + transform: 'transform1' + }, + { + spatial_transform_id: 2, + name: 'name2', + description: null, + notes: null, + transform: 'transform2' + } + ]); + + const runSpatialTransformOnSubmissionIdStub = sinon + .stub(SpatialRepository.prototype, 'runSpatialTransformOnSubmissionId') + .onCall(0) + .resolves([ + { result_data: ('result1' as unknown) as FeatureCollection }, + { result_data: ('result2' as unknown) as FeatureCollection } + ]) + .onCall(1) + .resolves([ + { result_data: ('result3' as unknown) as FeatureCollection }, + { result_data: ('result4' as unknown) as FeatureCollection } + ]); + + const insertSubmissionSpatialComponentStub = sinon + .stub(SpatialRepository.prototype, 'insertSubmissionSpatialComponent') + .onCall(0) + .resolves({ submission_spatial_component_id: 3 }) + .onCall(1) + .resolves({ submission_spatial_component_id: 4 }) + .onCall(2) + .resolves({ submission_spatial_component_id: 5 }) + .onCall(3) + .resolves({ submission_spatial_component_id: 6 }); + + const insertSpatialTransformSubmissionRecordStub = sinon + .stub(SpatialRepository.prototype, 'insertSpatialTransformSubmissionRecord') + .resolves(); + + await spatialService.runSpatialTransforms(9); + + expect(getSpatialTransformRecordsStub).to.be.calledOnceWith(); + expect(runSpatialTransformOnSubmissionIdStub).to.be.calledWith(9, 'transform1').calledWith(9, 'transform2'); + expect(insertSubmissionSpatialComponentStub) + .to.be.calledWith(9, 'result1') + .calledWith(9, 'result2') + .calledWith(9, 'result3') + .calledWith(9, 'result4'); + expect(insertSpatialTransformSubmissionRecordStub) + .to.be.calledWith(1, 3) + .calledWith(1, 4) + .calledWith(2, 5) + .calledWith(2, 6); + }); + }); + + describe('deleteSpatialComponentsBySubmissionId', () => { + it('should return submission IDs upon deleting spatial data', async () => { + const mockDBConnection = getMockDBConnection(); + const spatialService = new SpatialService(mockDBConnection); + + const mockResponseRows = ([{ occurrence_submission_id: 3 }] as unknown) as { occurrence_submission_id: number }[]; + + const repo = sinon + .stub(SpatialRepository.prototype, 'deleteSpatialComponentsBySubmissionId') + .resolves(mockResponseRows); + + const response = await spatialService.deleteSpatialComponentsBySubmissionId(3); + + expect(repo).to.be.calledOnce; + expect(response).to.be.eql(mockResponseRows); + }); + }); + + describe('deleteSpatialComponentsSpatialTransformRefsBySubmissionId', () => { + it('should return submission IDs upon deleting spatial data', async () => { + const mockDBConnection = getMockDBConnection(); + const spatialService = new SpatialService(mockDBConnection); + + const mockResponseRows = ([{ occurrence_submission_id: 3 }] as unknown) as { occurrence_submission_id: number }[]; + + const repo = sinon + .stub(SpatialRepository.prototype, 'deleteSpatialComponentsSpatialTransformRefsBySubmissionId') + .resolves(mockResponseRows); + + const response = await spatialService.deleteSpatialComponentsSpatialTransformRefsBySubmissionId(3); + + expect(repo).to.be.calledOnce; + expect(response).to.be.eql(mockResponseRows); + }); + }); +}); diff --git a/api/src/services/spatial-service.ts b/api/src/services/spatial-service.ts new file mode 100644 index 0000000000..234bc081bf --- /dev/null +++ b/api/src/services/spatial-service.ts @@ -0,0 +1,101 @@ +import { IDBConnection } from '../database/db'; +import { IGetSpatialTransformRecord, SpatialRepository } from '../repositories/spatial-repository'; +import { DBService } from './db-service'; + +export class SpatialService extends DBService { + spatialRepository: SpatialRepository; + + constructor(connection: IDBConnection) { + super(connection); + + this.spatialRepository = new SpatialRepository(connection); + } + + /** + * get spatial transform records + * + * @return {*} {Promise} + * @memberof SpatialService + */ + async getSpatialTransformRecords(): Promise { + return this.spatialRepository.getSpatialTransformRecords(); + } + + /** + * Insert record of transform id used for submission spatial component record + * + * @param {number} spatialTransformId + * @param {number} submissionSpatialComponentId + * @return {*} {Promise<{ spatial_transform_submission_id: number }>} + * @memberof SpatialService + */ + async insertSpatialTransformSubmissionRecord( + spatialTransformId: number, + submissionSpatialComponentId: number + ): Promise<{ spatial_transform_submission_id: number }> { + return this.spatialRepository.insertSpatialTransformSubmissionRecord( + spatialTransformId, + submissionSpatialComponentId + ); + } + + /** + * Collect transforms from db, run transformations on submission id, save result to spatial component table + * + * @param {number} submissionId + * @return {*} {Promise} + * @memberof SpatialService + */ + async runSpatialTransforms(submissionId: number): Promise { + const spatialTransformRecords = await this.getSpatialTransformRecords(); + + const transformRecordPromises = spatialTransformRecords.map(async (transformRecord) => { + const transformed = await this.spatialRepository.runSpatialTransformOnSubmissionId( + submissionId, + transformRecord.transform + ); + + const insertSpatialTransformSubmissionRecordPromises = transformed.map(async (dataPoint) => { + const submissionSpatialComponentId = await this.spatialRepository.insertSubmissionSpatialComponent( + submissionId, + dataPoint.result_data + ); + + await this.insertSpatialTransformSubmissionRecord( + transformRecord.spatial_transform_id, + submissionSpatialComponentId.submission_spatial_component_id + ); + }); + + await Promise.all(insertSpatialTransformSubmissionRecordPromises); + }); + + await Promise.all(transformRecordPromises); + } + + /** + * Delete spatial component records by submission id. + * + * @param {number} occurrence_submission_id + * @return {*} {Promise<{ occurrence_submission_id: number }[]>} + * @memberof SpatialService + */ + async deleteSpatialComponentsBySubmissionId( + occurrence_submission_id: number + ): Promise<{ occurrence_submission_id: number }[]> { + return this.spatialRepository.deleteSpatialComponentsBySubmissionId(occurrence_submission_id); + } + + /** + * Delete records referencing which spatial transforms were applied to a spatial component + * + * @param {number} occurrence_submission_id + * @return {*} {Promise<{ occurrence_submission_id: number }[]>} + * @memberof SpatialService + */ + async deleteSpatialComponentsSpatialTransformRefsBySubmissionId( + occurrence_submission_id: number + ): Promise<{ occurrence_submission_id: number }[]> { + return this.spatialRepository.deleteSpatialComponentsSpatialTransformRefsBySubmissionId(occurrence_submission_id); + } +} diff --git a/api/src/services/summary-service.test.ts b/api/src/services/summary-service.test.ts new file mode 100644 index 0000000000..5c1bb2a67e --- /dev/null +++ b/api/src/services/summary-service.test.ts @@ -0,0 +1,888 @@ +import chai, { expect } from 'chai'; +import { shuffle } from 'lodash'; +import { describe } from 'mocha'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import xlsx from 'xlsx'; +import { + MESSAGE_CLASS_NAME, + SUBMISSION_MESSAGE_TYPE, + SUBMISSION_STATUS_TYPE, + SUMMARY_SUBMISSION_MESSAGE_TYPE +} from '../constants/status'; +import { HTTP400 } from '../errors/http-error'; +import { SummaryRepository } from '../repositories/summary-repository'; +import * as FileUtils from '../utils/file-utils'; +// import { ITemplateMethodologyData } from '../repositories/validation-repository'; +import { ICsvState } from '../utils/media/csv/csv-file'; +// import { DWCArchive } from '../utils/media/dwc/dwc-archive-file'; +import { IMediaState, MediaFile } from '../utils/media/media-file'; +import * as MediaUtils from '../utils/media/media-utils'; +import { ValidationSchemaParser } from '../utils/media/validation/validation-schema-parser'; +/* +import * as MediaUtils from '../utils/media/media-utils'; +import { ValidationSchemaParser } from '../utils/media/validation/validation-schema-parser'; +import { TransformationSchemaParser } from '../utils/media/xlsx/transformation/transformation-schema-parser'; +import { XLSXTransformation } from '../utils/media/xlsx/transformation/xlsx-transformation'; + + + + +*/ +import { XLSXCSV } from '../utils/media/xlsx/xlsx-file'; +import { + MessageError, + SubmissionError, + SummarySubmissionError, + SummarySubmissionErrorFromMessageType +} from '../utils/submission-error'; +import { getMockDBConnection } from '../__mocks__/db'; +import { SummaryService } from './summary-service'; +import { SurveyService } from './survey-service'; + +chai.use(sinonChai); + +// const mockS3File = { +// fieldname: 'media', +// originalname: 'test.csv', +// encoding: '7bit', +// mimetype: 'text/csv', +// size: 340 +// }; + +// const s3Archive = { +// fieldname: 'media', +// originalname: 'test.zip', +// encoding: '7bit', +// mimetype: 'application/zip', +// size: 340 +// }; + +const mockService = () => { + const dbConnection = getMockDBConnection(); + return new SummaryService(dbConnection); +}; + +const makeMockTemplateSpeciesRecord = (seed: number) => ({ + summary_template_species_id: seed + 1, + summary_template_id: seed + 1, + wldtaxonomic_units_id: 4165 + seed, + validation: JSON.stringify({ test_schema_id: seed + 1 }), + create_user: 1, + update_date: null, + update_user: null, + revision_count: 1 +}); + +const buildFile = (fileName: string, customProps: { template_id?: number; csm_id?: number }) => { + const newWorkbook = xlsx.utils.book_new(); + newWorkbook.Custprops = {}; + + if (customProps.csm_id && customProps.template_id) { + newWorkbook.Custprops['sims_template_id'] = customProps.template_id; + newWorkbook.Custprops['sims_csm_id'] = customProps.csm_id; + } + + const ws_name = 'SheetJS'; + + // make worksheet + const ws_data = [ + ['S', 'h', 'e', 'e', 't', 'J', 'S'], + [1, 2, 3, 4, 5] + ]; + const ws = xlsx.utils.aoa_to_sheet(ws_data); + + // Add the worksheet to the workbook + xlsx.utils.book_append_sheet(newWorkbook, ws, ws_name); + + const buffer = xlsx.write(newWorkbook, { type: 'buffer' }); + + return new MediaFile(fileName, 'text/csv', buffer); +}; + +describe('SummaryService', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('validateFile', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should run without issue', async () => { + const service = mockService(); + const mockPrep = { + s3InputKey: '', + xlsx: new XLSXCSV(buildFile('test file', {})) + }; + const prep = sinon.stub(service, 'summaryTemplatePreparation').resolves(mockPrep); + const validation = sinon.stub(service, 'summaryTemplateValidation').resolves(); + + await service.validateFile(1, 1); + expect(prep).to.be.calledOnce; + expect(validation).to.be.calledOnce; + }); + + it('should insert submission error', async () => { + const service = mockService(); + const mockPrep = { + s3InputKey: '', + xlsx: new XLSXCSV(buildFile('test file', {})) + }; + const mockError = SummarySubmissionErrorFromMessageType( + SUMMARY_SUBMISSION_MESSAGE_TYPE.MISSING_VALIDATION_SCHEMA + ); + const prep = sinon.stub(service, 'summaryTemplatePreparation').resolves(mockPrep); + sinon.stub(service.summaryRepository, 'insertSummarySubmissionMessage').resolves(); + const validation = sinon.stub(service, 'summaryTemplateValidation').throws(mockError); + + try { + await service.validateFile(1, 1); + expect(prep).to.be.calledOnce; + } catch (error) { + expect(error).to.be.instanceOf(SummarySubmissionError); + expect(validation).not.to.be.calledOnce; + } + }); + + it('should throw', async () => { + const service = mockService(); + const mockPrep = { + s3InputKey: '', + xlsx: new XLSXCSV(buildFile('test file', {})) + }; + const prep = sinon.stub(service, 'summaryTemplatePreparation').resolves(mockPrep); + const validation = sinon.stub(service, 'summaryTemplateValidation').throws(new Error()); + + try { + await service.validateFile(1, 1); + expect(prep).to.be.calledOnce; + expect(validation).to.be.calledOnce; + } catch (error) { + expect(error).not.to.be.instanceOf(SubmissionError); + } + }); + }); + + describe('updateSurveySummarySubmissionWithKey', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should update a survey summary submission key', async () => { + const service = mockService(); + const update = sinon + .stub(service, 'updateSurveySummarySubmissionWithKey') + .resolves({ survey_summary_submission_id: 12 }); + const result = await service.updateSurveySummarySubmissionWithKey(12, 'new-test-key'); + + expect(update).to.be.calledOnce; + expect(result).to.be.eql({ survey_summary_submission_id: 12 }); + }); + }); + + describe('insertSurveySummarySubmission', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should insert a summary submission', async () => { + const service = mockService(); + + sinon + .stub(SummaryRepository.prototype, 'insertSurveySummarySubmission') + .resolves({ survey_summary_submission_id: 5 }); + const result = await service.insertSurveySummarySubmission(10, 'biohub-unit-testing', 'test-filename'); + + expect(result).to.eql({ survey_summary_submission_id: 5 }); + }); + + it('should throw an error if the repo fails to insert the summary submission', async () => { + sinon + .stub(SummaryRepository.prototype, 'insertSurveySummarySubmission') + .throws(new HTTP400('Failed to insert survey summary submission record')); + + try { + const service = mockService(); + await service.insertSurveySummarySubmission(10, 'biohub-unit-testing', 'test-filename'); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTP400).message).to.equal('Failed to insert survey summary submission record'); + } + }); + }); + + describe('deleteSummarySubmission', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return a row count of 1 when successfully deleting', async () => { + const service = mockService(); + + sinon.stub(SummaryRepository.prototype, 'deleteSummarySubmission').resolves(1); + + const result = await service.deleteSummarySubmission(10); + + expect(result).to.be.equal(1); + }); + + it('should return a row count of 0 when deleting an already delete submission', async () => { + const service = mockService(); + + sinon.stub(SummaryRepository.prototype, 'deleteSummarySubmission').resolves(0); + + const result = await service.deleteSummarySubmission(10); + + expect(result).to.be.equal(0); + }); + + it('should throw an error when the repo throws an error', async () => { + sinon + .stub(SummaryRepository.prototype, 'deleteSummarySubmission') + .throws(new HTTP400('Failed to soft delete survey summary submission record')); + + try { + const service = mockService(); + await service.deleteSummarySubmission(10); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTP400).message).to.equal('Failed to soft delete survey summary submission record'); + } + }); + }); + + describe('getSummarySubmissionMessages', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should successfully retreive an array of submission messages', async () => { + const service = mockService(); + + sinon.stub(SummaryRepository.prototype, 'getSummarySubmissionMessages').resolves([ + { + id: 1, + class: 'class1', + type: 'type1', + message: 'message1' + }, + { + id: 2, + class: 'class2', + type: 'type2', + message: 'message2' + } + ]); + + const result = await service.getSummarySubmissionMessages(10); + + expect(result.length).to.be.equal(2); + expect(result).to.be.eql([ + { + id: 1, + class: 'class1', + type: 'type1', + message: 'message1' + }, + { + id: 2, + class: 'class2', + type: 'type2', + message: 'message2' + } + ]); + }); + + it('should return an empty array if the repo finds no messages', async () => { + const service = mockService(); + + sinon.stub(SummaryRepository.prototype, 'getSummarySubmissionMessages').resolves([]); + + const result = await service.getSummarySubmissionMessages(10); + + expect(result.length).to.be.equal(0); + expect(result).to.be.eql([]); + }); + + it('should throw an error when the repo throws an error', async () => { + sinon + .stub(SummaryRepository.prototype, 'getSummarySubmissionMessages') + .throws(new HTTP400('Failed to query survey summary submission table')); + + try { + const service = mockService(); + await service.getSummarySubmissionMessages(10); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTP400).message).to.equal('Failed to query survey summary submission table'); + } + }); + }); + + describe('findSummarySubmissionById', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should successfully retreive a submission', async () => { + const service = mockService(); + + sinon.stub(SummaryRepository.prototype, 'findSummarySubmissionById').resolves({ + survey_summary_submission_id: 10, + survey_id: 1, + source: 'source', + event_timestamp: null, + delete_timestamp: null, + key: 's3Key', + file_name: 'filename', + create_user: 1, + update_date: null, + update_user: null, + revision_count: 1, + summary_template_species_id: 1 + }); + + const result = await service.findSummarySubmissionById(10); + + expect(result).to.be.eql({ + survey_summary_submission_id: 10, + survey_id: 1, + source: 'source', + event_timestamp: null, + delete_timestamp: null, + key: 's3Key', + file_name: 'filename', + create_user: 1, + update_date: null, + update_user: null, + revision_count: 1, + summary_template_species_id: 1 + }); + }); + + it('should throw an error when the repo throws an error', async () => { + sinon + .stub(SummaryRepository.prototype, 'findSummarySubmissionById') + .throws(new HTTP400('Failed to query survey summary submission table')); + + try { + const service = mockService(); + await service.findSummarySubmissionById(10); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTP400).message).to.equal('Failed to query survey summary submission table'); + } + }); + }); + + describe('getLatestSurveySummarySubmission', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should successfully retreive a submission', async () => { + const service = mockService(); + + sinon.stub(SummaryRepository.prototype, 'getLatestSurveySummarySubmission').resolves({ + id: 30, + file_name: 'file13.xlsx', + key: 's3_key', + delete_timestamp: null, + submission_message_type_id: 1, + message: 'another error message', + submission_message_type_name: 'Miscellaneous', + summary_submission_message_class_id: 1, + submission_message_class_name: MESSAGE_CLASS_NAME.ERROR + }); + + const result = await service.getLatestSurveySummarySubmission(20); + + expect(result).to.be.eql({ + id: 30, + file_name: 'file13.xlsx', + key: 's3_key', + delete_timestamp: null, + submission_message_type_id: 1, + message: 'another error message', + submission_message_type_name: 'Miscellaneous', + summary_submission_message_class_id: 1, + submission_message_class_name: MESSAGE_CLASS_NAME.ERROR + }); + }); + + it('should throw an error when the repo throws an error', async () => { + sinon + .stub(SummaryRepository.prototype, 'getLatestSurveySummarySubmission') + .throws(new HTTP400('Failed to query survey summary submission table')); + + try { + const service = mockService(); + await service.getLatestSurveySummarySubmission(21); + expect.fail(); + } catch (actualError) { + expect((actualError as HTTP400).message).to.equal('Failed to query survey summary submission table'); + } + }); + }); + + describe('summaryTemplatePreparation', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return valid S3 key and xlsx object', async () => { + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const s3Key = 's3-key'; + sinon.stub(FileUtils, 'getFileFromS3').resolves('file from s3' as any); + sinon.stub(SummaryService.prototype, 'prepXLSX').returns(new XLSXCSV(file)); + sinon.stub(SummaryService.prototype, 'findSummarySubmissionById').resolves({ + survey_summary_submission_id: 1, + survey_id: 1, + source: 'source', + event_timestamp: null, + delete_timestamp: null, + key: s3Key, + file_name: 'filename', + create_user: 1, + update_date: null, + update_user: null, + revision_count: 1, + summary_template_species_id: 1 + }); + + const service = mockService(); + const results = await service.summaryTemplatePreparation(1); + + expect(results.xlsx).to.not.be.empty; + expect(results.xlsx).to.be.instanceOf(XLSXCSV); + expect(results.s3InputKey).to.be.eql(s3Key); + }); + + it('throws Failed to prepare submission error', async () => { + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const s3Key = 's3-key'; + sinon.stub(FileUtils, 'getFileFromS3').throws(new SubmissionError({})); + sinon.stub(SummaryService.prototype, 'prepXLSX').resolves(new XLSXCSV(file)); + sinon.stub(SummaryService.prototype, 'findSummarySubmissionById').resolves({ + survey_summary_submission_id: 1, + survey_id: 1, + source: 'source', + event_timestamp: null, + delete_timestamp: null, + key: s3Key, + file_name: 'filename', + create_user: 1, + update_date: null, + update_user: null, + revision_count: 1, + summary_template_species_id: 1 + }); + + try { + const dbConnection = getMockDBConnection(); + const service = new SummaryService(dbConnection); + await service.summaryTemplatePreparation(1); + + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + if (error instanceof SubmissionError) { + expect(error.status).to.be.eql(SUBMISSION_STATUS_TYPE.FAILED_SUMMARY_PREPARATION); + } + } + }); + }); + + describe('summaryTemplateValidation', () => { + afterEach(() => { + sinon.restore(); + }); + + it('Should log the particular validation schema that was found if summarySubmissionId is given', async () => { + const service = mockService(); + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const xlsxCsv = new XLSXCSV(file); + sinon.stub(FileUtils, 'getFileFromS3').resolves('file from s3' as any); + + const getValidation = sinon + .stub(service, 'getSummaryTemplateSpeciesRecords') + .resolves([ + makeMockTemplateSpeciesRecord(99), + makeMockTemplateSpeciesRecord(199), + makeMockTemplateSpeciesRecord(299), + makeMockTemplateSpeciesRecord(399) + ]); + + const getRules = sinon.stub(service, 'getValidationRules').resolves(''); + const validate = sinon.stub(service, 'validateXLSX').resolves({}); + const persistResults = sinon.stub(service, 'persistSummaryValidationResults').resolves(); + + const logFoundValidation = sinon.stub(SummaryRepository.prototype, 'insertSummarySubmissionMessage').resolves(); + + await service.summaryTemplateValidation(xlsxCsv, 70, 60); + + expect(getValidation).to.be.calledOnce; + expect(getRules).to.be.calledOnce; + expect(validate).to.be.calledOnce; + expect(persistResults).to.be.calledOnce; + + expect(logFoundValidation).to.be.calledOnce; + expect(logFoundValidation).to.be.calledWith( + 60, + SUMMARY_SUBMISSION_MESSAGE_TYPE.FOUND_VALIDATION, + "Found validation having summary template species ID '100' among 4 record(s)." + ); + }); + + it('should complete without error', async () => { + const service = mockService(); + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const xlsxCsv = new XLSXCSV(file); + sinon.stub(FileUtils, 'getFileFromS3').resolves('file from s3' as any); + + const getValidation = sinon + .stub(service, 'getSummaryTemplateSpeciesRecords') + .resolves([makeMockTemplateSpeciesRecord(1)]); + const getRules = sinon.stub(service, 'getValidationRules').resolves(''); + const validate = sinon.stub(service, 'validateXLSX').resolves({}); + const persistResults = sinon.stub(service, 'persistSummaryValidationResults').resolves(); + + await service.summaryTemplateValidation(xlsxCsv, 1); + + expect(getValidation).to.be.calledOnce; + expect(getRules).to.be.calledOnce; + expect(validate).to.be.calledOnce; + expect(persistResults).to.be.calledOnce; + }); + + it('should pick the first validation schema deterministically', async () => { + const service = mockService(); + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const xlsxCsv = new XLSXCSV(file); + sinon.stub(FileUtils, 'getFileFromS3').resolves('file from s3' as any); + + const templateSpeciesRecords = shuffle([...Array(20).keys()].map(makeMockTemplateSpeciesRecord)); + + const getValidation = sinon.stub(service, 'getSummaryTemplateSpeciesRecords').resolves(templateSpeciesRecords); + const getRules = sinon.stub(service, 'getValidationRules').resolves(''); + const validate = sinon.stub(service, 'validateXLSX').resolves({}); + const persistResults = sinon.stub(service, 'persistSummaryValidationResults').resolves(); + + await service.summaryTemplateValidation(xlsxCsv, 1); + + expect(getValidation).to.be.calledOnce; + expect(getRules).to.have.been.calledWith(templateSpeciesRecords[0].validation); + expect(validate).to.be.calledOnce; + expect(persistResults).to.be.calledOnce; + }); + + it('should throw FAILED_GET_VALIDATION_RULES error if no validation found', async () => { + const service = mockService(); + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const xlsxCsv = new XLSXCSV(file); + + sinon.stub(FileUtils, 'getFileFromS3').resolves('file from s3' as any); + sinon.stub(service, 'getSummaryTemplateSpeciesRecords').resolves([]); + sinon.stub(service, 'getValidationRules').resolves({}); + + try { + await service.summaryTemplateValidation(xlsxCsv, 1); + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SummarySubmissionError); + if (error instanceof SummarySubmissionError) { + expect(error.summarySubmissionMessages.length).to.equal(1); + expect(error.summarySubmissionMessages[0].type).to.be.eql( + SUMMARY_SUBMISSION_MESSAGE_TYPE.FAILED_GET_VALIDATION_RULES + ); + } + } + }); + + it('should throw FAILED_PARSE_VALIDATION_SCHEMA error if getValidationRules fails', async () => { + const service = mockService(); + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const xlsxCsv = new XLSXCSV(file); + sinon.stub(FileUtils, 'getFileFromS3').resolves('file from s3' as any); + sinon.stub(service, 'getSummaryTemplateSpeciesRecords').resolves([ + { + ...makeMockTemplateSpeciesRecord(1), + validation: 'this validation string will fail' + } + ]); + + try { + await service.summaryTemplateValidation(xlsxCsv, 1); + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SummarySubmissionError); + if (error instanceof SummarySubmissionError) { + expect(error.summarySubmissionMessages.length).to.equal(1); + expect(error.summarySubmissionMessages[0].type).to.be.eql( + SUMMARY_SUBMISSION_MESSAGE_TYPE.FAILED_PARSE_VALIDATION_SCHEMA + ); + } + } + }); + + it('should throw INVALID_MEDIA error if validateXLSX fails with invalid media', async () => { + const service = mockService(); + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const xlsxCsv = new XLSXCSV(file); + const validation = 'test-template-validation-schema'; + const mockSchemaParser = { validationSchema: validation }; + + sinon.stub(XLSXCSV.prototype, 'validateMedia'); + sinon.stub(XLSXCSV.prototype, 'getMediaState').returns({ + isValid: false, + fileName: 'test filename' + }); + + const getValidation = sinon.stub(service, 'getValidationRules').resolves(mockSchemaParser); + sinon.stub(FileUtils, 'getFileFromS3').resolves('file from s3' as any); + sinon + .stub(service, 'getSummaryTemplateSpeciesRecords') + .resolves([{ ...makeMockTemplateSpeciesRecord(1), validation }]); + + try { + await service.summaryTemplateValidation(xlsxCsv, 1); + expect.fail(); + } catch (error) { + expect(getValidation).to.be.calledWith('test-template-validation-schema'); + expect(error).to.be.instanceOf(SummarySubmissionError); + if (error instanceof SummarySubmissionError) { + expect(error.summarySubmissionMessages.length).to.equal(1); + expect(error.summarySubmissionMessages[0].type).to.equal(SUMMARY_SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA); + } + } + }); + }); + + describe('prepXLSX', () => { + afterEach(() => { + sinon.restore(); + }); + it('should return valid XLSXCSV', () => { + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const parse = sinon.stub(MediaUtils, 'parseUnknownMedia').returns(file); + sinon.stub(XLSXCSV, 'prototype').returns({ + workbook: { + rawWorkbook: { + Custprops: { + sims_template_id: 1, + sims_csm_id: 1 + } + } + } + }); + + const service = mockService(); + try { + const xlsx = service.prepXLSX(file); + expect(xlsx).to.not.be.empty; + expect(xlsx).to.be.instanceOf(XLSXCSV); + } catch (error) { + expect(parse).to.be.calledOnce; + } + }); + + it('should throw File submitted is not a supported type error', () => { + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const parse = sinon.stub(MediaUtils, 'parseUnknownMedia').returns(null); + + const service = mockService(); + try { + service.prepXLSX(file); + expect.fail(); + } catch (error) { + if (error instanceof SummarySubmissionError) { + expect(error.summarySubmissionMessages[0].type).to.be.eql( + SUMMARY_SUBMISSION_MESSAGE_TYPE.UNSUPPORTED_FILE_TYPE + ); + } + + expect(error).to.be.instanceOf(SummarySubmissionError); + expect(parse).to.be.calledOnce; + } + }); + + it('should throw `XLSX CSV is Invalid` error', () => { + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const parse = sinon.stub(MediaUtils, 'parseUnknownMedia').returns(('a file' as unknown) as MediaFile); + + const service = mockService(); + try { + service.prepXLSX(file); + expect.fail(); + } catch (error) { + if (error instanceof SummarySubmissionError) { + expect(error.summarySubmissionMessages[0].type).to.be.eql(SUMMARY_SUBMISSION_MESSAGE_TYPE.INVALID_XLSX_CSV); + } + + expect(error).to.be.instanceOf(SummarySubmissionError); + expect(parse).to.be.calledOnce; + } + }); + }); + + describe('getSummaryTemplateSpeciesRecords', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return valid `ISummaryTemplateSpeciesData[]`', async () => { + const service = mockService(); + const mockSpecies = sinon + .stub(SurveyService.prototype, 'getSpeciesData') + .resolves({ focal_species: [], focal_species_names: [], ancillary_species: [], ancillary_species_names: [] }); + const mockXLSX = ({ + workbook: { + rawWorkbook: { + Custprops: { sims_name: 'Moose SRB', sims_version: '1.0' } + } + } + } as unknown) as XLSXCSV; + const mockResults = [ + { + summary_template_species_id: 1, + summary_template_id: 1, + wldtaxonomic_units_id: 1, + validation: '', + create_user: 1, + update_date: '', + update_user: 1, + revision_count: 1 + } + ]; + const mockRecords = sinon + .stub(SummaryRepository.prototype, 'getSummaryTemplateSpeciesRecords') + .resolves(mockResults); + + const results = await service.getSummaryTemplateSpeciesRecords(mockXLSX, 1); + expect(results).to.be.eql(mockResults); + expect(mockSpecies).to.be.called; + expect(mockRecords).to.be.called; + }); + }); + + describe('getValidationRules', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return validation schema parser', () => { + const service = mockService(); + + const parser = service.getValidationRules({}); + expect(parser).to.be.instanceOf(ValidationSchemaParser); + }); + + it('should fail with invalid json', () => { + const service = mockService(); + sinon + .stub(service, 'getValidationRules') + .throws(new Error('ValidationSchemaParser - provided json was not valid JSON')); + try { + service.getValidationRules('---'); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.be.eql('ValidationSchemaParser - provided json was not valid JSON'); + } + }); + }); + describe('validateXLSX', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return valid state object', async () => { + const service = mockService(); + const xlsx = new XLSXCSV(buildFile('test file', {})); + const parser = new ValidationSchemaParser({}); + const response = await service.validateXLSX(xlsx, parser); + + expect(response.media_state.isValid).to.be.true; + expect(response.media_state.fileErrors).is.empty; + }); + }); + + describe('persistSummaryValidationResults', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should throw a submission error with multiple messages attached', async () => { + const service = mockService(); + const csvState: ICsvState[] = [ + { + fileName: '', + isValid: false, + keyErrors: [], + headerErrors: [ + { + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_HEADER, + message: '', + col: 'Effort & Effects' + } + ], + rowErrors: [ + { + errorCode: SUBMISSION_MESSAGE_TYPE.INVALID_VALUE, + message: 'Invalid Value', + col: 'Block SU', + row: 1 + } + ] + } + ]; + const mediaState: IMediaState = { + fileName: 'Test.xlsx', + isValid: true + }; + try { + await service.persistSummaryValidationResults(csvState, mediaState); + expect.fail(); + } catch (error) { + if (error instanceof SummarySubmissionError) { + error.summarySubmissionMessages.forEach((e) => { + expect(e.type).to.be.eql(SUMMARY_SUBMISSION_MESSAGE_TYPE.INVALID_VALUE); + }); + } + } + }); + + it('should run without issue', async () => { + it('should return false if no errors are present', async () => { + const service = mockService(); + const csvState: ICsvState[] = []; + const mediaState: IMediaState = { + fileName: 'Test.xlsx', + isValid: true + }; + const response = await service.persistSummaryValidationResults(csvState, mediaState); + // no errors found, data is valid + expect(response).to.be.false; + }); + }); + }); + + describe('insertSummarySubmissionError', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should run without issue', async () => { + const connection = getMockDBConnection(); + const mockService = new SummaryService(connection); + const mockInsert = sinon.stub(SummaryRepository.prototype, 'insertSummarySubmissionMessage').resolves(); + const error = new SummarySubmissionError({ + messages: [new MessageError(SUMMARY_SUBMISSION_MESSAGE_TYPE.MISSING_RECOMMENDED_HEADER)] + }); + await mockService.insertSummarySubmissionError(1, error); + + expect(mockInsert).to.be.called; + }); + }); +}); diff --git a/api/src/services/summary-service.ts b/api/src/services/summary-service.ts new file mode 100644 index 0000000000..cf449d9032 --- /dev/null +++ b/api/src/services/summary-service.ts @@ -0,0 +1,430 @@ +import { SUBMISSION_STATUS_TYPE, SUMMARY_SUBMISSION_MESSAGE_TYPE } from '../constants/status'; +import { IDBConnection } from '../database/db'; +import { PostSummaryDetails } from '../models/summaryresults-create'; +import { + ISummarySubmissionMessagesResponse, + ISummarySubmissionResponse, + ISummaryTemplateSpeciesData, + ISurveySummaryDetails, + SummaryRepository +} from '../repositories/summary-repository'; +import { getFileFromS3 } from '../utils/file-utils'; +import { getLogger } from '../utils/logger'; +import { ICsvState, IHeaderError, IKeyError, IRowError } from '../utils/media/csv/csv-file'; +import { IMediaState, MediaFile } from '../utils/media/media-file'; +import { parseUnknownMedia } from '../utils/media/media-utils'; +import { ValidationSchemaParser } from '../utils/media/validation/validation-schema-parser'; +import { XLSXCSV } from '../utils/media/xlsx/xlsx-file'; +import { + MessageError, + SubmissionError, + SummarySubmissionError, + SummarySubmissionErrorFromMessageType +} from '../utils/submission-error'; +import { DBService } from './db-service'; +import { SurveyService } from './survey-service'; + +const defaultLog = getLogger('services/summary-service'); + +interface ICsvMediaState { + csv_state: ICsvState[]; + media_state: IMediaState; +} + +export class SummaryService extends DBService { + summaryRepository: SummaryRepository; + surveyService: SurveyService; + + constructor(connection: IDBConnection) { + super(connection); + this.summaryRepository = new SummaryRepository(connection); + this.surveyService = new SurveyService(connection); + } + + /** + * Validates a summary submission file based on given summary submission ID and survey ID. + * @param summarySubmissionId + * @param surveyId + * @return {Promise} + */ + async validateFile(summarySubmissionId: number, surveyId: number): Promise { + defaultLog.debug({ label: 'validateFile' }); + try { + // First, prep XLSX + const submissionPrep = await this.summaryTemplatePreparation(summarySubmissionId); + + // Next, validate the summary template + await this.summaryTemplateValidation(submissionPrep.xlsx, surveyId, summarySubmissionId); + } catch (error) { + if (error instanceof SummarySubmissionError) { + // If any summary submission parsing or file errors are thrown, persist them + await this.insertSummarySubmissionError(summarySubmissionId, error); + } else { + throw error; + } + } + } + + /** + * Update existing `survey_summary_submission` record with an S3 key. + * + * @param {number} submissionId + * @param {string} key + * @return {Promise<{ survey_summary_submission_id: number }>} + */ + async updateSurveySummarySubmissionWithKey( + summarySubmissionId: number, + key: string + ): Promise<{ survey_summary_submission_id: number }> { + defaultLog.debug({ label: 'updateSurveySummarySubmissionWithKey' }); + return this.summaryRepository.updateSurveySummarySubmissionWithKey(summarySubmissionId, key); + } + + /** + * Inserts a new record into the `survey_summary_submission` table. + * + * @param {number} surveyId + * @param {string} source + * @param {string} file_name + * @return {Promise<{ survey_summary_submission_id: number }>} + */ + async insertSurveySummarySubmission( + surveyId: number, + source: string, + file_name: string + ): Promise<{ survey_summary_submission_id: number }> { + defaultLog.debug({ label: 'insertSurveySummarySubmission' }); + return this.summaryRepository.insertSurveySummarySubmission(surveyId, source, file_name); + } + + /** + * Soft deletes the summary submission entry by ID + * + * @param {number} summarySubmissionId + * @returns {Promise} row count if delete is successful, null otherwise. + */ + async deleteSummarySubmission(summarySubmissionId: number): Promise { + return this.summaryRepository.deleteSummarySubmission(summarySubmissionId); + } + + /** + * Upload scraped summary submission data. + * + * TODO: Remove this and all related "scraping" code for summary submissions? I don't think we plan to scrape + * summary report submissions anymore. I don't think the "survey_summary_detail" table exists anymore. + * + * @param {number} summarySubmissionId + * @param {any} scrapedSummaryDetail + * @return {Promise<{ survey_summary_detail_id: number }>} + */ + async uploadScrapedSummarySubmission( + summarySubmissionId: number, + scrapedSummaryDetail: PostSummaryDetails + ): Promise<{ survey_summary_detail_id: number }> { + return this.summaryRepository.insertSurveySummaryDetails(summarySubmissionId, scrapedSummaryDetail); + } + + /** + * Gets the list of messages for a summary submission. + * + * @param {number} summarySubmissionId + * @returns {*} {Promise} + */ + async getSummarySubmissionMessages(summarySubmissionId: number): Promise { + return this.summaryRepository.getSummarySubmissionMessages(summarySubmissionId); + } + + /** + * Gets the record for a single summary submission. + * + * @param {number} surveyId + * @returns {Promise} + */ + async findSummarySubmissionById(summarySubmissionId: number): Promise { + return this.summaryRepository.findSummarySubmissionById(summarySubmissionId); + } + + /** + * Gets latest summary submission for a survey. + * + * @param {number} surveyId + * @returns {Promise} + */ + async getLatestSurveySummarySubmission(surveyId: number): Promise { + return this.summaryRepository.getLatestSurveySummarySubmission(surveyId); + } + + /** + * Prepares a summary template submission + * @param summarySubmissionId + * @returns {Promise<{ s3InputKey: string; xlsx: XLSXCSV }>} + */ + async summaryTemplatePreparation(summarySubmissionId: number): Promise<{ s3InputKey: string; xlsx: XLSXCSV }> { + defaultLog.debug({ label: 'summaryTemplatePreparation' }); + try { + const summarySubmission = await this.findSummarySubmissionById(summarySubmissionId); + const s3InputKey = summarySubmission.key; // S3 key + const s3File = await getFileFromS3(s3InputKey); + const xlsx = this.prepXLSX(s3File); + + return { s3InputKey: s3InputKey, xlsx: xlsx }; + } catch (error) { + if (error instanceof SubmissionError) { + error.setStatus(SUBMISSION_STATUS_TYPE.FAILED_SUMMARY_PREPARATION); + } + throw error; + } + } + + /** + * Retrieves template validation schema for the given XLSX file and survey, and validates the + * XLSX. If a summary submission ID is given, details about template validation schema selection + * are logged. + * @param {XLSXCSV} xlsx + * @param {number} surveyId + * @param {number} [summarySubmissionId] + */ + async summaryTemplateValidation(xlsx: XLSXCSV, surveyId: number, summarySubmissionId?: number) { + defaultLog.debug({ label: 'summaryTemplateValidation', data: { surveyId, summarySubmissionId } }); + try { + const summaryTemplateSpeciesRecords = await this.getSummaryTemplateSpeciesRecords(xlsx, surveyId); + + // In the absence of hard requirements for selecting validation schema in the case when focal species matching + // yields many validation schema, we select the first resulting validation schema. + const templateRecord = summaryTemplateSpeciesRecords[0]; + const validationSchema = templateRecord?.validation; + + // If no validation schema is found, throw an error and abort validation. + if (!validationSchema) { + throw SummarySubmissionErrorFromMessageType(SUMMARY_SUBMISSION_MESSAGE_TYPE.FAILED_GET_VALIDATION_RULES); + } + + // If summarySubmissionId is given, log the particular validation schema that was found. + if (summarySubmissionId) { + const { summary_template_species_id } = templateRecord; + const count = summaryTemplateSpeciesRecords.length; + this.summaryRepository.insertSummarySubmissionMessage( + summarySubmissionId, + SUMMARY_SUBMISSION_MESSAGE_TYPE.FOUND_VALIDATION, + `Found validation having summary template species ID '${summary_template_species_id}' among ${count} record(s).` + ); + } + + const schemaParser = this.getValidationRules(validationSchema); + const csvState = this.validateXLSX(xlsx, schemaParser); + await this.persistSummaryValidationResults(csvState.csv_state, csvState.media_state); + } catch (error) { + if (error instanceof SubmissionError) { + error.setStatus(SUBMISSION_STATUS_TYPE.FAILED_VALIDATION); + } + throw error; + } + } + + /** + * Prepares a file for validation. + * @param {any} file + * @returns {XLSXCSV} + */ + prepXLSX(file: any): XLSXCSV { + defaultLog.debug({ label: 'prepXLSX', message: 's3File' }); + const parsedMedia = parseUnknownMedia(file); + + // TODO not sure how to trigger these through testing + if (!parsedMedia) { + throw SummarySubmissionErrorFromMessageType(SUMMARY_SUBMISSION_MESSAGE_TYPE.UNSUPPORTED_FILE_TYPE); + } + + // TODO not sure how to trigger these through testing + if (!(parsedMedia instanceof MediaFile)) { + throw SummarySubmissionErrorFromMessageType(SUMMARY_SUBMISSION_MESSAGE_TYPE.INVALID_XLSX_CSV); + } + + const xlsxCsv = new XLSXCSV(parsedMedia); + + const sims_name = xlsxCsv.workbook.rawWorkbook.Custprops?.['sims_name']; + const sims_version = xlsxCsv.workbook.rawWorkbook.Custprops?.['sims_version']; + + if (!sims_name || !sims_version) { + throw SummarySubmissionErrorFromMessageType(SUMMARY_SUBMISSION_MESSAGE_TYPE.FAILED_TO_GET_TEMPLATE_NAME_VERSION); + } + + return xlsxCsv; + } + + /** + * Retrieves all summary template species records that are constrained by the template + * name, version and survey focal species. + * @param file + * @param surveyId + * @returns {Promise} + */ + async getSummaryTemplateSpeciesRecords(file: XLSXCSV, surveyId: number): Promise { + const speciesData = await this.surveyService.getSpeciesData(surveyId); + + // Summary template name and version + const sims_name: string = file.workbook.rawWorkbook.Custprops?.['sims_name']; + const sims_version: string = file.workbook.rawWorkbook.Custprops?.['sims_version']; + defaultLog.debug({ + label: 'getSummaryTemplateSpeciesRecord', + data: { + surveyId, + species: speciesData, + sims_name, + sims_version + } + }); + + return this.summaryRepository.getSummaryTemplateSpeciesRecords(sims_name, sims_version, speciesData.focal_species); + } + + /** + * Retrieves validation rules for the given validation schema. + * + * @param {string | object} schema + * @returns {ValidationSchemaParser} + */ + getValidationRules(schema: string | object): ValidationSchemaParser { + defaultLog.debug({ label: 'getValidationRules' }); + try { + const validationSchemaParser = new ValidationSchemaParser(schema); + return validationSchemaParser; + } catch { + throw SummarySubmissionErrorFromMessageType(SUMMARY_SUBMISSION_MESSAGE_TYPE.FAILED_PARSE_VALIDATION_SCHEMA); + } + } + + /** + * Validates a given XLSX file. + * @param {XLSXCSV} file + * @param {ValidationSchemaParser} parser + * @returns {ICsvMediaState} + */ + validateXLSX(file: XLSXCSV, parser: ValidationSchemaParser): ICsvMediaState { + defaultLog.debug({ label: 'validateXLSX' }); + + // Run media validations + file.validateMedia(parser); + + const media_state = file.getMediaState(); + if (!media_state.isValid) { + throw SummarySubmissionErrorFromMessageType(SUMMARY_SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA); + } + + // Run CSV content validations + file.validateContent(parser); + const csv_state = file.getContentState(); + + return { csv_state, media_state }; + } + + /** + * Persists summary template CSV validation results in the summary submission messages table. + * + * @param {ICsvState[]} csvState + * @param {IMediaState} mediaState + */ + async persistSummaryValidationResults(csvState: ICsvState[], mediaState: IMediaState): Promise { + defaultLog.debug({ label: 'persistSummaryValidationResults', message: 'validationResults' }); + + let parseError = false; + const errors: MessageError[] = []; + + mediaState.fileErrors?.forEach((fileError) => { + errors.push(new MessageError(SUMMARY_SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA, `${fileError}`, 'Miscellaneous')); + }); + + csvState?.forEach((csvStateItem) => { + csvStateItem.headerErrors?.forEach((headerError) => { + errors.push( + new MessageError( + SUMMARY_SUBMISSION_MESSAGE_TYPE.INVALID_VALUE, + this.generateHeaderErrorMessage(csvStateItem.fileName, headerError), + headerError.errorCode + ) + ); + }); + + csvStateItem.rowErrors?.forEach((rowError) => { + errors.push( + new MessageError( + SUMMARY_SUBMISSION_MESSAGE_TYPE.INVALID_VALUE, + this.generateRowErrorMessage(csvStateItem.fileName, rowError), + rowError.errorCode + ) + ); + }); + + csvStateItem.keyErrors?.forEach((keyError) => { + errors.push( + new MessageError( + SUMMARY_SUBMISSION_MESSAGE_TYPE.DANGLING_PARENT_CHILD_KEY, + this.generateKeyErrorMessage(csvStateItem.fileName, keyError), + keyError.errorCode + ) + ); + }); + + if (!mediaState.isValid || csvState?.some((item) => !item.isValid)) { + // At least 1 error exists, skip remaining steps + parseError = true; + } + }); + + if (parseError) { + throw new SummarySubmissionError({ messages: errors }); + } + } + + /** + * Inserts a message into the summary submission messages table. + * @param {SummarySubmissionError} summarySubmissionId + * @param {SummarySubmissionError} error + * @return {Promise} + */ + async insertSummarySubmissionError(summarySubmissionId: number, error: SummarySubmissionError): Promise { + defaultLog.debug({ label: 'insertSummarySubmissionError', summarySubmissionId, error }); + const promises = error.summarySubmissionMessages.map((message) => { + return this.summaryRepository.insertSummarySubmissionMessage( + summarySubmissionId, + message.type, + message.description + ); + }); + + await Promise.all(promises); + } + + /** + * Generates error messages relating to CSV headers. + * + * @param fileName + * @param headerError + * @returns {string} + */ + generateHeaderErrorMessage(fileName: string, headerError: IHeaderError): string { + return `${fileName} - ${headerError.message} - Column: ${headerError.col}`; + } + + /** + * Generates error messages relating to CSV rows. + * + * @param fileName + * @param rowError + * @returns {string} + */ + generateRowErrorMessage(fileName: string, rowError: IRowError): string { + return `${fileName} - ${rowError.message} - Column: ${rowError.col} - Row: ${rowError.row}`; + } + + /** + * Generates error messages relating to CSV workbook keys. + * + * @param fileName + * @param keyError + * @returns {string} + */ + generateKeyErrorMessage(fileName: string, keyError: IKeyError): string { + return `${fileName} - ${keyError.message} - Rows: ${keyError.rows.join(', ')}`; + } +} diff --git a/api/src/services/survey-service.test.ts b/api/src/services/survey-service.test.ts index 92fb91e889..648c0bf2e6 100644 --- a/api/src/services/survey-service.test.ts +++ b/api/src/services/survey-service.test.ts @@ -1,14 +1,95 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; +import { QueryResult } from 'pg'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import { PutSurveyObject } from '../models/survey-update'; +import { MESSAGE_CLASS_NAME, SUBMISSION_MESSAGE_TYPE, SUBMISSION_STATUS_TYPE } from '../constants/status'; +import { ApiExecuteSQLError, ApiGeneralError } from '../errors/api-error'; +import { GetReportAttachmentsData } from '../models/project-view'; +import { PostProprietorData, PostSurveyObject } from '../models/survey-create'; +import { PutSurveyObject, PutSurveyPermitData } from '../models/survey-update'; +import { + GetAncillarySpeciesData, + GetAttachmentsData, + GetFocalSpeciesData, + GetSurveyData, + GetSurveyFundingSources, + GetSurveyLocationData, + GetSurveyProprietorData, + GetSurveyPurposeAndMethodologyData +} from '../models/survey-view'; +import { IPermitModel } from '../repositories/permit-repository'; +import { + IGetLatestSurveyOccurrenceSubmission, + IGetSpeciesData, + SurveyRepository +} from '../repositories/survey-repository'; import { getMockDBConnection } from '../__mocks__/db'; +import { PermitService } from './permit-service'; import { SurveyService } from './survey-service'; +import { TaxonomyService } from './taxonomy-service'; chai.use(sinonChai); describe('SurveyService', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('getSurveyById', () => { + afterEach(() => { + sinon.restore(); + }); + + it('calls all functions and returns survey object', async () => { + const dbConnectionObj = getMockDBConnection(); + + const surveyService = new SurveyService(dbConnectionObj); + + const getSurveyDataStub = sinon + .stub(SurveyService.prototype, 'getSurveyData') + .resolves(({ data: 'surveyData' } as unknown) as any); + const getSpeciesDataStub = sinon + .stub(SurveyService.prototype, 'getSpeciesData') + .resolves(({ data: 'speciesData' } as unknown) as any); + const getPermitDataStub = sinon + .stub(SurveyService.prototype, 'getPermitData') + .resolves(({ data: 'permitData' } as unknown) as any); + const getSurveyFundingSourcesDataStub = sinon + .stub(SurveyService.prototype, 'getSurveyFundingSourcesData') + .resolves(({ data: 'fundingData' } as unknown) as any); + const getSurveyPurposeAndMethodologyStub = sinon + .stub(SurveyService.prototype, 'getSurveyPurposeAndMethodology') + .resolves(({ data: 'purposeAndMethodologyData' } as unknown) as any); + const getSurveyProprietorDataForViewStub = sinon + .stub(SurveyService.prototype, 'getSurveyProprietorDataForView') + .resolves(({ data: 'proprietorData' } as unknown) as any); + const getSurveyLocationDataStub = sinon + .stub(SurveyService.prototype, 'getSurveyLocationData') + .resolves(({ data: 'locationData' } as unknown) as any); + + const response = await surveyService.getSurveyById(1); + + expect(getSurveyDataStub).to.be.calledOnce; + expect(getSpeciesDataStub).to.be.calledOnce; + expect(getPermitDataStub).to.be.calledOnce; + expect(getSurveyFundingSourcesDataStub).to.be.calledOnce; + expect(getSurveyPurposeAndMethodologyStub).to.be.calledOnce; + expect(getSurveyProprietorDataForViewStub).to.be.calledOnce; + expect(getSurveyLocationDataStub).to.be.calledOnce; + + expect(response).to.eql({ + survey_details: { data: 'surveyData' }, + species: { data: 'speciesData' }, + permit: { data: 'permitData' }, + purpose_and_methodology: { data: 'purposeAndMethodologyData' }, + funding: { data: 'fundingData' }, + proprietor: { data: 'proprietorData' }, + location: { data: 'locationData' } + }); + }); + }); + describe('updateSurvey', () => { afterEach(() => { sinon.restore(); @@ -30,11 +111,10 @@ describe('SurveyService', () => { const surveyService = new SurveyService(dbConnectionObj); - const projectId = 1; const surveyId = 2; const putSurveyData = new PutSurveyObject(null); - await surveyService.updateSurvey(projectId, surveyId, putSurveyData); + await surveyService.updateSurvey(surveyId, putSurveyData); expect(updateSurveyDetailsDataStub).not.to.have.been.called; expect(updateSurveyVantageCodesDataStub).not.to.have.been.called; @@ -60,7 +140,6 @@ describe('SurveyService', () => { const surveyService = new SurveyService(dbConnectionObj); - const projectId = 1; const surveyId = 2; const putSurveyData = new PutSurveyObject({ survey_details: {}, @@ -72,7 +151,7 @@ describe('SurveyService', () => { location: {} }); - await surveyService.updateSurvey(projectId, surveyId, putSurveyData); + await surveyService.updateSurvey(surveyId, putSurveyData); expect(updateSurveyDetailsDataStub).to.have.been.calledOnce; expect(updateSurveyVantageCodesDataStub).to.have.been.calledOnce; @@ -82,4 +161,906 @@ describe('SurveyService', () => { expect(updateSurveyProprietorDataStub).to.have.been.calledOnce; }); }); + + describe('getLatestSurveyOccurrenceSubmission', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = ({ id: 1 } as unknown) as IGetLatestSurveyOccurrenceSubmission; + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getLatestSurveyOccurrenceSubmission').resolves(data); + + const response = await service.getLatestSurveyOccurrenceSubmission(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getSurveyIdsByProjectId', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = [{ id: 1 }]; + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getSurveyIdsByProjectId').resolves(data); + + const response = await service.getSurveyIdsByProjectId(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getSurveySupplementaryDataById', () => { + afterEach(() => { + sinon.restore(); + }); + + it('Gets data if no errors', async () => { + const getOccurrenceSubmissionIdStub = sinon + .stub(SurveyService.prototype, 'getOccurrenceSubmissionId') + .resolves(({ occurrence_submission: 1 } as unknown) as any); + + const getSummaryResultIdStub = sinon + .stub(SurveyService.prototype, 'getSummaryResultId') + .resolves(({ survey_summary_submission: 1 } as unknown) as any); + + const surveyService = new SurveyService(getMockDBConnection()); + + const response = await surveyService.getSurveySupplementaryDataById(1); + + expect(response).to.eql({ + occurrence_submission: { occurrence_submission: 1 }, + summary_result: { survey_summary_submission: 1 } + }); + expect(getOccurrenceSubmissionIdStub).to.be.calledOnce; + expect(getSummaryResultIdStub).to.be.calledOnce; + }); + }); + + describe('getSurveyData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = new GetSurveyData({ id: 1 }); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getSurveyData').resolves(data); + + const response = await service.getSurveyData(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getSpeciesData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = ({ id: 1 } as unknown) as IGetSpeciesData; + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getSpeciesData').resolves([data]); + + const serviceStub1 = sinon + .stub(TaxonomyService.prototype, 'getSpeciesFromIds') + .resolves([{ id: '1', label: 'string' }]); + + const response = await service.getSpeciesData(1); + + expect(repoStub).to.be.calledOnce; + expect(serviceStub1).to.be.calledTwice; + expect(response).to.eql({ + ...new GetFocalSpeciesData([{ id: '1', label: 'string' }]), + ...new GetAncillarySpeciesData([{ id: '1', label: 'string' }]) + }); + }); + }); + + describe('getPermitData', () => { + afterEach(() => { + sinon.restore(); + }); + + it('returns data if valid return', async () => { + const mockPermitResponse: IPermitModel[] = [ + { + permit_id: 1, + survey_id: 1, + number: 'abc', + type: 'Fisheries', + create_date: '2022-02-02', + create_user: 4, + update_date: '2022-02-02', + update_user: 4, + revision_count: 1 + } + ]; + + const mockDBConnection = getMockDBConnection(); + const surveyService = new SurveyService(mockDBConnection); + + const getPermitBySurveyIdStub = sinon + .stub(PermitService.prototype, 'getPermitBySurveyId') + .resolves(mockPermitResponse); + + const response = await surveyService.getPermitData(1); + + expect(getPermitBySurveyIdStub).to.be.calledOnceWith(1); + expect(response).to.eql({ permits: [{ permit_id: 1, permit_number: 'abc', permit_type: 'Fisheries' }] }); + }); + }); + + describe('getSurveyPurposeAndMethodology', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = new GetSurveyPurposeAndMethodologyData({ id: 1 }); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getSurveyPurposeAndMethodology').resolves(data); + + const response = await service.getSurveyPurposeAndMethodology(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getSurveyFundingSourcesData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = new GetSurveyFundingSources([{ id: 1 }]); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getSurveyFundingSourcesData').resolves(data); + + const response = await service.getSurveyFundingSourcesData(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getSurveyProprietorDataForView', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = new GetSurveyProprietorData([{ id: 1 }]); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getSurveyProprietorDataForView').resolves(data); + + const response = await service.getSurveyProprietorDataForView(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getSurveyLocationData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = new GetSurveyLocationData([{ id: 1 }]); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getSurveyLocationData').resolves(data); + + const response = await service.getSurveyLocationData(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getOccurrenceSubmissionId', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = 1; + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getOccurrenceSubmissionId').resolves(data); + + const response = await service.getOccurrenceSubmissionId(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getLatestSurveyOccurrenceSubmission', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = ({ id: 1 } as unknown) as IGetLatestSurveyOccurrenceSubmission; + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getLatestSurveyOccurrenceSubmission').resolves(data); + + const response = await service.getLatestSurveyOccurrenceSubmission(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getSummaryResultId', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = 1; + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getSummaryResultId').resolves(data); + + const response = await service.getSummaryResultId(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getAttachmentsData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = ({ id: 1 } as unknown) as GetAttachmentsData; + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getAttachmentsData').resolves(data); + + const response = await service.getAttachmentsData(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('getReportAttachmentsData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = ({ id: 1 } as unknown) as GetReportAttachmentsData; + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getReportAttachmentsData').resolves(data); + + const response = await service.getReportAttachmentsData(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('insertSurveyData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = 1; + + const repoStub = sinon.stub(SurveyRepository.prototype, 'insertSurveyData').resolves(data); + + const response = await service.insertSurveyData(1, ({ id: 1 } as unknown) as PostSurveyObject); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('insertFocalSpecies', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = 1; + + const repoStub = sinon.stub(SurveyRepository.prototype, 'insertFocalSpecies').resolves(data); + + const response = await service.insertFocalSpecies(1, 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('insertAncillarySpecies', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = 1; + + const repoStub = sinon.stub(SurveyRepository.prototype, 'insertAncillarySpecies').resolves(data); + + const response = await service.insertAncillarySpecies(1, 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('insertVantageCodes', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = 1; + + const repoStub = sinon.stub(SurveyRepository.prototype, 'insertVantageCodes').resolves(data); + + const response = await service.insertVantageCodes(1, 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('insertSurveyProprietor', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const data = 1; + + const repoStub = sinon.stub(SurveyRepository.prototype, 'insertSurveyProprietor').resolves(data); + + const response = await service.insertSurveyProprietor(({ id: 1 } as unknown) as PostProprietorData, 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(data); + }); + }); + + describe('insertOrAssociatePermitToSurvey', () => { + afterEach(() => { + sinon.restore(); + }); + + it('calls associate Survey to permit', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub1 = sinon.stub(SurveyRepository.prototype, 'associateSurveyToPermit').resolves(); + const repoStub2 = sinon.stub(SurveyRepository.prototype, 'insertSurveyPermit').resolves(); + + const response = await service.insertOrAssociatePermitToSurvey(1, 1, 1, 'string', ''); + + expect(repoStub1).to.be.calledOnce; + expect(repoStub2).not.to.be.called; + expect(response).to.eql(undefined); + }); + + it('inserts new survey', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub1 = sinon.stub(SurveyRepository.prototype, 'associateSurveyToPermit').resolves(); + const repoStub2 = sinon.stub(SurveyRepository.prototype, 'insertSurveyPermit').resolves(); + + const response = await service.insertOrAssociatePermitToSurvey(1, 1, 1, 'string', 'string'); + + expect(repoStub1).not.to.be.called; + expect(repoStub2).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + }); + + describe('insertSurveyFundingSource', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'insertSurveyFundingSource').resolves(); + + const response = await service.insertSurveyFundingSource(1, 1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + }); + + describe('updateSurveyDetailsData', () => { + afterEach(() => { + sinon.restore(); + }); + + it('throws api error if response is null', async () => { + const mockDBConnection = getMockDBConnection({ knex: async () => (undefined as unknown) as any }); + const surveyService = new SurveyService(mockDBConnection); + + try { + await surveyService.updateSurveyDetailsData(1, ({ survey_details: 'details' } as unknown) as PutSurveyObject); + expect.fail(); + } catch (actualError) { + expect((actualError as ApiGeneralError).message).to.equal('Failed to update survey data'); + } + }); + + it('returns data if response is not null', async () => { + const mockQueryResponse = ({ response: 'something', rowCount: 1 } as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ knex: async () => mockQueryResponse }); + const surveyService = new SurveyService(mockDBConnection); + + const response = await surveyService.updateSurveyDetailsData(1, ({ + survey_details: 'details' + } as unknown) as PutSurveyObject); + + expect(response).to.eql(undefined); + }); + }); + + describe('updateSurveySpeciesData', () => { + afterEach(() => { + sinon.restore(); + }); + + it('returns data if response is not null', async () => { + sinon.stub(SurveyService.prototype, 'deleteSurveySpeciesData').resolves(); + sinon.stub(SurveyService.prototype, 'insertFocalSpecies').resolves(1); + sinon.stub(SurveyService.prototype, 'insertAncillarySpecies').resolves(1); + + const mockQueryResponse = ({ response: 'something', rowCount: 1 } as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ knex: async () => mockQueryResponse }); + const surveyService = new SurveyService(mockDBConnection); + + const response = await surveyService.updateSurveySpeciesData(1, ({ + survey_details: 'details', + species: { focal_species: [1], ancillary_species: [1] } + } as unknown) as PutSurveyObject); + + expect(response).to.eql([1, 1]); + }); + }); + + describe('deleteSurveySpeciesData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'deleteSurveySpeciesData').resolves(); + + const response = await service.deleteSurveySpeciesData(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + }); + + describe('updateSurveyPermitData', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('with no existing permits', () => { + it('handles permit deletes/updates/creates', async () => { + const mockDBConnection = getMockDBConnection(); + + const getPermitBySurveyIdStub = sinon.stub(PermitService.prototype, 'getPermitBySurveyId').resolves([]); + const deleteSurveyPermitStub = sinon.stub(PermitService.prototype, 'deleteSurveyPermit').resolves(); + const updateSurveyPermitStub = sinon.stub(PermitService.prototype, 'updateSurveyPermit').resolves(); + const createSurveyPermitStub = sinon.stub(PermitService.prototype, 'createSurveyPermit').resolves(); + + const mockPutSurveyObject = { + permit: { + permits: [ + { + permit_id: 2, + permit_number: '1111', + permit_type: 'type1' + }, + { + permit_number: '2222', + permit_type: 'type2' + } + ] + } as PutSurveyPermitData + } as PutSurveyObject; + + const surveyService = new SurveyService(mockDBConnection); + + await surveyService.updateSurveyPermitData(1, mockPutSurveyObject); + + expect(getPermitBySurveyIdStub).to.have.been.calledOnceWith(1); + + expect(deleteSurveyPermitStub).not.to.have.been.called; + + expect(updateSurveyPermitStub).to.have.been.calledOnceWith(1, 2, '1111', 'type1'); + + expect(createSurveyPermitStub).to.have.been.calledOnceWith(1, '2222', 'type2'); + }); + }); + + describe('with existing permits', () => { + it('handles permit deletes/updates/creates', async () => { + const mockDBConnection = getMockDBConnection(); + + const mockExistingPermits = [{ permit_id: 3 }, { permit_id: 4 }] as IPermitModel[]; + + const getPermitBySurveyIdStub = sinon + .stub(PermitService.prototype, 'getPermitBySurveyId') + .resolves(mockExistingPermits); + const deleteSurveyPermitStub = sinon.stub(PermitService.prototype, 'deleteSurveyPermit').resolves(); + const updateSurveyPermitStub = sinon.stub(PermitService.prototype, 'updateSurveyPermit').resolves(); + const createSurveyPermitStub = sinon.stub(PermitService.prototype, 'createSurveyPermit').resolves(); + + const mockPutSurveyObject = { + permit: { + permits: [ + { + permit_id: 2, + permit_number: '1111', + permit_type: 'type1' + }, + { + permit_number: '2222', + permit_type: 'type2' + } + ] + } as PutSurveyPermitData + } as PutSurveyObject; + + const surveyService = new SurveyService(mockDBConnection); + + await surveyService.updateSurveyPermitData(1, mockPutSurveyObject); + + expect(getPermitBySurveyIdStub).to.have.been.calledOnceWith(1); + + expect(deleteSurveyPermitStub).to.have.callCount(2); + expect(deleteSurveyPermitStub).to.have.been.calledWith(1, 3); + expect(deleteSurveyPermitStub).to.have.been.calledWith(1, 4); + + expect(updateSurveyPermitStub).to.have.been.calledOnceWith(1, 2, '1111', 'type1'); + + expect(createSurveyPermitStub).to.have.been.calledOnceWith(1, '2222', 'type2'); + }); + }); + }); + + describe('unassociatePermitFromSurvey', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'unassociatePermitFromSurvey').resolves(); + + const response = await service.unassociatePermitFromSurvey(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + }); + + describe('updateSurveyFundingData', () => { + afterEach(() => { + sinon.restore(); + }); + + it('returns data if response is not null', async () => { + sinon.stub(SurveyService.prototype, 'deleteSurveyFundingSourcesData').resolves(undefined); + sinon.stub(SurveyService.prototype, 'insertSurveyFundingSource').resolves(undefined); + + const mockQueryResponse = (undefined as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ sql: async () => mockQueryResponse }); + const surveyService = new SurveyService(mockDBConnection); + + const response = await surveyService.updateSurveyFundingData(1, ({ + permit: { permit_number: '1', permit_type: 'type' }, + funding: { funding_sources: [1] } + } as unknown) as PutSurveyObject); + + expect(response).to.eql([undefined]); + }); + }); + + describe('deleteSurveyFundingSourcesData', () => { + afterEach(() => { + sinon.restore(); + }); + + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'deleteSurveyFundingSourcesData').resolves(); + + const response = await service.deleteSurveyFundingSourcesData(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + }); + + describe('updateSurveyProprietorData', () => { + afterEach(() => { + sinon.restore(); + }); + + it('returns undefined', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub = sinon.stub(SurveyService.prototype, 'deleteSurveyProprietorData').resolves(); + + const response = await service.updateSurveyProprietorData(1, ({ + proprietor: { survey_data_proprietary: false } + } as unknown) as PutSurveyObject); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + + it('returns and calls insert', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub = sinon.stub(SurveyService.prototype, 'deleteSurveyProprietorData').resolves(); + const serviceStub = sinon.stub(SurveyService.prototype, 'insertSurveyProprietor').resolves(); + + const response = await service.updateSurveyProprietorData(1, ({ + proprietor: { survey_data_proprietary: 'string' } + } as unknown) as PutSurveyObject); + + expect(repoStub).to.be.calledOnce; + expect(serviceStub).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + }); + + describe('deleteSurveyProprietorData', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'deleteSurveyProprietorData').resolves(); + + const response = await service.deleteSurveyProprietorData(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + }); + + describe('updateSurveyVantageCodesData', () => { + afterEach(() => { + sinon.restore(); + }); + + it('returns [] if not vantage_code_ids is given', async () => { + sinon.stub(SurveyService.prototype, 'deleteSurveyVantageCodes').resolves(); + + const mockQueryResponse = (undefined as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ sql: async () => mockQueryResponse }); + const surveyService = new SurveyService(mockDBConnection); + + const response = await surveyService.updateSurveyVantageCodesData(1, ({ + permit: { permit_number: '1', permit_type: 'type' }, + funding: { funding_sources: [1] }, + purpose_and_methodology: { vantage_code_ids: undefined } + } as unknown) as PutSurveyObject); + + expect(response).to.eql([]); + }); + + it('returns data if response is not null', async () => { + sinon.stub(SurveyService.prototype, 'deleteSurveyVantageCodes').resolves(); + sinon.stub(SurveyService.prototype, 'insertVantageCodes').resolves(1); + + const mockQueryResponse = (undefined as unknown) as QueryResult; + + const mockDBConnection = getMockDBConnection({ sql: async () => mockQueryResponse }); + const surveyService = new SurveyService(mockDBConnection); + + const response = await surveyService.updateSurveyVantageCodesData(1, ({ + permit: { permit_number: '1', permit_type: 'type' }, + funding: { funding_sources: [1] }, + proprietor: { survey_data_proprietary: 'asd' }, + purpose_and_methodology: { vantage_code_ids: [1] } + } as unknown) as PutSurveyObject); + + expect(response).to.eql([1]); + }); + }); + + describe('deleteSurveyVantageCodes', () => { + it('returns the first row on success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'deleteSurveyVantageCodes').resolves(); + + const response = await service.deleteSurveyVantageCodes(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(undefined); + }); + }); + + describe('getOccurrenceSubmissionMessages', () => { + it('should return empty array if no messages are found', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getOccurrenceSubmissionMessages').resolves([]); + + const response = await service.getOccurrenceSubmissionMessages(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql([]); + }); + + it('should successfully group messages by message type', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'getOccurrenceSubmissionMessages').resolves([ + { + id: 1, + type: SUBMISSION_MESSAGE_TYPE.DUPLICATE_HEADER, + status: SUBMISSION_STATUS_TYPE.FAILED_VALIDATION, + class: MESSAGE_CLASS_NAME.ERROR, + message: 'message 1' + }, + { + id: 2, + type: SUBMISSION_MESSAGE_TYPE.DUPLICATE_HEADER, + status: SUBMISSION_STATUS_TYPE.FAILED_VALIDATION, + class: MESSAGE_CLASS_NAME.ERROR, + message: 'message 2' + }, + { + id: 3, + type: SUBMISSION_MESSAGE_TYPE.MISSING_RECOMMENDED_HEADER, + status: SUBMISSION_STATUS_TYPE.SUBMITTED, + class: MESSAGE_CLASS_NAME.WARNING, + message: 'message 3' + }, + { + id: 4, + type: SUBMISSION_MESSAGE_TYPE.MISCELLANEOUS, + status: SUBMISSION_STATUS_TYPE.SUBMITTED, + class: MESSAGE_CLASS_NAME.NOTICE, + message: 'message 4' + } + ]); + + const response = await service.getOccurrenceSubmissionMessages(1); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql([ + { + severityLabel: 'Error', + messageTypeLabel: 'Duplicate Header', + messageStatus: 'Failed to validate', + messages: [ + { id: 1, message: 'message 1' }, + { id: 2, message: 'message 2' } + ] + }, + { + severityLabel: 'Warning', + messageTypeLabel: 'Missing Recommended Header', + messageStatus: 'Submitted', + messages: [{ id: 3, message: 'message 3' }] + }, + { + severityLabel: 'Notice', + messageTypeLabel: 'Miscellaneous', + messageStatus: 'Submitted', + messages: [{ id: 4, message: 'message 4' }] + } + ]); + }); + }); + + describe('insertSurveyOccurrenceSubmission', () => { + it('should return submissionId upon success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub = sinon + .stub(SurveyRepository.prototype, 'insertSurveyOccurrenceSubmission') + .resolves({ submissionId: 1 }); + + const response = await service.insertSurveyOccurrenceSubmission({ + surveyId: 1, + source: 'Test' + }); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql({ submissionId: 1 }); + }); + + it('should throw an error', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + sinon + .stub(SurveyRepository.prototype, 'insertSurveyOccurrenceSubmission') + .throws(new ApiExecuteSQLError('Failed to insert survey occurrence submission')); + + try { + await service.insertSurveyOccurrenceSubmission({ + surveyId: 1, + source: 'Test' + }); + expect.fail(); + } catch (actualError) { + expect((actualError as ApiGeneralError).message).to.equal('Failed to insert survey occurrence submission'); + } + }); + }); + + describe('updateSurveyOccurrenceSubmission', () => { + it('should return submissionId upon success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub = sinon + .stub(SurveyRepository.prototype, 'updateSurveyOccurrenceSubmission') + .resolves({ submissionId: 1 }); + + const response = await service.updateSurveyOccurrenceSubmission({ submissionId: 1 }); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql({ submissionId: 1 }); + }); + + it('should throw an error', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + sinon + .stub(SurveyRepository.prototype, 'updateSurveyOccurrenceSubmission') + .throws(new ApiExecuteSQLError('Failed to update survey occurrence submission')); + + try { + await service.updateSurveyOccurrenceSubmission({ submissionId: 1 }); + expect.fail(); + } catch (actualError) { + expect((actualError as ApiGeneralError).message).to.equal('Failed to update survey occurrence submission'); + } + }); + }); + + describe('deleteOccurrenceSubmission', () => { + it('should return 1 upon success', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + const repoStub = sinon.stub(SurveyRepository.prototype, 'deleteOccurrenceSubmission').resolves(1); + + const response = await service.deleteOccurrenceSubmission(2); + + expect(repoStub).to.be.calledOnce; + expect(response).to.eql(1); + }); + + it('should throw an error upon failure', async () => { + const dbConnection = getMockDBConnection(); + const service = new SurveyService(dbConnection); + + sinon + .stub(SurveyRepository.prototype, 'deleteOccurrenceSubmission') + .throws(new ApiExecuteSQLError('Failed to delete survey occurrence submission')); + + try { + await service.deleteOccurrenceSubmission(2); + expect.fail(); + } catch (actualError) { + expect((actualError as ApiGeneralError).message).to.equal('Failed to delete survey occurrence submission'); + } + }); + }); }); diff --git a/api/src/services/survey-service.ts b/api/src/services/survey-service.ts index aaf0d930cb..620ad5f98f 100644 --- a/api/src/services/survey-service.ts +++ b/api/src/services/survey-service.ts @@ -1,11 +1,13 @@ -import SQL from 'sql-template-strings'; -import { ApiGeneralError } from '../errors/custom-error'; +import { MESSAGE_CLASS_NAME, SUBMISSION_MESSAGE_TYPE, SUBMISSION_STATUS_TYPE } from '../constants/status'; +import { IDBConnection } from '../database/db'; import { PostProprietorData, PostSurveyObject } from '../models/survey-create'; import { PutSurveyObject } from '../models/survey-update'; import { GetAncillarySpeciesData, + GetAttachmentsData, GetFocalSpeciesData, GetPermitData, + GetReportAttachmentsData, GetSurveyData, GetSurveyFundingSources, GetSurveyLocationData, @@ -14,17 +16,41 @@ import { SurveyObject, SurveySupplementaryData } from '../models/survey-view'; -import { queries } from '../queries/queries'; -import { DBService } from './service'; +import { AttachmentRepository } from '../repositories/attachment-repository'; +import { + IGetLatestSurveyOccurrenceSubmission, + IObservationSubmissionInsertDetails, + IObservationSubmissionUpdateDetails, + IOccurrenceSubmissionMessagesResponse, + SurveyRepository +} from '../repositories/survey-repository'; +import { getLogger } from '../utils/logger'; +import { DBService } from './db-service'; +import { PermitService } from './permit-service'; import { TaxonomyService } from './taxonomy-service'; +const defaultLog = getLogger('services/survey-service'); + +export interface IMessageTypeGroup { + severityLabel: MESSAGE_CLASS_NAME; + messageTypeLabel: SUBMISSION_MESSAGE_TYPE; + messageStatus: SUBMISSION_STATUS_TYPE; + messages: { id: number; message: string }[]; +} + export class SurveyService extends DBService { - async getSurveyIdsByProjectId(projectId: number): Promise<{ id: number }[]> { - const sqlStatement = queries.survey.getSurveyIdsSQL(projectId); + attachmentRepository: AttachmentRepository; + surveyRepository: SurveyRepository; - const response = await this.connection.sql<{ id: number }>(sqlStatement); + constructor(connection: IDBConnection) { + super(connection); - return response.rows; + this.attachmentRepository = new AttachmentRepository(connection); + this.surveyRepository = new SurveyRepository(connection); + } + + async getSurveyIdsByProjectId(projectId: number): Promise<{ id: number }[]> { + return this.surveyRepository.getSurveyIdsByProjectId(projectId); } async getSurveyById(surveyId: number): Promise { @@ -58,64 +84,26 @@ export class SurveyService extends DBService { } async getSurveySupplementaryDataById(surveyId: number): Promise { - const [occurrenceSubmissionId, summaryResultId] = await Promise.all([ + const [submissionId, summaryResultId] = await Promise.all([ this.getOccurrenceSubmissionId(surveyId), this.getSummaryResultId(surveyId) ]); return { - occurrence_submission: occurrenceSubmissionId, + occurrence_submission: submissionId, summary_result: summaryResultId }; } async getSurveyData(surveyId: number): Promise { - const sqlStatement = SQL` - SELECT - * - FROM - survey - WHERE - survey_id = ${surveyId}; - `; - - const response = await this.connection.sql(sqlStatement); - - const result = response.rows?.[0] || null; - - if (!result) { - throw new ApiGeneralError('Failed to get project survey details data'); - } - - return new GetSurveyData(result); + return this.surveyRepository.getSurveyData(surveyId); } async getSpeciesData(surveyId: number): Promise { - const sqlStatement = SQL` - SELECT - wldtaxonomic_units_id, - is_focal - FROM - study_species - WHERE - survey_id = ${surveyId}; - `; - - const response = await this.connection.query<{ wldtaxonomic_units_id: string; is_focal: boolean }>( - sqlStatement.text, - sqlStatement.values - ); - - const result = (response && response.rows) || null; - - if (!result) { - throw new ApiGeneralError('Failed to get survey species data'); - } + const response = await this.surveyRepository.getSpeciesData(surveyId); - const focalSpeciesIds = response.rows.filter((item) => item.is_focal).map((item) => item.wldtaxonomic_units_id); - const ancillarySpeciesIds = response.rows - .filter((item) => !item.is_focal) - .map((item) => item.wldtaxonomic_units_id); + const focalSpeciesIds = response.filter((item) => item.is_focal).map((item) => item.wldtaxonomic_units_id); + const ancillarySpeciesIds = response.filter((item) => !item.is_focal).map((item) => item.wldtaxonomic_units_id); const taxonomyService = new TaxonomyService(); @@ -126,113 +114,73 @@ export class SurveyService extends DBService { } async getPermitData(surveyId: number): Promise { - const sqlStatement = SQL` - SELECT - number, - type - FROM - permit - WHERE - survey_id = ${surveyId}; - `; - - const response = await this.connection.query<{ number: string; type: string }>( - sqlStatement.text, - sqlStatement.values - ); + const permitService = new PermitService(this.connection); - const result = response.rows?.[0]; + const result = await permitService.getPermitBySurveyId(surveyId); return new GetPermitData(result); } async getSurveyPurposeAndMethodology(surveyId: number): Promise { - const sqlStatement = queries.survey.getSurveyPurposeAndMethodologyForUpdateSQL(surveyId); - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows[0]) || null; - - if (!result) { - throw new ApiGeneralError('Failed to get survey purpose and methodology data'); - } - - return new GetSurveyPurposeAndMethodologyData(result); + return this.surveyRepository.getSurveyPurposeAndMethodology(surveyId); } async getSurveyFundingSourcesData(surveyId: number): Promise { - const sqlStatement = queries.survey.getSurveyFundingSourcesDataForViewSQL(surveyId); - - if (!sqlStatement) { - throw new ApiGeneralError('Failed to build SQL get statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - const result = (response && response.rows) || null; - - if (!result) { - throw new ApiGeneralError('Failed to get survey funding sources data'); - } - - return new GetSurveyFundingSources(result); + return this.surveyRepository.getSurveyFundingSourcesData(surveyId); } async getSurveyProprietorDataForView(surveyId: number): Promise { - const sqlStatement = queries.survey.getSurveyProprietorForUpdateSQL(surveyId); - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - if (!response.rows?.[0]) { - return null; - } - - return new GetSurveyProprietorData(response.rows?.[0]); + return this.surveyRepository.getSurveyProprietorDataForView(surveyId); } async getSurveyLocationData(surveyId: number): Promise { - const sqlStatement = SQL` - SELECT - * - FROM - survey - WHERE - survey_id = ${surveyId}; - `; - - const response = await this.connection.sql(sqlStatement); - - const result = response.rows?.[0] || null; - - if (!result) { - throw new ApiGeneralError('Failed to get project survey details data'); - } - - return new GetSurveyLocationData(result); + return this.surveyRepository.getSurveyLocationData(surveyId); } - async getOccurrenceSubmissionId(surveyId: number) { - const sqlStatement = queries.survey.getLatestOccurrenceSubmissionIdSQL(surveyId); + async getOccurrenceSubmissionId(surveyId: number): Promise { + return this.surveyRepository.getOccurrenceSubmissionId(surveyId); + } - if (!sqlStatement) { - throw new ApiGeneralError('Failed to build SQL get statement'); - } + async getLatestSurveyOccurrenceSubmission(surveyId: number): Promise { + return this.surveyRepository.getLatestSurveyOccurrenceSubmission(surveyId); + } - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + /** + * Retrieves all submission messages by the given submission ID, then groups them based on the message type. + * @param {number} submissionId The ID of the submission + * @returns {*} {Promise} Promise resolving the array of message groups containing the submission messages + */ + async getOccurrenceSubmissionMessages(submissionId: number): Promise { + const messages = await this.surveyRepository.getOccurrenceSubmissionMessages(submissionId); + defaultLog.debug({ label: 'getOccurrenceSubmissionMessages', submissionId, messages }); - return (response && response.rows?.[0]) || null; - } + return messages.reduce((typeGroups: IMessageTypeGroup[], message: IOccurrenceSubmissionMessagesResponse) => { + const groupIndex = typeGroups.findIndex((group) => { + return group.messageTypeLabel === message.type; + }); - async getSummaryResultId(surveyId: number) { - const sqlStatement = queries.survey.getLatestSummaryResultIdSQL(surveyId); + const messageObject = { + id: message.id, + message: message.message + }; - if (!sqlStatement) { - throw new ApiGeneralError('Failed to build SQL get statement'); - } + if (groupIndex < 0) { + typeGroups.push({ + severityLabel: message.class, + messageTypeLabel: message.type, + messageStatus: message.status, + messages: [messageObject] + }); + } else { + typeGroups[groupIndex].messages.push(messageObject); + } - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + return typeGroups; + }, []); + } - return (response && response.rows?.[0]) || null; + async getSummaryResultId(surveyId: number): Promise { + return this.surveyRepository.getSummaryResultId(surveyId); } /** @@ -268,17 +216,14 @@ export class SurveyService extends DBService { ); // Handle inserting any permit associated to this survey - if (postSurveyData.permit.permit_number) { - promises.push( - this.insertOrAssociatePermitToSurvey( - this.connection.systemUserId() as number, - projectId, - surveyId, - postSurveyData.permit.permit_number, - postSurveyData.permit.permit_type + const permitService = new PermitService(this.connection); + promises.push( + Promise.all( + postSurveyData.permit.permits.map((permit) => + permitService.createSurveyPermit(surveyId, permit.permit_number, permit.permit_type) ) - ); - } + ) + ); // Handle inserting any funding sources associated to this survey promises.push( @@ -304,94 +249,32 @@ export class SurveyService extends DBService { return surveyId; } - async insertSurveyData(projectId: number, surveyData: PostSurveyObject): Promise { - const sqlStatement = queries.survey.postSurveySQL(projectId, surveyData); - - if (!sqlStatement) { - throw new ApiGeneralError('Failed to build survey SQL insert statement'); - } - - const response = await this.connection.sql(sqlStatement); - - const result = response.rows[0] || null; + async getAttachmentsData(surveyId: number): Promise { + return this.surveyRepository.getAttachmentsData(surveyId); + } - if (!result) { - throw new ApiGeneralError('Failed to insert survey data'); - } + async getReportAttachmentsData(surveyId: number): Promise { + return this.surveyRepository.getReportAttachmentsData(surveyId); + } - return result.id; + async insertSurveyData(projectId: number, surveyData: PostSurveyObject): Promise { + return this.surveyRepository.insertSurveyData(projectId, surveyData); } async insertFocalSpecies(focal_species_id: number, surveyId: number): Promise { - const sqlStatement = queries.survey.postFocalSpeciesSQL(focal_species_id, surveyId); - - if (!sqlStatement) { - throw new ApiGeneralError('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new ApiGeneralError('Failed to insert focal species data'); - } - - return result.id; + return this.surveyRepository.insertFocalSpecies(focal_species_id, surveyId); } async insertAncillarySpecies(ancillary_species_id: number, surveyId: number): Promise { - const sqlStatement = queries.survey.postAncillarySpeciesSQL(ancillary_species_id, surveyId); - - if (!sqlStatement) { - throw new ApiGeneralError('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new ApiGeneralError('Failed to insert ancillary species data'); - } - - return result.id; + return this.surveyRepository.insertAncillarySpecies(ancillary_species_id, surveyId); } async insertVantageCodes(vantage_code_id: number, surveyId: number): Promise { - const sqlStatement = queries.survey.postVantageCodesSQL(vantage_code_id, surveyId); - - if (!sqlStatement) { - throw new ApiGeneralError('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new ApiGeneralError('Failed to insert ancillary species data'); - } - - return result.id; + return this.surveyRepository.insertVantageCodes(vantage_code_id, surveyId); } async insertSurveyProprietor(survey_proprietor: PostProprietorData, surveyId: number): Promise { - if (!survey_proprietor.survey_data_proprietary) { - return; - } - - const sqlStatement = queries.survey.postSurveyProprietorSQL(surveyId, survey_proprietor); - - if (!sqlStatement) { - throw new ApiGeneralError('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - const result = (response && response.rows && response.rows[0]) || null; - - if (!result || !result.id) { - throw new ApiGeneralError('Failed to insert survey proprietor data'); - } - - return result.id; + return this.surveyRepository.insertSurveyProprietor(survey_proprietor, surveyId); } async insertOrAssociatePermitToSurvey( @@ -401,36 +284,18 @@ export class SurveyService extends DBService { permitNumber: string, permitType: string ) { - let sqlStatement; - if (!permitType) { - sqlStatement = queries.survey.associateSurveyToPermitSQL(projectId, surveyId, permitNumber); + return this.surveyRepository.associateSurveyToPermit(projectId, surveyId, permitNumber); } else { - sqlStatement = queries.survey.insertSurveyPermitSQL(systemUserId, projectId, surveyId, permitNumber, permitType); - } - - const response = await this.connection.sql(sqlStatement); - - if (!response.rowCount) { - throw new ApiGeneralError('Failed to upsert survey permit record'); + return this.surveyRepository.insertSurveyPermit(systemUserId, projectId, surveyId, permitNumber, permitType); } } async insertSurveyFundingSource(funding_source_id: number, surveyId: number) { - const sqlStatement = queries.survey.insertSurveyFundingSourceSQL(surveyId, funding_source_id); - - if (!sqlStatement) { - throw new ApiGeneralError('Failed to build SQL statement for insertSurveyFundingSource'); - } - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - if (!response) { - throw new ApiGeneralError('Failed to insert survey funding source data'); - } + return this.surveyRepository.insertSurveyFundingSource(funding_source_id, surveyId); } - async updateSurvey(projectId: number, surveyId: number, putSurveyData: PutSurveyObject): Promise { + async updateSurvey(surveyId: number, putSurveyData: PutSurveyObject): Promise { const promises: Promise[] = []; if (putSurveyData?.survey_details || putSurveyData?.purpose_and_methodology || putSurveyData?.location) { @@ -446,7 +311,7 @@ export class SurveyService extends DBService { } if (putSurveyData?.permit) { - promises.push(this.updateSurveyPermitData(projectId, surveyId, putSurveyData)); + promises.push(this.updateSurveyPermitData(surveyId, putSurveyData)); } if (putSurveyData?.funding) { @@ -461,17 +326,7 @@ export class SurveyService extends DBService { } async updateSurveyDetailsData(surveyId: number, surveyData: PutSurveyObject) { - const updateSurveyQueryBuilder = queries.survey.putSurveyDetailsSQL(surveyId, surveyData); - - if (!updateSurveyQueryBuilder) { - throw new ApiGeneralError('Failed to build SQL update statement'); - } - - const result = await this.connection.knex(updateSurveyQueryBuilder); - - if (!result || !result.rowCount) { - throw new ApiGeneralError('Failed to update survey data'); - } + return this.surveyRepository.updateSurveyDetailsData(surveyId, surveyData); } async updateSurveySpeciesData(surveyId: number, surveyData: PutSurveyObject) { @@ -491,43 +346,61 @@ export class SurveyService extends DBService { } async deleteSurveySpeciesData(surveyId: number) { - const sqlStatement = queries.survey.deleteAllSurveySpeciesSQL(surveyId); - - return this.connection.sql(sqlStatement); + return this.surveyRepository.deleteSurveySpeciesData(surveyId); } /** - * To update a survey permit, we need to unassociate (not delete) the old permit from the survey and then associate - * the new permit to the survey. Associating a new permit to the survey is done by either inserting a brand new - * permit record into the permit table and setting the survey id column OR updating an existing permit record by - * setting the survey id column. + * Compares incoming survey permit data against the existing survey permits, if any, and determines which need to be + * deleted, added, or updated. * - * @param {number} projectId * @param {number} surveyId * @param {PutSurveyObject} surveyData - * @return {*} * @memberof SurveyService */ - async updateSurveyPermitData(projectId: number, surveyId: number, surveyData: PutSurveyObject) { - await this.unassociatePermitFromSurvey(surveyId); + async updateSurveyPermitData(surveyId: number, surveyData: PutSurveyObject) { + const permitService = new PermitService(this.connection); - if (!surveyData.permit.permit_number) { - return; + // Get any existing permits for this survey + const existingPermits = await permitService.getPermitBySurveyId(surveyId); + + // Compare the array of existing permits to the array of incoming permits (by permit id) and collect any + // existing permits that are not in the incoming permit array. + const existingPermitsToDelete = existingPermits.filter((existingPermit) => { + // Find all existing permits (by permit id) that have no matching incoming permit id + return !surveyData.permit.permits.find((incomingPermit) => incomingPermit.permit_id === existingPermit.permit_id); + }); + + // Delete from the database all existing survey permits that have been removed + if (existingPermitsToDelete.length) { + const promises: Promise[] = []; + + existingPermitsToDelete.forEach((permit) => { + promises.push(permitService.deleteSurveyPermit(surveyId, permit.permit_id)); + }); + + await Promise.all(promises); } - return this.insertOrAssociatePermitToSurvey( - this.connection.systemUserId() as number, - projectId, - surveyId, - surveyData.permit.permit_number, - surveyData.permit.permit_type - ); - } + // The remaining permits are either new, and can be created, or updates to existing permits + const promises: Promise[] = []; - async unassociatePermitFromSurvey(surveyId: number) { - const sqlStatement = queries.survey.unassociatePermitFromSurveySQL(surveyId); + surveyData.permit.permits.forEach((permit) => { + if (permit.permit_id) { + // Has a permit_id, indicating this is an update to an existing permit + promises.push( + permitService.updateSurveyPermit(surveyId, permit.permit_id, permit.permit_number, permit.permit_type) + ); + } else { + // No permit_id, indicating this is a new permit which needs to be created + promises.push(permitService.createSurveyPermit(surveyId, permit.permit_number, permit.permit_type)); + } + }); - return this.connection.sql(sqlStatement); + return Promise.all(promises); + } + + async unassociatePermitFromSurvey(surveyId: number): Promise { + return this.surveyRepository.unassociatePermitFromSurvey(surveyId); } async updateSurveyFundingData(surveyId: number, surveyData: PutSurveyObject) { @@ -542,10 +415,8 @@ export class SurveyService extends DBService { return Promise.all(promises); } - async deleteSurveyFundingSourcesData(surveyId: number) { - const sqlStatement = queries.survey.deleteSurveyFundingSourcesBySurveyIdSQL(surveyId); - - return this.connection.sql(sqlStatement); + async deleteSurveyFundingSourcesData(surveyId: number): Promise { + return this.surveyRepository.deleteSurveyFundingSourcesData(surveyId); } async updateSurveyProprietorData(surveyId: number, surveyData: PutSurveyObject) { @@ -558,10 +429,8 @@ export class SurveyService extends DBService { return this.insertSurveyProprietor(surveyData.proprietor, surveyId); } - async deleteSurveyProprietorData(surveyId: number) { - const sqlStatement = queries.survey.deleteSurveyProprietorSQL(surveyId); - - return this.connection.sql(sqlStatement); + async deleteSurveyProprietorData(surveyId: number): Promise { + return this.surveyRepository.deleteSurveyProprietorData(surveyId); } async updateSurveyVantageCodesData(surveyId: number, surveyData: PutSurveyObject) { @@ -578,33 +447,45 @@ export class SurveyService extends DBService { return Promise.all(promises); } - async deleteSurveyVantageCodes(surveyId: number) { - const sqlStatement = queries.survey.deleteSurveyVantageCodesSQL(surveyId); - - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + async deleteSurveyVantageCodes(surveyId: number): Promise { + return this.surveyRepository.deleteSurveyVantageCodes(surveyId); + } - if (!response) { - throw new ApiGeneralError('Failed to delete survey vantage codes'); - } + async deleteSurvey(surveyId: number): Promise { + return this.surveyRepository.deleteSurvey(surveyId); } /** - * Update a survey, marking it as published/unpublished. + * Inserts a survey occurrence submission row. * - * @param {number} surveyId - * @param {boolean} publish - * @return {*} {(Promise<{ id: number } | null>)} - * @memberof SurveyService + * @param {IObservationSubmissionInsertDetails} submission The details of the submission + * @return {*} {Promise<{ submissionId: number }>} Promise resolving the ID of the submission upon successful insertion */ - async publishSurvey(surveyId: number, publish: boolean): Promise<{ id: number } | null> { - const sqlStatement = queries.survey.updateSurveyPublishStatusSQL(surveyId, publish); - - if (!sqlStatement) { - throw new ApiGeneralError('Failed to build survey publish SQL statement'); - } + async insertSurveyOccurrenceSubmission( + submission: IObservationSubmissionInsertDetails + ): Promise<{ submissionId: number }> { + return this.surveyRepository.insertSurveyOccurrenceSubmission(submission); + } - const response = await this.connection.sql<{ id: number }>(sqlStatement); + /** + * Updates a survey occurrence submission with the given details. + * + * @param {IObservationSubmissionUpdateDetails} submission The details of the submission to be updated + * @return {*} {Promise<{ submissionId: number }>} Promise resolving the ID of the submission upon successfully updating it + */ + async updateSurveyOccurrenceSubmission( + submission: IObservationSubmissionUpdateDetails + ): Promise<{ submissionId: number }> { + return this.surveyRepository.updateSurveyOccurrenceSubmission(submission); + } - return (response && response.rows && response.rows[0]) || null; + /** + * Soft-deletes an occurrence submission. + * + * @param {number} submissionId The ID of the submission to soft delete + * @returns {*} {number} The row count of the affected records, namely `1` if the delete succeeds, `0` if it does not + */ + async deleteOccurrenceSubmission(submissionId: number): Promise { + return this.surveyRepository.deleteOccurrenceSubmission(submissionId); } } diff --git a/api/src/services/taxonomy-service.test.ts b/api/src/services/taxonomy-service.test.ts index d671d203be..3ac41303c3 100644 --- a/api/src/services/taxonomy-service.test.ts +++ b/api/src/services/taxonomy-service.test.ts @@ -1,14 +1,250 @@ +import { AggregationsAggregate, SearchResponse } from '@elastic/elasticsearch/lib/api/types'; import chai, { expect } from 'chai'; import { describe } from 'mocha'; +import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import { TaxonomyService } from './taxonomy-service'; +import { ITaxonomySource, TaxonomyService } from './taxonomy-service'; chai.use(sinonChai); describe('TaxonomyService', () => { + afterEach(() => { + sinon.restore(); + }); + + const mockElasticResponse: SearchResponse> | undefined = { + took: 0, + timed_out: false, + _shards: { + failed: 0, + successful: 1, + total: 1 + }, + hits: { + hits: [] + } + }; + it('constructs', () => { const taxonomyService = new TaxonomyService(); - expect(taxonomyService).to.be.instanceof(TaxonomyService); }); + + describe('getTaxonomyFromIds', async () => { + afterEach(() => { + sinon.restore(); + }); + + it('should query elasticsearch and return []', async () => { + process.env.ELASTICSEARCH_TAXONOMY_INDEX = 'taxonomy_test_2.0.0'; + + const taxonomyService = new TaxonomyService(); + + const elasticSearchStub = sinon.stub(taxonomyService, '_elasticSearch').resolves(undefined); + + const response = await taxonomyService.getTaxonomyFromIds(['1']); + + expect(elasticSearchStub).to.be.calledOnce; + expect(response).to.eql([]); + }); + + it('should query elasticsearch and return taxonomy', async () => { + process.env.ELASTICSEARCH_TAXONOMY_INDEX = 'taxonomy_test_2.0.0'; + + const taxonomyService = new TaxonomyService(); + + const taxonDetails: Omit = { + unit_name1: 'A', + unit_name2: 'B', + unit_name3: 'C', + taxon_authority: 'taxon_authority', + code: 'D', + tty_kingdom: 'kingdom', + tty_name: 'name', + english_name: 'animal', + note: null + }; + + const elasticSearchStub = sinon.stub(taxonomyService, '_elasticSearch').resolves({ + ...mockElasticResponse, + hits: { + hits: [ + { + _index: process.env.ELASTICSEARCH_TAXONOMY_INDEX, + _id: '1', + _source: { + ...taxonDetails, + end_date: null + } + } + ] + } + }); + + const response = await taxonomyService.getTaxonomyFromIds([1]); + + expect(elasticSearchStub).to.be.calledOnce; + + expect(response).to.eql([{ ...taxonDetails, end_date: null }]); + }); + }); + + describe('getSpeciesFromIds', async () => { + afterEach(() => { + sinon.restore(); + }); + + it('should query elasticsearch and return []', async () => { + process.env.ELASTICSEARCH_TAXONOMY_INDEX = 'taxonomy_test_2.0.0'; + + const taxonomyService = new TaxonomyService(); + + const elasticSearchStub = sinon.stub(taxonomyService, '_elasticSearch').resolves(undefined); + + const response = await taxonomyService.getSpeciesFromIds(['1']); + + expect(elasticSearchStub).to.be.calledOnce; + expect(response).to.eql([]); + }); + + it('should query elasticsearch and return sanitized data', async () => { + process.env.ELASTICSEARCH_TAXONOMY_INDEX = 'taxonomy_test_2.0.0'; + + const taxonomyService = new TaxonomyService(); + + const taxonDetails: Omit = { + unit_name1: 'A', + unit_name2: 'B', + unit_name3: 'C', + taxon_authority: 'taxon_authority', + code: 'D', + tty_kingdom: 'kingdom', + tty_name: 'name', + english_name: 'animal', + note: null + }; + + const elasticSearchStub = sinon.stub(taxonomyService, '_elasticSearch').resolves({ + ...mockElasticResponse, + hits: { + hits: [ + { + _index: process.env.ELASTICSEARCH_TAXONOMY_INDEX, + _id: '1', + _source: { + ...taxonDetails, + end_date: null + } + } + ] + } + }); + + const sanitizeSpeciesDataStub = sinon + .stub(taxonomyService, '_sanitizeSpeciesData') + .returns([{ id: '1', label: 'string' }]); + + const response = await taxonomyService.getSpeciesFromIds([1]); + + expect(elasticSearchStub).to.be.calledOnce; + expect(sanitizeSpeciesDataStub).to.be.calledOnce; + expect(response).to.eql([{ id: '1', label: 'string' }]); + }); + }); + + describe('searchSpecies', async () => { + it('should query elasticsearch', async () => { + process.env.ELASTICSEARCH_TAXONOMY_INDEX = 'taxonomy_test_2.0.0'; + + const taxonomyService = new TaxonomyService(); + + const taxonDetails: Omit = { + unit_name1: 'A', + unit_name2: 'B', + unit_name3: 'C', + taxon_authority: 'taxon_authority', + code: 'D', + tty_kingdom: 'kingdom', + tty_name: 'name', + english_name: 'animal', + note: null + }; + + const elasticSearchStub = sinon.stub(taxonomyService, '_elasticSearch').resolves({ + ...mockElasticResponse, + hits: { + hits: [ + { + _index: process.env.ELASTICSEARCH_TAXONOMY_INDEX, + _id: '1', + _source: { + ...taxonDetails, + end_date: null + } + }, + { + _index: process.env.ELASTICSEARCH_TAXONOMY_INDEX, + _id: '2', + _source: { + ...taxonDetails, + end_date: '2010-01-01' + } + }, + { + _index: process.env.ELASTICSEARCH_TAXONOMY_INDEX, + _id: '3', + _source: { + ...taxonDetails, + end_date: '2040-01-01' + } + } + ] + } + }); + + taxonomyService.searchSpecies('search term'); + + expect(elasticSearchStub).to.be.calledOnce; + }); + }); + + describe('getEnrichedDataForSpeciesCode', async () => { + it('should query elasticsearch', async () => { + process.env.ELASTICSEARCH_TAXONOMY_INDEX = 'taxonomy_test_2.0.0'; + + const taxonomyService = new TaxonomyService(); + + const taxonDetails: Omit = { + unit_name1: 'A', + unit_name2: 'B', + unit_name3: 'C', + taxon_authority: 'taxon_authority', + code: 'D', + tty_kingdom: 'kingdom', + tty_name: 'name', + english_name: 'animal', + note: null + }; + + const elasticSearchStub = sinon.stub(taxonomyService, '_elasticSearch').resolves({ + ...mockElasticResponse, + hits: { + hits: [ + { + _index: process.env.ELASTICSEARCH_TAXONOMY_INDEX, + _id: '1', + _source: { + ...taxonDetails, + end_date: null + } + } + ] + } + }); + + taxonomyService.getEnrichedDataForSpeciesCode('code'); + + expect(elasticSearchStub).to.be.calledOnce; + }); + }); }); diff --git a/api/src/services/taxonomy-service.ts b/api/src/services/taxonomy-service.ts index 506fa81b40..8a919c8e72 100644 --- a/api/src/services/taxonomy-service.ts +++ b/api/src/services/taxonomy-service.ts @@ -1,15 +1,48 @@ import { Client } from '@elastic/elasticsearch'; -import { SearchHit, SearchRequest } from '@elastic/elasticsearch/lib/api/types'; +import { + AggregationsAggregate, + QueryDslBoolQuery, + SearchHit, + SearchRequest, + SearchResponse +} from '@elastic/elasticsearch/lib/api/types'; import { getLogger } from '../utils/logger'; const defaultLog = getLogger('services/taxonomy-service'); +export interface ITaxonomySource { + unit_name1: string; + unit_name2: string; + unit_name3: string; + taxon_authority: string; + code: string; + tty_kingdom: string; + tty_name: string; + english_name: string; + note: string | null; + end_date: string | null; +} + +/** + * + * Service for retreiving and processing taxonomic data from Elasticsearch. + */ export class TaxonomyService { - private async elasticSearch(searchRequest: SearchRequest) { + /** + * + * Performs a query in Elasticsearch based on the given search criteria + * @param {SearchRequest} searchRequest The Elastic search request object + * @returns {Promise> | undefined>} + * Promise resolving the search results from Elasticsearch + */ + async _elasticSearch( + searchRequest: SearchRequest + ): Promise> | undefined> { try { const client = new Client({ node: process.env.ELASTICSEARCH_URL }); + return client.search({ - index: 'taxonomy', + index: process.env.ELASTICSEARCH_TAXONOMY_INDEX, ...searchRequest }); } catch (error) { @@ -17,14 +50,23 @@ export class TaxonomyService { } } - private sanitizeSpeciesData = (data: SearchHit[]) => { - return data.map((item) => { + /** + * + * Sanitizes species data retrieved from Elasticsearch. + * @param {SearchHit[]} data The data response from ElasticSearch + * @returns {{ id: string, label: string }[]} An ID and label pair for each taxonomic code + * @memberof TaxonomyService + */ + _sanitizeSpeciesData = (data: SearchHit[]): { id: string; label: string }[] => { + return data.map((item: SearchHit) => { + const { _id: id, _source } = item; + const label = [ - item._source.code, + _source?.code, [ - [item._source.tty_kingdom, item._source.tty_name].filter(Boolean).join(' '), - [item._source.unit_name1, item._source.unit_name2, item._source.unit_name3].filter(Boolean).join(' '), - item._source.english_name + [_source?.tty_kingdom, _source?.tty_name].filter(Boolean).join(' '), + [_source?.unit_name1, _source?.unit_name2, _source?.unit_name3].filter(Boolean).join(' '), + _source?.english_name ] .filter(Boolean) .join(', ') @@ -32,12 +74,19 @@ export class TaxonomyService { .filter(Boolean) .join(': '); - return { id: item._id, label: label }; + return { id, label }; }); }; - async getTaxonomyFromIds(ids: number[]) { - const response = await this.elasticSearch({ + /** + * + * Searches the taxonomy Elasticsearch index by taxonomic code IDs + * @param {string[] | number[]} ids The array of taxonomic code IDs + * @return {Promise<(ITaxonomySource | undefined)[]>} The source of the response from Elasticsearch + * @memberof TaxonomyService + */ + async getTaxonomyFromIds(ids: string[] | number[]) { + const response = await this._elasticSearch({ query: { terms: { _id: ids @@ -48,8 +97,15 @@ export class TaxonomyService { return (response && response.hits.hits.map((item) => item._source)) || []; } - async getSpeciesFromIds(ids: string[]) { - const response = await this.elasticSearch({ + /** + * + * Searches the taxonomy Elasticsearch index by taxonomic code IDs and santizes the response + * @param {string[] | number[]} ids The array of taxonomic code IDs + * @returns {Promise<{ id: string, label: string}[]>} Promise resolving an ID and label pair for each taxonomic code + * @memberof TaxonomyService + */ + async getSpeciesFromIds(ids: string[] | number[]): Promise<{ id: string; label: string }[]> { + const response = await this._elasticSearch({ query: { terms: { _id: ids @@ -57,15 +113,26 @@ export class TaxonomyService { } }); - return response ? this.sanitizeSpeciesData(response.hits.hits) : []; + return response ? this._sanitizeSpeciesData(response.hits.hits) : []; } - async searchSpecies(term: string) { + /** + * + * Maps a taxonomic search term to an Elasticsearch query, then performs the query and sanitizes the response. + * The query also includes a boolean match to only include records whose `end_date` field is either + * undefined/null or is a date that hasn't occurred yet. This filtering is not done on similar ES queries, + * since we must still be able to search by a given taxonomic code ID, even if is one that is expired. + * + * @param {string} term The search term string + * @returns {Promise<{ id: string, label: string}[]>} Promise resolving an ID and label pair for each taxonomic code + * @memberof TaxonomyService + */ + async searchSpecies(term: string): Promise<{ id: string; label: string }[]> { const searchConfig: object[] = []; const splitTerms = term.split(' '); - splitTerms.forEach((item) => { + splitTerms.forEach((item: string) => { searchConfig.push({ wildcard: { english_name: { value: `*${item}*`, boost: 4.0, case_insensitive: true } @@ -86,14 +153,94 @@ export class TaxonomyService { }); }); - const response = await this.elasticSearch({ + const response = await this._elasticSearch({ query: { bool: { - should: searchConfig - } + must: [ + { + bool: { + should: searchConfig + } + }, + { + bool: { + minimum_should_match: 1, + should: [ + { + bool: { + must_not: { + exists: { + field: 'end_date' + } + } + } + }, + { + range: { + end_date: { + gt: 'now' + } + } + } + ] + } + } + ] + } as QueryDslBoolQuery + } + }); + + return response ? this._sanitizeSpeciesData(response.hits.hits) : []; + } + + _formatEnrichedData = (data: SearchHit): { scientificName: string; englishName: string } => { + const scientificName = + [data._source?.unit_name1, data._source?.unit_name2, data._source?.unit_name3].filter(Boolean).join(' ') || ''; + const englishName = data._source?.english_name || ''; + + return { scientificName, englishName }; + }; + + /** + * Fetch formatted taxonomy information for a specific taxon code. + * + * @param {string} taxonCode + * @return {*} {(Promise<{ scientificName: string; englishName: string } | null>)} + * @memberof TaxonomyService + */ + async getEnrichedDataForSpeciesCode( + taxonCode: string + ): Promise<{ scientificName: string; englishName: string } | null> { + const response = await this._elasticSearch({ + query: { + bool: { + must: [ + { + term: { + 'code.keyword': taxonCode.toUpperCase() + } + }, + { + bool: { + minimum_should_match: 1, + should: [ + { + bool: { + must_not: { + exists: { + field: 'end_date' + } + } + } + } + ] + } + } + ] + } as QueryDslBoolQuery } }); - return response ? this.sanitizeSpeciesData(response.hits.hits) : []; + return response ? this._formatEnrichedData(response.hits.hits[0]) : null; } } diff --git a/api/src/services/user-service.test.ts b/api/src/services/user-service.test.ts index 39fd18d5bc..a5f08ada38 100644 --- a/api/src/services/user-service.test.ts +++ b/api/src/services/user-service.test.ts @@ -1,13 +1,11 @@ import chai, { expect } from 'chai'; import { describe } from 'mocha'; -import { QueryResult } from 'pg'; import sinon from 'sinon'; import sinonChai from 'sinon-chai'; -import SQL from 'sql-template-strings'; import { SYSTEM_IDENTITY_SOURCE } from '../constants/database'; -import { ApiError } from '../errors/custom-error'; +import { ApiError } from '../errors/api-error'; import { UserObject } from '../models/user'; -import { queries } from '../queries/queries'; +import { IGetUser, IInsertUser, UserRepository } from '../repositories/user-repository'; import { getMockDBConnection } from '../__mocks__/db'; import { UserService } from './user-service'; @@ -19,49 +17,53 @@ describe('UserService', () => { sinon.restore(); }); - it('should throw an error when no sql statement produced', async function () { - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1 }); + it('returns a UserObject', async function () { + const mockDBConnection = getMockDBConnection(); - const mockUsersByIdSQLResponse = null; - sinon.stub(queries.users, 'getUserByIdSQL').returns(mockUsersByIdSQLResponse); + const mockResponseRow = { system_user_id: 123 }; + const mockUserRepository = sinon.stub(UserRepository.prototype, 'getUserById'); + mockUserRepository.resolves((mockResponseRow as unknown) as IGetUser); const userService = new UserService(mockDBConnection); - try { - await userService.getUserById(1); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to build SQL select statement'); - } + const result = await userService.getUserById(1); + + expect(result).to.eql(new UserObject(mockResponseRow)); + expect(mockUserRepository).to.have.been.calledOnce; }); + }); - it('returns null if the query response has no rows', async function () { - const mockQueryResponse = ({ rows: [] } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); + describe('getUserByGuid', function () { + afterEach(() => { + sinon.restore(); + }); - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'getUserByIdSQL').returns(mockUsersByIdSQLResponse); + it('returns null if the query response has no rows', async function () { + const mockDBConnection = getMockDBConnection(); + const mockUserRepository = sinon.stub(UserRepository.prototype, 'getUserByGuid'); + mockUserRepository.resolves([]); const userService = new UserService(mockDBConnection); - const result = await userService.getUserById(1); + const result = await userService.getUserByGuid('aaaa'); expect(result).to.be.null; + expect(mockUserRepository).to.have.been.calledOnce; }); it('returns a UserObject for the first row of the response', async function () { - const mockResponseRow = { id: 123 }; - const mockQueryResponse = ({ rows: [mockResponseRow] } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); + const mockDBConnection = getMockDBConnection(); - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'getUserByIdSQL').returns(mockUsersByIdSQLResponse); + const mockResponseRow = [{ system_user_id: 123 }]; + const mockUserRepository = sinon.stub(UserRepository.prototype, 'getUserByGuid'); + mockUserRepository.resolves((mockResponseRow as unknown) as IGetUser[]); const userService = new UserService(mockDBConnection); - const result = await userService.getUserById(1); + const result = await userService.getUserByGuid('aaaa'); - expect(result).to.eql(new UserObject(mockResponseRow)); + expect(result).to.eql(new UserObject(mockResponseRow[0])); + expect(mockUserRepository).to.have.been.calledOnce; }); }); @@ -70,49 +72,32 @@ describe('UserService', () => { sinon.restore(); }); - it('should throw an error when no sql statement produced', async function () { - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1 }); - - const mockUsersByIdSQLResponse = null; - sinon.stub(queries.users, 'getUserByUserIdentifierSQL').returns(mockUsersByIdSQLResponse); - - const userService = new UserService(mockDBConnection); - - try { - await userService.getUserByIdentifier('identifier'); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to build SQL select statement'); - } - }); - it('returns null if the query response has no rows', async function () { - const mockQueryResponse = ({ rows: [] } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); - - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'getUserByUserIdentifierSQL').returns(mockUsersByIdSQLResponse); + const mockDBConnection = getMockDBConnection(); + const mockUserRepository = sinon.stub(UserRepository.prototype, 'getUserByIdentifier'); + mockUserRepository.resolves([]); const userService = new UserService(mockDBConnection); - const result = await userService.getUserByIdentifier('identifier'); + const result = await userService.getUserByIdentifier('aaaa', 'bbbb'); expect(result).to.be.null; + expect(mockUserRepository).to.have.been.calledOnce; }); it('returns a UserObject for the first row of the response', async function () { - const mockResponseRow = { id: 123 }; - const mockQueryResponse = ({ rows: [mockResponseRow] } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); + const mockDBConnection = getMockDBConnection(); - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'getUserByUserIdentifierSQL').returns(mockUsersByIdSQLResponse); + const mockResponseRow = [{ system_user_id: 123 }]; + const mockUserRepository = sinon.stub(UserRepository.prototype, 'getUserByIdentifier'); + mockUserRepository.resolves((mockResponseRow as unknown) as IGetUser[]); const userService = new UserService(mockDBConnection); - const result = await userService.getUserByIdentifier('identifier'); + const result = await userService.getUserByIdentifier('aaaa', 'bbbb'); - expect(result).to.eql(new UserObject(mockResponseRow)); + expect(result).to.eql(new UserObject(mockResponseRow[0])); + expect(mockUserRepository).to.have.been.calledOnce; }); }); @@ -121,62 +106,23 @@ describe('UserService', () => { sinon.restore(); }); - it('should throw an error when no sql statement produced', async () => { + it('should not throw an error on success', async () => { const mockDBConnection = getMockDBConnection(); - const userService = new UserService(mockDBConnection); - - sinon.stub(queries.users, 'addSystemUserSQL').returns(null); - - const userIdentifier = 'username'; - const identitySource = SYSTEM_IDENTITY_SOURCE.IDIR; - - try { - await userService.addSystemUser(userIdentifier, identitySource); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to build SQL insert statement'); - } - }); - - it('should throw an error when response has no rows', async () => { - const mockQueryResponse = ({ rows: [] } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ query: async () => mockQueryResponse }); + const mockRowObj = { system_user_id: 123 }; + const mockUserRepository = sinon.stub(UserRepository.prototype, 'addSystemUser'); + mockUserRepository.resolves((mockRowObj as unknown) as IInsertUser); const userService = new UserService(mockDBConnection); - sinon.stub(queries.users, 'addSystemUserSQL').returns(SQL`valid sql`); - const userIdentifier = 'username'; + const userGuid = 'aaaa'; const identitySource = SYSTEM_IDENTITY_SOURCE.IDIR; - try { - await userService.addSystemUser(userIdentifier, identitySource); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to insert system user'); - } - }); - - it('should not throw an error on success', async () => { - const mockRowObj = { id: 123 }; - const mockQueryResponse = ({ rows: [mockRowObj] } as unknown) as QueryResult; - const mockQuery = sinon.fake.resolves(mockQueryResponse); - const mockDBConnection = getMockDBConnection({ query: mockQuery }); - - const userService = new UserService(mockDBConnection); - - const addSystemUserSQLStub = sinon.stub(queries.users, 'addSystemUserSQL').returns(SQL`valid sql`); - - const userIdentifier = 'username'; - const identitySource = SYSTEM_IDENTITY_SOURCE.IDIR; - - const result = await userService.addSystemUser(userIdentifier, identitySource); + const result = await userService.addSystemUser(userGuid, userIdentifier, identitySource); expect(result).to.eql(new UserObject(mockRowObj)); - - expect(addSystemUserSQLStub).to.have.been.calledOnce; - expect(mockQuery).to.have.been.calledOnce; + expect(mockUserRepository).to.have.been.calledOnce; }); }); @@ -185,28 +131,10 @@ describe('UserService', () => { sinon.restore(); }); - it('should throw an error when no sql statement produced', async function () { - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1 }); - - const mockUsersByIdSQLResponse = null; - sinon.stub(queries.users, 'getUserListSQL').returns(mockUsersByIdSQLResponse); - - const userService = new UserService(mockDBConnection); - - try { - await userService.listSystemUsers(); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to build SQL select statement'); - } - }); - it('returns empty array if the query response has no rows', async function () { - const mockQueryResponse = ({ rows: [] } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); - - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'getUserListSQL').returns(mockUsersByIdSQLResponse); + const mockDBConnection = getMockDBConnection(); + const mockUserRepository = sinon.stub(UserRepository.prototype, 'listSystemUsers'); + mockUserRepository.resolves([]); const userService = new UserService(mockDBConnection); @@ -216,25 +144,20 @@ describe('UserService', () => { }); it('returns a UserObject for each row of the response', async function () { - const mockResponseRow1 = { id: 123 }; - const mockResponseRow2 = { id: 456 }; - const mockResponseRow3 = { id: 789 }; - const mockQueryResponse = ({ - rows: [mockResponseRow1, mockResponseRow2, mockResponseRow3] - } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); + const mockDBConnection = getMockDBConnection(); - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'getUserListSQL').returns(mockUsersByIdSQLResponse); + const mockResponseRows = [{ system_user_id: 123 }, { system_user_id: 456 }, { system_user_id: 789 }]; + const mockUserRepository = sinon.stub(UserRepository.prototype, 'listSystemUsers'); + mockUserRepository.resolves(mockResponseRows as IGetUser[]); const userService = new UserService(mockDBConnection); const result = await userService.listSystemUsers(); expect(result).to.eql([ - new UserObject(mockResponseRow1), - new UserObject(mockResponseRow2), - new UserObject(mockResponseRow3) + new UserObject(mockResponseRows[0]), + new UserObject(mockResponseRows[1]), + new UserObject(mockResponseRows[2]) ]); }); }); @@ -245,29 +168,28 @@ describe('UserService', () => { }); it('throws an error if it fails to get the current system user id', async () => { - const mockDBConnection = getMockDBConnection({ systemUserId: () => null }); + const mockDBConnection = getMockDBConnection({ systemUserId: () => (null as unknown) as number }); const existingSystemUser = null; - const getUserByIdentifierStub = sinon - .stub(UserService.prototype, 'getUserByIdentifier') - .resolves(existingSystemUser); + const getUserByGuidStub = sinon.stub(UserService.prototype, 'getUserByGuid').resolves(existingSystemUser); const addSystemUserStub = sinon.stub(UserService.prototype, 'addSystemUser'); const activateSystemUserStub = sinon.stub(UserService.prototype, 'activateSystemUser'); const userIdentifier = 'username'; + const userGuid = 'aaaa'; const identitySource = SYSTEM_IDENTITY_SOURCE.IDIR; const userService = new UserService(mockDBConnection); try { - await userService.ensureSystemUser(userIdentifier, identitySource); + await userService.ensureSystemUser(userGuid, userIdentifier, identitySource); expect.fail(); } catch (actualError) { expect((actualError as ApiError).message).to.equal('Failed to identify system user ID'); } - expect(getUserByIdentifierStub).to.have.been.calledOnce; + expect(getUserByGuidStub).to.have.been.calledOnce; expect(addSystemUserStub).not.to.have.been.called; expect(activateSystemUserStub).not.to.have.been.called; }); @@ -276,9 +198,7 @@ describe('UserService', () => { const mockDBConnection = getMockDBConnection({ systemUserId: () => 1 }); const existingSystemUser = null; - const getUserByIdentifierStub = sinon - .stub(UserService.prototype, 'getUserByIdentifier') - .resolves(existingSystemUser); + const getUserByGuidStub = sinon.stub(UserService.prototype, 'getUserByGuid').resolves(existingSystemUser); const addedSystemUser = new UserObject({ system_user_id: 2, record_end_date: null }); const addSystemUserStub = sinon.stub(UserService.prototype, 'addSystemUser').resolves(addedSystemUser); @@ -286,16 +206,17 @@ describe('UserService', () => { const activateSystemUserStub = sinon.stub(UserService.prototype, 'activateSystemUser'); const userIdentifier = 'username'; + const userGuid = 'aaaa'; const identitySource = SYSTEM_IDENTITY_SOURCE.IDIR; const userService = new UserService(mockDBConnection); - const result = await userService.ensureSystemUser(userIdentifier, identitySource); + const result = await userService.ensureSystemUser(userGuid, userIdentifier, identitySource); expect(result.id).to.equal(2); expect(result.record_end_date).to.equal(null); - expect(getUserByIdentifierStub).to.have.been.calledOnce; + expect(getUserByGuidStub).to.have.been.calledOnce; expect(addSystemUserStub).to.have.been.calledOnce; expect(activateSystemUserStub).not.to.have.been.called; }); @@ -310,68 +231,28 @@ describe('UserService', () => { role_ids: [1], role_names: ['Editor'] }); - const getUserByIdentifierStub = sinon - .stub(UserService.prototype, 'getUserByIdentifier') - .resolves(existingInactiveSystemUser); + const getUserByGuidStub = sinon.stub(UserService.prototype, 'getUserByGuid').resolves(existingInactiveSystemUser); const addSystemUserStub = sinon.stub(UserService.prototype, 'addSystemUser'); const activateSystemUserStub = sinon.stub(UserService.prototype, 'activateSystemUser'); const userIdentifier = 'username'; + const userGuid = 'aaaa'; const identitySource = SYSTEM_IDENTITY_SOURCE.IDIR; const userService = new UserService(mockDBConnection); - const result = await userService.ensureSystemUser(userIdentifier, identitySource); + const result = await userService.ensureSystemUser(userGuid, userIdentifier, identitySource); expect(result.id).to.equal(2); expect(result.record_end_date).to.equal(null); - expect(getUserByIdentifierStub).to.have.been.calledOnce; + expect(getUserByGuidStub).to.have.been.calledOnce; expect(addSystemUserStub).not.to.have.been.called; expect(activateSystemUserStub).not.to.have.been.called; }); - it('throws an error if it fails to get the newly activated user', async () => { - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1 }); - - const existingSystemUser = new UserObject({ - system_user_id: 2, - user_identifier: SYSTEM_IDENTITY_SOURCE.IDIR, - record_end_date: '2021-11-22', - role_ids: [1], - role_names: ['Editor'] - }); - const getUserByIdentifierStub = sinon - .stub(UserService.prototype, 'getUserByIdentifier') - .resolves(existingSystemUser); - - const addSystemUserStub = sinon.stub(UserService.prototype, 'addSystemUser'); - - const activateSystemUserStub = sinon.stub(UserService.prototype, 'activateSystemUser'); - - const activatedSystemUser = null; - const getUserByIdStub = sinon.stub(UserService.prototype, 'getUserById').resolves(activatedSystemUser); - - const userIdentifier = 'username'; - const identitySource = SYSTEM_IDENTITY_SOURCE.IDIR; - - const userService = new UserService(mockDBConnection); - - try { - await userService.ensureSystemUser(userIdentifier, identitySource); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to ensure system user'); - } - - expect(getUserByIdentifierStub).to.have.been.calledOnce; - expect(addSystemUserStub).not.to.have.been.called; - expect(activateSystemUserStub).to.have.been.calledOnce; - expect(getUserByIdStub).to.have.been.calledOnce; - }); - it('gets an existing system user that is not already active and re-activates it', async () => { const mockDBConnection = getMockDBConnection({ systemUserId: () => 1 }); @@ -382,9 +263,7 @@ describe('UserService', () => { role_ids: [1], role_names: ['Editor'] }); - const getUserByIdentifierStub = sinon - .stub(UserService.prototype, 'getUserByIdentifier') - .resolves(existingSystemUser); + const getUserByGuidStub = sinon.stub(UserService.prototype, 'getUserByGuid').resolves(existingSystemUser); const addSystemUserStub = sinon.stub(UserService.prototype, 'addSystemUser'); @@ -400,172 +279,32 @@ describe('UserService', () => { const getUserByIdStub = sinon.stub(UserService.prototype, 'getUserById').resolves(activatedSystemUser); const userIdentifier = 'username'; + const userGuid = 'aaaa'; const identitySource = SYSTEM_IDENTITY_SOURCE.IDIR; const userService = new UserService(mockDBConnection); - const result = await userService.ensureSystemUser(userIdentifier, identitySource); + const result = await userService.ensureSystemUser(userGuid, userIdentifier, identitySource); expect(result.id).to.equal(2); expect(result.record_end_date).to.equal(null); - expect(getUserByIdentifierStub).to.have.been.calledOnce; + expect(getUserByGuidStub).to.have.been.calledOnce; expect(addSystemUserStub).not.to.have.been.called; expect(activateSystemUserStub).to.have.been.calledOnce; expect(getUserByIdStub).to.have.been.calledOnce; }); }); - describe('activateSystemUser', function () { - afterEach(() => { - sinon.restore(); - }); - - it('should throw an error when no sql statement produced', async function () { - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1 }); - - const mockUsersByIdSQLResponse = null; - sinon.stub(queries.users, 'activateSystemUserSQL').returns(mockUsersByIdSQLResponse); - - const userService = new UserService(mockDBConnection); - - try { - await userService.activateSystemUser(1); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to build SQL update statement'); - } - }); - - it('throws an error if the query response has no rowCount', async function () { - const mockQueryResponse = ({ rowCount: 0 } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); - - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'activateSystemUserSQL').returns(mockUsersByIdSQLResponse); - - const userService = new UserService(mockDBConnection); - - try { - await userService.activateSystemUser(1); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to activate system user'); - } - }); - - it('returns nothing on success', async function () { - const mockQueryResponse = ({ rowCount: 1 } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); - - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'activateSystemUserSQL').returns(mockUsersByIdSQLResponse); - - const userService = new UserService(mockDBConnection); - - const result = await userService.activateSystemUser(1); - - expect(result).to.be.undefined; - }); - }); - - describe('deactivateSystemUser', function () { - afterEach(() => { - sinon.restore(); - }); - - it('should throw an error when no sql statement produced', async function () { - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1 }); - - const mockUsersByIdSQLResponse = null; - sinon.stub(queries.users, 'deactivateSystemUserSQL').returns(mockUsersByIdSQLResponse); - - const userService = new UserService(mockDBConnection); - - try { - await userService.deactivateSystemUser(1); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to build SQL update statement'); - } - }); - - it('throws an error if the query response has no rowCount', async function () { - const mockQueryResponse = ({ rowCount: 0 } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); - - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'deactivateSystemUserSQL').returns(mockUsersByIdSQLResponse); - - const userService = new UserService(mockDBConnection); - - try { - await userService.deactivateSystemUser(1); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to deactivate system user'); - } - }); - - it('returns nothing on success', async function () { - const mockQueryResponse = ({ rowCount: 1 } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); - - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'deactivateSystemUserSQL').returns(mockUsersByIdSQLResponse); - - const userService = new UserService(mockDBConnection); - - const result = await userService.deactivateSystemUser(1); - - expect(result).to.be.undefined; - }); - }); - describe('deleteUserSystemRoles', function () { afterEach(() => { sinon.restore(); }); - it('should throw an error when no sql statement produced', async function () { - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1 }); - - const mockUsersByIdSQLResponse = null; - sinon.stub(queries.users, 'deleteAllSystemRolesSQL').returns(mockUsersByIdSQLResponse); - - const userService = new UserService(mockDBConnection); - - try { - await userService.deleteUserSystemRoles(1); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to build SQL delete statement'); - } - }); - - it('throws an error if the query response has no rowCount', async function () { - const mockQueryResponse = ({ rowCount: 0 } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); - - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'deleteAllSystemRolesSQL').returns(mockUsersByIdSQLResponse); - - const userService = new UserService(mockDBConnection); - - try { - await userService.deleteUserSystemRoles(1); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to delete user system roles'); - } - }); - it('returns nothing on success', async function () { - const mockQueryResponse = ({ rowCount: 1 } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); - - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'deleteAllSystemRolesSQL').returns(mockUsersByIdSQLResponse); + const mockDBConnection = getMockDBConnection(); + const mockUserRepository = sinon.stub(UserRepository.prototype, 'deleteUserSystemRoles'); + mockUserRepository.resolves(); const userService = new UserService(mockDBConnection); @@ -574,57 +313,4 @@ describe('UserService', () => { expect(result).to.be.undefined; }); }); - - describe('addUserSystemRoles', function () { - afterEach(() => { - sinon.restore(); - }); - - it('should throw an error when no sql statement produced', async function () { - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1 }); - - const mockUsersByIdSQLResponse = null; - sinon.stub(queries.users, 'postSystemRolesSQL').returns(mockUsersByIdSQLResponse); - - const userService = new UserService(mockDBConnection); - - try { - await userService.addUserSystemRoles(1, [1]); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to build SQL insert statement'); - } - }); - - it('throws an error if the query response has no rowCount', async function () { - const mockQueryResponse = ({ rowCount: 0 } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); - - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'postSystemRolesSQL').returns(mockUsersByIdSQLResponse); - - const userService = new UserService(mockDBConnection); - - try { - await userService.addUserSystemRoles(1, [1]); - expect.fail(); - } catch (actualError) { - expect((actualError as ApiError).message).to.equal('Failed to insert user system roles'); - } - }); - - it('returns nothing on success', async function () { - const mockQueryResponse = ({ rowCount: 1 } as unknown) as QueryResult; - const mockDBConnection = getMockDBConnection({ systemUserId: () => 1, query: async () => mockQueryResponse }); - - const mockUsersByIdSQLResponse = SQL`Test SQL Statement`; - sinon.stub(queries.users, 'postSystemRolesSQL').returns(mockUsersByIdSQLResponse); - - const userService = new UserService(mockDBConnection); - - const result = await userService.addUserSystemRoles(1, [1]); - - expect(result).to.be.undefined; - }); - }); }); diff --git a/api/src/services/user-service.ts b/api/src/services/user-service.ts index 1967e1098c..c5dcf923d2 100644 --- a/api/src/services/user-service.ts +++ b/api/src/services/user-service.ts @@ -1,7 +1,10 @@ -import { ApiBuildSQLError, ApiExecuteSQLError } from '../errors/custom-error'; +import { IDBConnection } from '../database/db'; +import { ApiBuildSQLError, ApiExecuteSQLError } from '../errors/api-error'; import { UserObject } from '../models/user'; import { queries } from '../queries/queries'; -import { DBService } from './service'; +import { UserRepository } from '../repositories/user-repository'; +import { getLogger } from '../utils/logger'; +import { DBService } from './db-service'; export type ListSystemUsers = { id: number; @@ -11,43 +14,70 @@ export type ListSystemUsers = { role_names: string[]; }; +const defaultLog = getLogger('services/user-service'); + +/** + * @TODO Replace all implementations of `queries/users/user-queries` with appropriate UserRepository methods. + */ export class UserService extends DBService { + userRepository: UserRepository; + + constructor(connection: IDBConnection) { + super(connection); + + this.userRepository = new UserRepository(connection); + } + /** - * Fetch a single system user by their ID. + * Fetch a single system user by their system user ID. * * @param {number} systemUserId + * @return {*} {(Promise)} + * @memberof UserService + */ + async getUserById(systemUserId: number): Promise { + const response = await this.userRepository.getUserById(systemUserId); + + return new UserObject(response); + } + + /** + * Get an existing system user by their GUID. + * + * @param {string} userGuid The user's GUID * @return {*} {(Promise)} * @memberof UserService */ - async getUserById(systemUserId: number): Promise { - const sqlStatement = queries.users.getUserByIdSQL(systemUserId); + async getUserByGuid(userGuid: string): Promise { + defaultLog.debug({ label: 'getUserByGuid', userGuid }); - if (!sqlStatement) { - throw new ApiBuildSQLError('Failed to build SQL select statement'); - } + const response = await this.userRepository.getUserByGuid(userGuid); - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + if (response.length !== 1) { + return null; + } - return (response?.rows?.[0] && new UserObject(response.rows[0])) || null; + return new UserObject(response[0]); } /** - * Get an existing system user. + * Get an existing system user by their user identifier and identity source. * - * @param {string} userIdentifier - * @return {*} {(Promise)} + * @param userIdentifier the user's identifier + * @param identitySource the user's identity source, e.g. `'IDIR'` + * @return {*} {(Promise)} Promise resolving the UserObject, or `null` if the user wasn't found. * @memberof UserService */ - async getUserByIdentifier(userIdentifier: string): Promise { - const sqlStatement = queries.users.getUserByUserIdentifierSQL(userIdentifier); + async getUserByIdentifier(userIdentifier: string, identitySource: string): Promise { + defaultLog.debug({ label: 'getUserByIdentifier', userIdentifier, identitySource }); - if (!sqlStatement) { - throw new ApiBuildSQLError('Failed to build SQL select statement'); - } + const response = await this.userRepository.getUserByIdentifier(userIdentifier, identitySource); - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); + if (response.length !== 1) { + return null; + } - return (response?.rows?.[0] && new UserObject(response.rows[0])) || null; + return new UserObject(response[0]); } /** @@ -55,27 +85,16 @@ export class UserService extends DBService { * * Note: Will fail if the system user already exists. * + * @param {string | null} userGuid * @param {string} userIdentifier * @param {string} identitySource * @return {*} {Promise} * @memberof UserService */ - async addSystemUser(userIdentifier: string, identitySource: string): Promise { - const addSystemUserSQLStatement = queries.users.addSystemUserSQL(userIdentifier, identitySource); - - if (!addSystemUserSQLStatement) { - throw new ApiBuildSQLError('Failed to build SQL insert statement'); - } - - const response = await this.connection.query(addSystemUserSQLStatement.text, addSystemUserSQLStatement.values); - - const userObject = (response?.rows?.[0] && new UserObject(response.rows[0])) || null; + async addSystemUser(userGuid: string | null, userIdentifier: string, identitySource: string): Promise { + const response = await this.userRepository.addSystemUser(userGuid, userIdentifier, identitySource); - if (!userObject) { - throw new ApiExecuteSQLError('Failed to insert system user'); - } - - return userObject; + return new UserObject(response); } /** @@ -85,33 +104,26 @@ export class UserService extends DBService { * @memberof UserService */ async listSystemUsers(): Promise { - const getUserListSQLStatement = queries.users.getUserListSQL(); - - if (!getUserListSQLStatement) { - throw new ApiBuildSQLError('Failed to build SQL select statement'); - } - - const getUserListResponse = await this.connection.query( - getUserListSQLStatement.text, - getUserListSQLStatement.values - ); + const response = await this.userRepository.listSystemUsers(); - return getUserListResponse.rows.map((row) => new UserObject(row)); + return response.map((row) => new UserObject(row)); } /** * Gets a system user, adding them if they do not already exist, or activating them if they had been deactivated (soft * deleted). * + * @param {string | null} userGuid * @param {string} userIdentifier * @param {string} identitySource - * @param {IDBConnection} connection * @return {*} {Promise} * @memberof UserService */ - async ensureSystemUser(userIdentifier: string, identitySource: string): Promise { + async ensureSystemUser(userGuid: string | null, userIdentifier: string, identitySource: string): Promise { // Check if the user exists in SIMS - let userObject = await this.getUserByIdentifier(userIdentifier); + let userObject = userGuid + ? await this.getUserByGuid(userGuid) + : await this.getUserByIdentifier(userIdentifier, identitySource); if (!userObject) { // Id of the current authenticated user @@ -122,7 +134,7 @@ export class UserService extends DBService { } // Found no existing user, add them - userObject = await this.addSystemUser(userIdentifier, identitySource); + userObject = await this.addSystemUser(userGuid, userIdentifier, identitySource); } if (!userObject.record_end_date) { @@ -198,11 +210,7 @@ export class UserService extends DBService { throw new ApiBuildSQLError('Failed to build SQL delete statement'); } - const response = await this.connection.query(sqlStatement.text, sqlStatement.values); - - if (!response.rowCount) { - throw new ApiExecuteSQLError('Failed to delete user system roles'); - } + await this.connection.query(sqlStatement.text, sqlStatement.values); } /** diff --git a/api/src/services/validation-service.test.ts b/api/src/services/validation-service.test.ts new file mode 100644 index 0000000000..bfa960d6c8 --- /dev/null +++ b/api/src/services/validation-service.test.ts @@ -0,0 +1,1221 @@ +import chai, { expect } from 'chai'; +import { describe } from 'mocha'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import xlsx from 'xlsx'; +import { SUBMISSION_MESSAGE_TYPE, SUBMISSION_STATUS_TYPE } from '../constants/status'; +import { ITemplateMethodologyData } from '../repositories/validation-repository'; +import * as FileUtils from '../utils/file-utils'; +import { ICsvState } from '../utils/media/csv/csv-file'; +import { DWCArchive } from '../utils/media/dwc/dwc-archive-file'; +import { ArchiveFile, IMediaState, MediaFile } from '../utils/media/media-file'; +import * as MediaUtils from '../utils/media/media-utils'; +import { ValidationSchemaParser } from '../utils/media/validation/validation-schema-parser'; +import { TransformSchema } from '../utils/media/xlsx/transformation/xlsx-transform-schema-parser'; +import { XLSXCSV } from '../utils/media/xlsx/xlsx-file'; +import { SubmissionError, SubmissionErrorFromMessageType } from '../utils/submission-error'; +import { getMockDBConnection } from '../__mocks__/db'; +import { OccurrenceService } from './occurrence-service'; +import { ValidationService } from './validation-service'; + +chai.use(sinonChai); + +const mockService = () => { + const dbConnection = getMockDBConnection(); + return new ValidationService(dbConnection); +}; + +const mockOccurrenceSubmission = { + occurrence_submission_id: 1, + survey_id: 1, + template_methodology_species_id: 1, + source: '', + input_key: 'input key', + input_file_name: '', + output_key: 'output key', + output_file_name: '', + darwin_core_source: {} +}; + +const buildFile = (fileName: string, customProps: { template_id?: number; csm_id?: number }) => { + const newWorkbook = xlsx.utils.book_new(); + newWorkbook.Custprops = {}; + + if (customProps.csm_id && customProps.template_id) { + newWorkbook.Custprops['sims_template_id'] = customProps.template_id; + newWorkbook.Custprops['sims_csm_id'] = customProps.csm_id; + } + + const ws_name = 'SheetJS'; + + // make worksheet + const ws_data = [ + ['S', 'h', 'e', 'e', 't', 'J', 'S'], + [1, 2, 3, 4, 5] + ]; + const ws = xlsx.utils.aoa_to_sheet(ws_data); + + // Add the worksheet to the workbook + xlsx.utils.book_append_sheet(newWorkbook, ws, ws_name); + + const buffer = xlsx.write(newWorkbook, { type: 'buffer' }); + + return new MediaFile(fileName, 'text/csv', buffer); +}; + +describe('ValidationService', () => { + afterEach(() => { + sinon.restore(); + }); + + describe('getValidationSchema', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return valid schema', async () => { + const service = mockService(); + sinon.stub(ValidationService.prototype, 'getTemplateMethodologySpeciesRecord').resolves({ + template_methodology_species_id: 1, + wldtaxonomic_units_id: '1', + validation: '{}', + transform: ('{}' as unknown) as TransformSchema + }); + + const file = new XLSXCSV(buildFile('testFile', { template_id: 1, csm_id: 1 })); + const schema = await service.getValidationSchema(file, 1); + expect(schema).to.be.not.null; + }); + + it('should throw Failed to get validation rules error', async () => { + const service = mockService(); + sinon + .stub(ValidationService.prototype, 'getTemplateMethodologySpeciesRecord') + .resolves(({} as unknown) as ITemplateMethodologyData); + + try { + const file = new XLSXCSV(buildFile('testFile', { template_id: 1, csm_id: 1 })); + await service.getValidationSchema(file, 1); + expect.fail(); + } catch (error) { + expect(error).to.be.instanceof(SubmissionError); + if (error instanceof SubmissionError) { + expect(error.submissionMessages[0].type).to.be.eql(SUBMISSION_MESSAGE_TYPE.FAILED_GET_VALIDATION_RULES); + } + } + }); + }); + + describe('getTransformationSchema', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return valid schema', async () => { + const service = mockService(); + sinon.stub(ValidationService.prototype, 'getTemplateMethodologySpeciesRecord').resolves({ + template_methodology_species_id: 1, + wldtaxonomic_units_id: '1', + validation: '{}', + transform: ('{}' as unknown) as TransformSchema + }); + + const file = new XLSXCSV(buildFile('testFile', { template_id: 1, csm_id: 1 })); + const schema = await service.getTransformationSchema(file, 1); + expect(schema).to.be.not.null; + }); + + it('should throw Failed to get transformation rules error', async () => { + const service = mockService(); + sinon + .stub(ValidationService.prototype, 'getTemplateMethodologySpeciesRecord') + .resolves(({} as unknown) as ITemplateMethodologyData); + + try { + const file = new XLSXCSV(buildFile('testFile', { template_id: 1, csm_id: 1 })); + await service.getTransformationSchema(file, 1); + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + if (error instanceof SubmissionError) { + expect(error.submissionMessages[0].type).to.be.eql(SUBMISSION_MESSAGE_TYPE.FAILED_GET_TRANSFORMATION_RULES); + } + } + }); + }); + + describe('templateValidation', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should complete without error', async () => { + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + + const xlsxCsv = new XLSXCSV(file); + sinon.stub(FileUtils, 'getFileFromS3').resolves('file from s3' as any); + + const getValidation = sinon.stub(ValidationService.prototype, 'getValidationSchema').resolves(''); + const getRules = sinon.stub(ValidationService.prototype, 'getValidationRules').resolves(''); + const validate = sinon.stub(ValidationService.prototype, 'validateXLSX').resolves({}); + const persistResults = sinon.stub(ValidationService.prototype, 'persistValidationResults').resolves(true); + + const service = mockService(); + await service.templateValidation(xlsxCsv, 1); + + expect(getValidation).to.be.calledOnce; + expect(getRules).to.be.calledOnce; + expect(validate).to.be.calledOnce; + expect(persistResults).to.be.calledOnce; + }); + + it('should throw Failed to validate error', async () => { + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const xlsxCsv = new XLSXCSV(file); + sinon.stub(FileUtils, 'getFileFromS3').resolves('file from s3' as any); + + sinon.stub(ValidationService.prototype, 'getValidationSchema').throws(new SubmissionError({})); + sinon.stub(ValidationService.prototype, 'getValidationRules').resolves({}); + sinon.stub(ValidationService.prototype, 'validateXLSX').resolves({}); + sinon.stub(ValidationService.prototype, 'persistValidationResults').resolves(true); + + try { + const dbConnection = getMockDBConnection(); + const service = new ValidationService(dbConnection); + await service.templateValidation(xlsxCsv, 1); + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + if (error instanceof SubmissionError) { + expect(error.status).to.be.eql(SUBMISSION_STATUS_TYPE.FAILED_VALIDATION); + } + } + }); + }); + + describe('templatePreparation', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return valid S3 key and xlsx object', async () => { + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const s3Key = 's3 key'; + sinon.stub(FileUtils, 'getFileFromS3').resolves('file from s3' as any); + sinon.stub(ValidationService.prototype, 'prepXLSX').returns(new XLSXCSV(file)); + sinon.stub(OccurrenceService.prototype, 'getOccurrenceSubmission').resolves({ + occurrence_submission_id: 1, + survey_id: 1, + template_methodology_species_id: 1, + source: '', + input_key: s3Key, + input_file_name: '', + output_key: '', + output_file_name: '', + darwin_core_source: {} + }); + + const service = mockService(); + const results = await service.templatePreparation(1); + + expect(results.xlsx).to.not.be.empty; + expect(results.xlsx).to.be.instanceOf(XLSXCSV); + expect(results.s3InputKey).to.be.eql(s3Key); + }); + + it('throws Failed to prepare submission error', async () => { + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const s3Key = 's3 key'; + sinon.stub(FileUtils, 'getFileFromS3').throws(new SubmissionError({})); + sinon.stub(ValidationService.prototype, 'prepXLSX').resolves(new XLSXCSV(file)); + sinon.stub(OccurrenceService.prototype, 'getOccurrenceSubmission').resolves({ + occurrence_submission_id: 1, + survey_id: 1, + template_methodology_species_id: 1, + source: '', + input_key: s3Key, + input_file_name: '', + output_key: '', + output_file_name: '', + darwin_core_source: {} + }); + + try { + const dbConnection = getMockDBConnection(); + const service = new ValidationService(dbConnection); + await service.templatePreparation(1); + + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + if (error instanceof SubmissionError) { + expect(error.status).to.be.eql(SUBMISSION_STATUS_TYPE.FAILED_OCCURRENCE_PREPARATION); + } + } + }); + }); + + describe('prepXLSX', () => { + afterEach(() => { + sinon.restore(); + }); + it('should return valid XLSXCSV', () => { + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const parse = sinon.stub(MediaUtils, 'parseUnknownMedia').returns(file); + sinon.stub(XLSXCSV, 'prototype').returns({ + workbook: { + rawWorkbook: { + Custprops: { + sims_template_id: 1, + sims_csm_id: 1 + } + } + } + }); + + const service = mockService(); + try { + const xlsx = service.prepXLSX(file); + expect(xlsx).to.not.be.empty; + expect(xlsx).to.be.instanceOf(XLSXCSV); + } catch (error) { + expect(parse).to.be.calledOnce; + } + }); + + it('should throw File submitted is not a supported type error', () => { + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const parse = sinon.stub(MediaUtils, 'parseUnknownMedia').returns(null); + + const service = mockService(); + try { + service.prepXLSX(file); + expect.fail(); + } catch (error) { + if (error instanceof SubmissionError) { + expect(error.submissionMessages[0].type).to.be.eql(SUBMISSION_MESSAGE_TYPE.UNSUPPORTED_FILE_TYPE); + } + + expect(error).to.be.instanceOf(SubmissionError); + expect(parse).to.be.calledOnce; + } + }); + + it('should throw Media is invalid error', () => { + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const parse = sinon.stub(MediaUtils, 'parseUnknownMedia').returns(('a file' as unknown) as MediaFile); + + const service = mockService(); + try { + service.prepXLSX(file); + expect.fail(); + } catch (error) { + if (error instanceof SubmissionError) { + expect(error.submissionMessages[0].type).to.be.eql(SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA); + } + + expect(error).to.be.instanceOf(SubmissionError); + expect(parse).to.be.calledOnce; + } + }); + + it('should throw Unable to get transform schema for submission error', () => { + const file = new MediaFile('test.txt', 'text/plain', Buffer.of(0)); + const parse = sinon.stub(MediaUtils, 'parseUnknownMedia').returns(file); + + const service = mockService(); + try { + service.prepXLSX(file); + expect.fail(); + } catch (error) { + if (error instanceof SubmissionError) { + expect(error.submissionMessages[0].type).to.be.eql(SUBMISSION_MESSAGE_TYPE.FAILED_TO_GET_TRANSFORM_SCHEMA); + } + + expect(error).to.be.instanceOf(SubmissionError); + expect(parse).to.be.calledOnce; + } + }); + }); + + describe('persistValidationResults', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should throw a submission error with multiple messages attached', async () => { + const service = mockService(); + const csvState: ICsvState[] = [ + { + fileName: '', + isValid: false, + keyErrors: [ + { + errorCode: SUBMISSION_MESSAGE_TYPE.DANGLING_PARENT_CHILD_KEY, + message: 'Key error', + colNames: ['col1', 'col2'], + rows: [2, 3, 4] + } + ], + headerErrors: [ + { + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_HEADER, + message: '', + col: 'Effort & Effects' + } + ], + rowErrors: [ + { + errorCode: SUBMISSION_MESSAGE_TYPE.INVALID_VALUE, + message: 'No bueno', + col: 'Block SU', + row: 1 + } + ] + } + ]; + const mediaState: IMediaState = { + fileName: 'Test.xlsx', + isValid: true + }; + try { + await service.persistValidationResults(csvState, mediaState); + expect.fail(); + } catch (error) { + if (error instanceof SubmissionError) { + expect(error.status).to.be.eql(SUBMISSION_STATUS_TYPE.REJECTED); + expect(error.submissionMessages.length).to.be.equal(3); + } + } + }); + + it('should return false if no errors are present', async () => { + const service = mockService(); + const csvState: ICsvState[] = []; + const mediaState: IMediaState = { + fileName: 'Test.xlsx', + isValid: true + }; + const response = await service.persistValidationResults(csvState, mediaState); + // no errors found, data is valid + expect(response).to.be.false; + }); + }); + + describe('getValidationRules', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return validation schema parser', () => { + const service = mockService(); + + const parser = service.getValidationRules({}); + expect(parser).to.be.instanceOf(ValidationSchemaParser); + }); + + it('should fail with invalid json', () => { + const service = mockService(); + sinon + .stub(service, 'getValidationRules') + .throws(new Error('ValidationSchemaParser - provided json was not valid JSON')); + try { + service.getValidationRules('---'); + expect.fail(); + } catch (error) { + expect((error as Error).message).to.be.eql('ValidationSchemaParser - provided json was not valid JSON'); + } + }); + }); + + describe('transformFile', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should run without issue', async () => { + const service = mockService(); + const mockPrep = { + s3InputKey: '', + xlsx: new XLSXCSV(buildFile('test file', {})) + }; + const prep = sinon.stub(service, 'templatePreparation').resolves(mockPrep); + const transform = sinon.stub(service, 'templateTransformation').resolves(); + const submissionStatus = sinon.stub(service.submissionRepository, 'insertSubmissionStatus').resolves(); + + await service.transformFile(1, 1); + expect(prep).to.be.calledOnce; + expect(transform).to.be.calledOnce; + expect(submissionStatus).to.be.calledOnce; + }); + + it('should insert submission error', async () => { + const service = mockService(); + const prep = sinon + .stub(service, 'templatePreparation') + .throws(SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_GET_OCCURRENCE)); + const transform = sinon.stub(service, 'templateTransformation').resolves(); + const submissionStatus = sinon.stub(service.submissionRepository, 'insertSubmissionStatus').resolves(); + const insertError = sinon.stub(service.errorService, 'insertSubmissionError').resolves(); + + try { + await service.transformFile(1, 1); + expect(prep).to.be.calledOnce; + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + expect(transform).not.to.be.calledOnce; + expect(submissionStatus).not.to.be.calledOnce; + expect(insertError).to.be.calledOnce; + } + }); + + it('should throw error', async () => { + const service = mockService(); + const mockPrep = { + s3InputKey: '', + xlsx: new XLSXCSV(buildFile('test file', {})) + }; + const prep = sinon.stub(service, 'templatePreparation').resolves(mockPrep); + const transform = sinon.stub(service, 'templateTransformation').resolves(); + const submissionStatus = sinon.stub(service.submissionRepository, 'insertSubmissionStatus').resolves(); + const insertError = sinon.stub(service.errorService, 'insertSubmissionError').throws(); + + try { + await service.transformFile(1, 1); + expect(prep).to.be.calledOnce; + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + expect(transform).not.to.be.calledOnce; + expect(submissionStatus).not.to.be.calledOnce; + expect(insertError).not.to.be.calledOnce; + expect.fail(); + } + }); + }); + + describe('validateFile', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should run without issue', async () => { + const service = mockService(); + const mockPrep = { + s3InputKey: '', + xlsx: new XLSXCSV(buildFile('test file', {})) + }; + const prep = sinon.stub(service, 'templatePreparation').resolves(mockPrep); + const validation = sinon.stub(service, 'templateValidation').resolves(); + const submissionStatus = sinon.stub(service.submissionRepository, 'insertSubmissionStatus').resolves(); + + await service.validateFile(1, 1); + expect(prep).to.be.calledOnce; + expect(validation).to.be.calledOnce; + expect(submissionStatus).to.be.calledOnce; + }); + + it('should insert submission error', async () => { + const service = mockService(); + const mockPrep = { + s3InputKey: '', + xlsx: new XLSXCSV(buildFile('test file', {})) + }; + const prep = sinon.stub(service, 'templatePreparation').resolves(mockPrep); + const validation = sinon + .stub(service, 'templateValidation') + .throws(SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.MISSING_VALIDATION_SCHEMA)); + const submissionStatus = sinon.stub(service.submissionRepository, 'insertSubmissionStatus').resolves(); + const insertError = sinon.stub(service.errorService, 'insertSubmissionError').resolves(); + + try { + await service.validateFile(1, 1); + expect(prep).to.be.calledOnce; + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + expect(insertError).to.be.calledOnce; + expect(validation).not.to.be.calledOnce; + expect(submissionStatus).not.to.be.calledOnce; + } + }); + + it('should throw', async () => { + const service = mockService(); + const mockPrep = { + s3InputKey: '', + xlsx: new XLSXCSV(buildFile('test file', {})) + }; + const prep = sinon.stub(service, 'templatePreparation').resolves(mockPrep); + const validation = sinon.stub(service, 'templateValidation').throws(new Error()); + const submissionStatus = sinon.stub(service.submissionRepository, 'insertSubmissionStatus').resolves(); + const insertError = sinon.stub(service.errorService, 'insertSubmissionError').resolves(); + + try { + await service.validateFile(1, 1); + expect(prep).to.be.calledOnce; + expect(validation).to.be.calledOnce; + } catch (error) { + expect(error).not.to.be.instanceOf(SubmissionError); + expect(insertError).not.to.be.calledOnce; + expect(submissionStatus).not.to.be.calledOnce; + } + }); + }); + + describe('processDWCFile', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should run without issue', async () => { + const service = mockService(); + + const mockPrep = { + s3InputKey: 'input_key', + archive: new DWCArchive(new ArchiveFile('test', 'application/zip', Buffer.from([]), [buildFile('test', {})])) + }; + const mockState = { + csv_state: [], + media_state: { + fileName: 'test', + fileErrors: [], + isValid: true + } + }; + + const prep = sinon.stub(service, 'dwcPreparation').resolves(mockPrep); + const state = sinon.stub(service, 'validateDWC').returns(mockState); + const persistResults = sinon.stub(service, 'persistValidationResults').resolves(); + const submissionStatus = sinon.stub(service.submissionRepository, 'insertSubmissionStatus').resolves(); + const normalize = sinon.stub(service, 'normalizeDWCArchive').resolves(); + const decorate = sinon.stub(service.dwCService, 'decorateDwCJSON').resolves(); + const update = sinon.stub(service.occurrenceService, 'updateDWCSourceForOccurrenceSubmission').resolves(); + const scrape = sinon.stub(service, 'scrapeDwCAndUploadOccurrences').resolves(); + + const workbookBuffer = sinon.stub(service, 'createWorkbookFromJSON').resolves('buffer'); + const upload = sinon + .stub(service, 'uploadDwCWorkbookToS3') + .resolves({ outputFileName: 'outputfilename', s3OutputKey: 's3outputkey' }); + + const update2 = sinon.stub(service.occurrenceService, 'updateSurveyOccurrenceSubmissionWithOutputKey').resolves(); + + await service.processDWCFile(1); + expect(prep).to.be.calledOnce; + expect(state).to.be.calledOnce; + expect(persistResults).to.be.calledOnce; + expect(decorate).to.be.calledOnce; + expect(update).to.be.calledOnce; + expect(submissionStatus).to.be.called; + expect(normalize).to.be.called; + expect(scrape).to.be.called; + expect(workbookBuffer).to.be.called; + expect(upload).to.be.called; + expect(update2).to.be.calledWith(1, 'outputfilename', 's3outputkey'); + }); + + it('should insert submission error from prep failure', async () => { + const service = mockService(); + const mockPrep = { + s3InputKey: '', + archive: new DWCArchive(new ArchiveFile('test', 'application/zip', Buffer.from([]), [buildFile('test', {})])) + }; + const mockState = { + csv_state: [], + media_state: { + fileName: 'test', + fileErrors: [], + isValid: true + } + }; + + const prep = sinon.stub(service, 'dwcPreparation').resolves(mockPrep); + const state = sinon.stub(service, 'validateDWC').returns(mockState); + const persistResults = sinon.stub(service, 'persistValidationResults').resolves(); + const submissionStatus = sinon.stub(service.submissionRepository, 'insertSubmissionStatus').resolves(); + const normalize = sinon.stub(service, 'normalizeDWCArchive').resolves(); + const decorate = sinon.stub(service.dwCService, 'decorateDwCJSON').resolves(); + const update = sinon + .stub(service.occurrenceService, 'updateDWCSourceForOccurrenceSubmission') + .throws(SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_UPDATE_OCCURRENCE_SUBMISSION)); + const insertError = sinon.stub(service.errorService, 'insertSubmissionError').resolves(); + + try { + await service.processDWCFile(1); + expect.fail(); + } catch (error) { + expect(prep).to.be.calledOnce; + expect(state).to.be.calledOnce; + expect(persistResults).to.be.calledOnce; + expect(submissionStatus).to.be.calledOnce; + expect(normalize).to.be.calledOnce; + expect(decorate).to.be.calledOnce; + expect(update).to.be.calledOnce; + + expect(insertError).to.be.calledOnce; + } + }); + + it('should throw unrecognized error', async () => { + const service = mockService(); + const mockPrep = { + s3InputKey: '', + archive: new DWCArchive(new ArchiveFile('test', 'application/zip', Buffer.from([]), [buildFile('test', {})])) + }; + const mockState = { + csv_state: [], + media_state: { + fileName: 'test', + fileErrors: [], + isValid: true + } + }; + + const prep = sinon.stub(service, 'dwcPreparation').resolves(mockPrep); + const state = sinon.stub(service, 'validateDWC').returns(mockState); + const persistResults = sinon.stub(service, 'persistValidationResults').resolves(); + const submissionStatus = sinon.stub(service.submissionRepository, 'insertSubmissionStatus').resolves(); + const normalize = sinon.stub(service, 'normalizeDWCArchive').resolves(); + const decorate = sinon.stub(service.dwCService, 'decorateDwCJSON').resolves(); + const update = sinon.stub(service.occurrenceService, 'updateDWCSourceForOccurrenceSubmission').throws(); + const insertError = sinon.stub(service.errorService, 'insertSubmissionError').resolves(); + + try { + await service.processDWCFile(1); + expect(prep).to.be.calledOnce; + expect(state).to.be.calledOnce; + expect(persistResults).to.be.calledOnce; + expect(update).to.be.calledOnce; + expect(submissionStatus).to.be.calledOnce; + expect(normalize).to.be.calledOnce; + expect(decorate).to.be.calledOnce; + } catch (error) { + expect(error).not.to.be.instanceOf(SubmissionError); + expect(insertError).not.to.be.calledOnce; + } + }); + }); + + describe('processXLSXFile', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should insert submission error - for failing to transform', async () => { + const service = mockService(); + const mockPrep = { + s3InputKey: 'input key', + xlsx: new XLSXCSV(buildFile('test file', {})) + }; + + const prep = sinon.stub(service, 'templatePreparation').resolves(mockPrep); + const validate = sinon.stub(service, 'templateValidation').resolves(); + const transform = sinon + .stub(service, 'templateTransformation') + .throws(SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_TRANSFORM_XLSX)); + const insertError = sinon.stub(service.errorService, 'insertSubmissionError').resolves(); + sinon.stub(service, 'scrapeDwCAndUploadOccurrences').resolves(); + sinon.stub(service.submissionRepository, 'insertSubmissionStatus').resolves(); + + try { + await service.processXLSXFile(1, 1); + expect.fail(); + } catch { + expect(prep).to.be.calledOnce; + expect(validate).to.be.calledOnce; + expect(transform).to.be.calledOnce; + expect(insertError).to.be.calledOnce; + } + }); + + it('should throw unrecognized error', async () => { + const service = mockService(); + const mockPrep = { + s3InputKey: 'input key', + xlsx: new XLSXCSV(buildFile('test file', {})) + }; + + const prep = sinon.stub(service, 'templatePreparation').resolves(mockPrep); + const validate = sinon.stub(service, 'templateValidation').resolves(); + const transform = sinon.stub(service, 'templateTransformation').throws(); + const insertError = sinon.stub(service.errorService, 'insertSubmissionError').resolves(); + sinon.stub(service, 'scrapeDwCAndUploadOccurrences').resolves(); + sinon.stub(service.submissionRepository, 'insertSubmissionStatus').resolves(); + + try { + await service.processXLSXFile(1, 1); + expect(prep).to.be.calledOnce; + expect(validate).to.be.calledOnce; + expect(transform).to.be.calledOnce; + } catch (error) { + expect(error).not.to.be.instanceOf(SubmissionError); + expect(insertError).not.to.be.calledOnce; + } + }); + + it('should run without error', async () => { + const service = mockService(); + const mockPrep = { + s3InputKey: 'input key', + xlsx: new XLSXCSV(buildFile('test file', {})) + }; + + const prep = sinon.stub(service, 'templatePreparation').resolves(mockPrep); + const validate = sinon.stub(service, 'templateValidation').resolves(); + const status = sinon.stub(service.submissionRepository, 'insertSubmissionStatus').resolves(); + const transform = sinon.stub(service, 'templateTransformation').resolves(); + const decorate = sinon.stub(service.dwCService, 'decorateDwCJSON').resolves(); + const update = sinon.stub(service.occurrenceService, 'updateDWCSourceForOccurrenceSubmission').resolves(); + const upload = sinon.stub(service, 'scrapeDwCAndUploadOccurrences').resolves(); + const workbook = sinon.stub(service, 'createWorkbookFromJSON').returns([]); + const uploadWorkbook = sinon + .stub(service, 'uploadDwCWorkbookToS3') + .resolves({ outputFileName: '', s3OutputKey: '' }); + const updatedOutput = sinon + .stub(service.occurrenceService, 'updateSurveyOccurrenceSubmissionWithOutputKey') + .resolves(); + + await service.processXLSXFile(1, 1); + expect(prep).to.be.calledOnce; + expect(validate).to.be.calledOnce; + expect(transform).to.be.calledOnce; + expect(upload).to.be.calledOnce; + expect(status).to.be.calledTwice; + expect(decorate).to.be.calledOnce; + expect(update).to.be.calledOnce; + expect(workbook).to.be.calledOnce; + expect(uploadWorkbook).to.be.calledOnce; + expect(updatedOutput).to.be.calledOnce; + }); + }); + + describe('dwcPreparation', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return archive and input key', async () => { + const service = mockService(); + const archive = new DWCArchive(new ArchiveFile('', '', Buffer.from([]), [])); + const occurrence = sinon + .stub(service.occurrenceService, 'getOccurrenceSubmission') + .resolves(mockOccurrenceSubmission); + const s3 = sinon.stub(FileUtils, 'getFileFromS3').resolves(); + const prep = sinon.stub(service, 'prepDWCArchive').returns(archive); + + const results = await service.dwcPreparation(1); + expect(results.s3InputKey).to.not.be.empty; + expect(occurrence).to.be.calledOnce; + expect(s3).to.be.calledOnce; + expect(prep).to.be.calledOnce; + }); + + it('should throw Failed to process occurrence error', async () => { + const service = mockService(); + const archive = new DWCArchive(new ArchiveFile('', '', Buffer.from([]), [])); + sinon + .stub(service.occurrenceService, 'getOccurrenceSubmission') + .throws(SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_GET_OCCURRENCE)); + sinon.stub(FileUtils, 'getFileFromS3').resolves(); + sinon.stub(service, 'prepDWCArchive').returns(archive); + + try { + await service.dwcPreparation(1); + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + expect((error as SubmissionError).status).to.be.eql(SUBMISSION_STATUS_TYPE.FAILED_PROCESSING_OCCURRENCE_DATA); + expect((error as SubmissionError).submissionMessages[0].type).to.be.eql( + SUBMISSION_MESSAGE_TYPE.FAILED_GET_OCCURRENCE + ); + } + }); + + it('should throw Failed to process occurrence data with S3 messages', async () => { + const service = mockService(); + const archive = new DWCArchive(new ArchiveFile('', '', Buffer.from([]), [])); + sinon.stub(service.occurrenceService, 'getOccurrenceSubmission').resolves(mockOccurrenceSubmission); + sinon + .stub(FileUtils, 'getFileFromS3') + .throws(SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_GET_FILE_FROM_S3)); + sinon.stub(service, 'prepDWCArchive').returns(archive); + + try { + await service.dwcPreparation(1); + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + expect((error as SubmissionError).status).to.be.eql(SUBMISSION_STATUS_TYPE.FAILED_PROCESSING_OCCURRENCE_DATA); + expect((error as SubmissionError).submissionMessages[0].type).to.be.eql( + SUBMISSION_MESSAGE_TYPE.FAILED_GET_FILE_FROM_S3 + ); + } + }); + + it('should throw Media is invalid error', async () => { + const service = mockService(); + sinon.stub(service.occurrenceService, 'getOccurrenceSubmission').resolves(mockOccurrenceSubmission); + sinon.stub(FileUtils, 'getFileFromS3').resolves(); + sinon + .stub(service, 'prepDWCArchive') + .throws(SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA)); + + try { + await service.dwcPreparation(1); + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + expect((error as SubmissionError).status).to.be.eql(SUBMISSION_STATUS_TYPE.FAILED_PROCESSING_OCCURRENCE_DATA); + expect((error as SubmissionError).submissionMessages[0].type).to.be.eql(SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA); + } + }); + }); + + describe('validateDWC', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return valid ICsvMediaState', async () => { + const service = mockService(); + const mockDWCArchive = new DWCArchive( + new ArchiveFile('test', 'application/zip', Buffer.from([]), [buildFile('test', {})]) + ); + + const response = await service.validateDWC(mockDWCArchive); + expect(response.media_state.isValid).to.be.true; + expect(response.media_state.fileErrors).is.empty; + }); + + it('should return file validation errors', async () => { + const service = mockService(); + const mockDWCArchive = new DWCArchive( + new ArchiveFile('test', 'application/zip', Buffer.from([]), [buildFile('test', {})]) + ); + const mockState = { + fileName: 'test', + isValid: false, + keyErrors: [ + { + errorCode: SUBMISSION_MESSAGE_TYPE.DANGLING_PARENT_CHILD_KEY, + message: 'Key error', + colNames: ['col1', 'col2'], + rows: [2, 3, 4] + } + ], + headerErrors: [ + { + errorCode: SUBMISSION_MESSAGE_TYPE.DUPLICATE_HEADER, + message: 'Duplicate Header found', + col: 1 + } + ], + rowErrors: [ + { + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_FIELD, + message: 'Missing required field', + col: '1', + row: 1 + } + ] + } as ICsvState; + sinon.stub(DWCArchive.prototype, 'getContentState').returns([mockState]); + const response = await service.validateDWC(mockDWCArchive); + expect(response.csv_state).is.not.empty; + expect(response.csv_state[0].headerErrors).is.not.empty; + expect(response.csv_state[0].rowErrors).is.not.empty; + expect(response.csv_state[0].keyErrors).is.not.empty; + }); + + it('should throw Failed to validate error', async () => { + const service = mockService(); + const mockDWCArchive = new DWCArchive( + new ArchiveFile('test', 'application/zip', Buffer.from([]), [buildFile('test', {})]) + ); + const mockState = { + fileName: '', + fileErrors: ['some file error'], + isValid: false + } as IMediaState; + sinon.stub(DWCArchive.prototype, 'getMediaState').returns(mockState); + try { + await service.validateDWC(mockDWCArchive); + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + expect((error as SubmissionError).submissionMessages[0].type).to.be.eql(SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA); + } + }); + }); + + describe('scrapeDwCAndUploadOccurrences', () => { + it('should run without issue', async () => { + const service = mockService(); + + const scrape = sinon.stub(service.spatialService, 'runSpatialTransforms').resolves(); + + await service.scrapeDwCAndUploadOccurrences(1); + + expect(scrape).to.be.calledOnce; + }); + + it('should throw Submission Error', async () => { + const service = mockService(); + + const scrape = sinon + .stub(service.spatialService, 'runSpatialTransforms') + .throws(SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA)); + + try { + await service.scrapeDwCAndUploadOccurrences(1); + + expect(scrape).to.be.calledOnce; + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + } + }); + }); + + describe('templateTransformation', () => { + it('should run without issue', async () => { + const file = buildFile('test file', { csm_id: 1, template_id: 1 }); + const xlsxCsv = new XLSXCSV(file); + const fileBuffer = { + name: '', + buffer: Buffer.from([]) + } as any; + + const service = mockService(); + + const getTransformation = sinon + .stub(service, 'getTransformationSchema') + .resolves(({} as unknown) as TransformSchema); + const transform = sinon.stub(service, 'transformXLSX').resolves([fileBuffer]); + + await service.templateTransformation(xlsxCsv, 1); + + expect(getTransformation).to.be.calledOnce; + expect(transform).to.be.calledOnce; + }); + + it('should Submission Error', async () => { + const file = buildFile('test file', { csm_id: 1, template_id: 1 }); + const xlsxCsv = new XLSXCSV(file); + + const service = mockService(); + + const getTransformation = sinon + .stub(service, 'getTransformationSchema') + .resolves(({} as unknown) as TransformSchema); + const transform = sinon + .stub(service, 'transformXLSX') + .throws(SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_UPLOAD_FILE_TO_S3)); + + try { + await service.templateTransformation(xlsxCsv, 1); + expect(getTransformation).to.be.calledOnce; + expect(transform).to.be.calledOnce; + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + } + }); + }); + + describe('validateXLSX', () => { + it('should return valid state object', async () => { + const service = mockService(); + const xlsx = new XLSXCSV(buildFile('test file', {})); + const parser = new ValidationSchemaParser({}); + const response = await service.validateXLSX(xlsx, parser); + + expect(response.media_state.isValid).to.be.true; + expect(response.media_state.fileErrors).is.empty; + }); + + it('should throw Media is invalid error', async () => { + const service = mockService(); + const mockMediaState = { + fileName: 'test file', + isValid: false + } as IMediaState; + const xlsx = new XLSXCSV(buildFile('test file', {})); + const parser = new ValidationSchemaParser({}); + + sinon.stub(XLSXCSV.prototype, 'getMediaState').returns(mockMediaState); + + try { + await service.validateXLSX(xlsx, parser); + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + expect((error as SubmissionError).submissionMessages[0].type).to.be.eql(SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA); + } + }); + + it('should return valid state object with content errors', async () => { + const service = mockService(); + const mockState = { + fileName: 'test', + isValid: false, + keyErrors: [ + { + errorCode: SUBMISSION_MESSAGE_TYPE.DANGLING_PARENT_CHILD_KEY, + message: 'Key error', + colNames: ['col1', 'col2'], + rows: [2, 3, 4] + } + ], + headerErrors: [ + { + errorCode: SUBMISSION_MESSAGE_TYPE.DUPLICATE_HEADER, + message: 'Duplicate header found', + col: 1 + } + ], + rowErrors: [ + { + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_FIELD, + message: 'Missing required field', + col: '1', + row: 1 + } + ] + } as ICsvState; + const xlsx = new XLSXCSV(buildFile('test file', {})); + const parser = new ValidationSchemaParser({}); + sinon.stub(DWCArchive.prototype, 'validateContent'); + sinon.stub(XLSXCSV.prototype, 'getContentState').returns([mockState]); + + const response = await service.validateXLSX(xlsx, parser); + expect(response.csv_state).is.not.empty; + expect(response.csv_state[0].headerErrors).is.not.empty; + expect(response.csv_state[0].rowErrors).is.not.empty; + expect(response.csv_state[0].keyErrors).is.not.empty; + }); + }); + + describe('validateDWCArchive', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return valid ICsvMediaState object', () => { + const service = mockService(); + + sinon.stub(DWCArchive.prototype, 'validateMedia'); + sinon.stub(DWCArchive.prototype, 'validateContent'); + const mock = sinon.stub(DWCArchive.prototype, 'getMediaState').returns({ + isValid: true, + fileName: '' + }); + + const dwcArchive = new DWCArchive(new ArchiveFile('', '', Buffer.from([]), [])); + const csvMediaState = service.validateDWCArchive(dwcArchive, {} as ValidationSchemaParser); + expect(mock).to.be.calledOnce; + expect(csvMediaState).has.property('csv_state'); + expect(csvMediaState).has.property('media_state'); + }); + + it('should throw Media is invalid error', () => { + const service = mockService(); + sinon.stub(DWCArchive.prototype, 'validateMedia'); + const mock = sinon.stub(DWCArchive.prototype, 'getMediaState').returns({ + isValid: false, + fileName: '' + }); + + try { + const dwcArchive = new DWCArchive(new ArchiveFile('', '', Buffer.from([]), [])); + service.validateDWCArchive(dwcArchive, {} as ValidationSchemaParser); + expect(mock).to.be.calledOnce; + expect.fail(); + } catch (error) { + expect(error).to.be.instanceOf(SubmissionError); + expect((error as SubmissionError).submissionMessages[0].type).to.be.eql(SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA); + } + }); + }); + + describe('prepDWCArchive', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should return a DWCArchive', async () => { + const service = mockService(); + const fileName = 'test file'; + const parse = sinon + .stub(MediaUtils, 'parseUnknownMedia') + .returns(new ArchiveFile(fileName, '', Buffer.from([]), [])); + + const archive = await service.prepDWCArchive({} as ArchiveFile); + expect(archive.rawFile.fileName).to.be.eql(fileName); + expect(parse).to.be.calledOnce; + }); + + it('should throw Media is invalid error', async () => { + const service = mockService(); + const parse = sinon.stub(MediaUtils, 'parseUnknownMedia').returns(null); + + try { + await service.prepDWCArchive({} as ArchiveFile); + expect.fail(); + } catch (error) { + expect(parse).to.be.calledOnce; + expect(error).to.be.instanceOf(SubmissionError); + expect((error as SubmissionError).submissionMessages[0].type).to.be.eql(SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA); + } + }); + + it('should throw File submitted is not a supported type error', async () => { + const service = mockService(); + const parse = sinon.stub(MediaUtils, 'parseUnknownMedia').returns(new MediaFile('', '', Buffer.from([]))); + + try { + await service.prepDWCArchive({} as ArchiveFile); + expect.fail(); + } catch (error) { + expect(parse).to.be.calledOnce; + expect(error).to.be.instanceOf(SubmissionError); + expect((error as SubmissionError).submissionMessages[0].type).to.be.eql( + SUBMISSION_MESSAGE_TYPE.UNSUPPORTED_FILE_TYPE + ); + } + }); + }); + + describe('uploadDwCWorkbookToS3', () => { + afterEach(() => { + sinon.restore(); + }); + + it('should run without error', async () => { + const service = mockService(); + const xlsx = new XLSXCSV(buildFile('', { template_id: 1, csm_id: 1 })); + + const s3 = sinon.stub(FileUtils, 'uploadBufferToS3').resolves(); + + await service.uploadDwCWorkbookToS3(1, [], 'outputKey', xlsx); + expect(s3).to.be.calledOnce; + }); + + it('should throw Failed to upload file to S3 error', async () => { + const service = mockService(); + const xlsx = new XLSXCSV(buildFile('', { template_id: 1, csm_id: 1 })); + + const s3 = sinon + .stub(FileUtils, 'uploadBufferToS3') + .throws(SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_UPLOAD_FILE_TO_S3)); + + try { + await service.uploadDwCWorkbookToS3(1, [], 'outputKey', xlsx); + expect(s3).to.be.calledOnce; + expect.fail(); + } catch (error) { + expect((error as SubmissionError).submissionMessages[0].type).to.be.eql( + SUBMISSION_MESSAGE_TYPE.FAILED_UPLOAD_FILE_TO_S3 + ); + } + }); + }); +}); diff --git a/api/src/services/validation-service.ts b/api/src/services/validation-service.ts new file mode 100644 index 0000000000..a54204b587 --- /dev/null +++ b/api/src/services/validation-service.ts @@ -0,0 +1,582 @@ +import AdmZip from 'adm-zip'; +import xlsx from 'xlsx'; +import { SUBMISSION_MESSAGE_TYPE, SUBMISSION_STATUS_TYPE } from '../constants/status'; +import { IDBConnection } from '../database/db'; +import { SubmissionRepository } from '../repositories/submission-repository'; +import { ITemplateMethodologyData, ValidationRepository } from '../repositories/validation-repository'; +import { getFileFromS3, uploadBufferToS3 } from '../utils/file-utils'; +import { getLogger } from '../utils/logger'; +import { ICsvState, IHeaderError, IKeyError, IRowError } from '../utils/media/csv/csv-file'; +import { DWCArchive } from '../utils/media/dwc/dwc-archive-file'; +import { ArchiveFile, IMediaState, MediaFile } from '../utils/media/media-file'; +import { parseUnknownMedia } from '../utils/media/media-utils'; +import { ValidationSchemaParser } from '../utils/media/validation/validation-schema-parser'; +import { XLSXTransform } from '../utils/media/xlsx/transformation/xlsx-transform'; +import { TransformSchema } from '../utils/media/xlsx/transformation/xlsx-transform-schema-parser'; +import { XLSXCSV } from '../utils/media/xlsx/xlsx-file'; +import { MessageError, SubmissionError, SubmissionErrorFromMessageType } from '../utils/submission-error'; +import { DBService } from './db-service'; +import { DwCService } from './dwc-service'; +import { ErrorService } from './error-service'; +import { OccurrenceService } from './occurrence-service'; +import { SpatialService } from './spatial-service'; +import { SurveyService } from './survey-service'; + +const defaultLog = getLogger('services/validation-service'); + +interface ICsvMediaState { + csv_state: ICsvState[]; + media_state: IMediaState; +} + +interface IFileBuffer { + name: string; + buffer: Buffer; +} +export class ValidationService extends DBService { + validationRepository: ValidationRepository; + submissionRepository: SubmissionRepository; + surveyService: SurveyService; + occurrenceService: OccurrenceService; + spatialService: SpatialService; + dwCService: DwCService; + errorService: ErrorService; + + constructor(connection: IDBConnection) { + super(connection); + this.validationRepository = new ValidationRepository(connection); + this.submissionRepository = new SubmissionRepository(connection); + this.surveyService = new SurveyService(connection); + this.occurrenceService = new OccurrenceService(connection); + this.spatialService = new SpatialService(connection); + this.dwCService = new DwCService(connection); + this.errorService = new ErrorService(connection); + } + + async transformFile(submissionId: number, surveyId: number) { + defaultLog.debug({ label: 'transformFile', submissionId, surveyId }); + try { + const submissionPrep = await this.templatePreparation(submissionId); + await this.templateTransformation(submissionPrep.xlsx, surveyId); + + // insert template transformed status + await this.submissionRepository.insertSubmissionStatus(submissionId, SUBMISSION_STATUS_TYPE.TEMPLATE_TRANSFORMED); + } catch (error) { + if (error instanceof SubmissionError) { + await this.errorService.insertSubmissionError(submissionId, error); + } else { + throw error; + } + } + } + + async validateFile(submissionId: number, surveyId: number) { + defaultLog.debug({ label: 'validateFile', submissionId, surveyId }); + try { + const submissionPrep = await this.templatePreparation(submissionId); + await this.templateValidation(submissionPrep.xlsx, surveyId); + + // insert template validated status + await this.submissionRepository.insertSubmissionStatus(submissionId, SUBMISSION_STATUS_TYPE.TEMPLATE_VALIDATED); + } catch (error) { + if (error instanceof SubmissionError) { + await this.errorService.insertSubmissionError(submissionId, error); + } else { + throw error; + } + } + } + + /** + * Process a DwCA file. + * + * @param {number} submissionId + * @memberof ValidationService + */ + async processDWCFile(submissionId: number) { + defaultLog.debug({ label: 'processDWCFile', submissionId }); + try { + // Prepare DwC + const dwcPrep = await this.dwcPreparation(submissionId); + + // Run DwC validations + const csvState = this.validateDWC(dwcPrep.archive); + + // Insert results of validation + await this.persistValidationResults(csvState.csv_state, csvState.media_state); + + // Insert validation complete status + await this.submissionRepository.insertSubmissionStatus(submissionId, SUBMISSION_STATUS_TYPE.TEMPLATE_VALIDATED); + + // Normalize DwC source + const normalizedDWC = this.normalizeDWCArchive(dwcPrep.archive); + + // Apply decorations to DwC + const decoratedDWC = await this.dwCService.decorateDwCJSON(normalizedDWC); + + await this.occurrenceService.updateDWCSourceForOccurrenceSubmission(submissionId, JSON.stringify(decoratedDWC)); + + // Run transforms to create and insert spatial components + await this.scrapeDwCAndUploadOccurrences(submissionId); + + const workbookBuffer = this.createWorkbookFromJSON(decoratedDWC); + + const { outputFileName, s3OutputKey } = await this.uploadDwCWorkbookToS3( + submissionId, + workbookBuffer, + dwcPrep.s3InputKey, + dwcPrep.archive + ); + + // Update occurrence submission with output filename and key + await this.occurrenceService.updateSurveyOccurrenceSubmissionWithOutputKey( + submissionId, + outputFileName, + s3OutputKey + ); + } catch (error) { + defaultLog.debug({ label: 'processDWCFile', message: 'error', error }); + if (error instanceof SubmissionError) { + await this.errorService.insertSubmissionError(submissionId, error); + } else { + throw error; + } + } + } + + /** + * Process an XLSX file. + * + * @param {number} submissionId + * @param {number} surveyId + * @memberof ValidationService + */ + async processXLSXFile(submissionId: number, surveyId: number) { + defaultLog.debug({ label: 'processXLSXFile', submissionId, surveyId }); + try { + // Prepare template + const submissionPrep = await this.templatePreparation(submissionId); + + // Run template validations + await this.templateValidation(submissionPrep.xlsx, surveyId); + + // Insert validation complete status + await this.submissionRepository.insertSubmissionStatus(submissionId, SUBMISSION_STATUS_TYPE.TEMPLATE_VALIDATED); + + // Run template transformations + const transformedObject = await this.templateTransformation(submissionPrep.xlsx, surveyId); + + // Insert transformation complete status + await this.submissionRepository.insertSubmissionStatus(submissionId, SUBMISSION_STATUS_TYPE.TEMPLATE_TRANSFORMED); + + // Apply decorations to DwC + const decoratedDWC = await this.dwCService.decorateDwCJSON(transformedObject); + + await this.occurrenceService.updateDWCSourceForOccurrenceSubmission(submissionId, JSON.stringify(decoratedDWC)); + + // Run transforms to create and insert spatial components + await this.scrapeDwCAndUploadOccurrences(submissionId); + + const workbookBuffer = this.createWorkbookFromJSON(decoratedDWC); + + const { outputFileName, s3OutputKey } = await this.uploadDwCWorkbookToS3( + submissionId, + workbookBuffer, + submissionPrep.s3InputKey, + submissionPrep.xlsx + ); + + // Update occurrence submission with output filename and key + await this.occurrenceService.updateSurveyOccurrenceSubmissionWithOutputKey( + submissionId, + outputFileName, + s3OutputKey + ); + } catch (error) { + defaultLog.debug({ label: 'processXLSXFile', message: 'error', error }); + if (error instanceof SubmissionError) { + await this.errorService.insertSubmissionError(submissionId, error); + } else { + throw error; + } + } + } + + validateDWC(archive: DWCArchive): ICsvMediaState { + defaultLog.debug({ label: 'validateDWC' }); + try { + const validationSchema = {}; + const rules = this.getValidationRules(validationSchema); + const csvState = this.validateDWCArchive(archive, rules); + + return csvState; + } catch (error) { + if (error instanceof SubmissionError) { + error.setStatus(SUBMISSION_STATUS_TYPE.FAILED_VALIDATION); + } + throw error; + } + } + + async dwcPreparation(submissionId: number): Promise<{ archive: DWCArchive; s3InputKey: string }> { + defaultLog.debug({ label: 'dwcPreparation', submissionId }); + try { + const occurrenceSubmission = await this.occurrenceService.getOccurrenceSubmission(submissionId); + const s3InputKey = occurrenceSubmission.input_key; + const s3File = await getFileFromS3(s3InputKey); + const archive = this.prepDWCArchive(s3File); + + return { archive, s3InputKey }; + } catch (error) { + if (error instanceof SubmissionError) { + error.setStatus(SUBMISSION_STATUS_TYPE.FAILED_PROCESSING_OCCURRENCE_DATA); + } + throw error; + } + } + + async templatePreparation(submissionId: number): Promise<{ s3InputKey: string; xlsx: XLSXCSV }> { + defaultLog.debug({ label: 'templatePreparation', submissionId }); + try { + const occurrenceSubmission = await this.occurrenceService.getOccurrenceSubmission(submissionId); + const s3InputKey = occurrenceSubmission.input_key; + const s3File = await getFileFromS3(s3InputKey); + const xlsx = this.prepXLSX(s3File); + + return { s3InputKey: s3InputKey, xlsx: xlsx }; + } catch (error) { + if (error instanceof SubmissionError) { + error.setStatus(SUBMISSION_STATUS_TYPE.FAILED_OCCURRENCE_PREPARATION); + } + throw error; + } + } + + async scrapeDwCAndUploadOccurrences(submissionId: number) { + defaultLog.debug({ label: 'scrapeDwCAndUploadOccurrences', submissionId }); + try { + await this.spatialService.runSpatialTransforms(submissionId); + } catch (error) { + if (error instanceof SubmissionError) { + error.setStatus(SUBMISSION_STATUS_TYPE.FAILED_PROCESSING_OCCURRENCE_DATA); + } + throw error; + } + } + + async templateValidation(xlsx: XLSXCSV, surveyId: number) { + defaultLog.debug({ label: 'templateValidation' }); + try { + const schema = await this.getValidationSchema(xlsx, surveyId); + const schemaParser = this.getValidationRules(schema); + const csvState = this.validateXLSX(xlsx, schemaParser); + await this.persistValidationResults(csvState.csv_state, csvState.media_state); + } catch (error) { + if (error instanceof SubmissionError) { + error.setStatus(SUBMISSION_STATUS_TYPE.FAILED_VALIDATION); + } + throw error; + } + } + + async templateTransformation(xlsx: XLSXCSV, surveyId: number) { + defaultLog.debug({ label: 'templateTransformation' }); + try { + const xlsxSchema = await this.getTransformationSchema(xlsx, surveyId); + + return this.transformXLSX(xlsx.workbook.rawWorkbook, xlsxSchema); + } catch (error) { + if (error instanceof SubmissionError) { + error.setStatus(SUBMISSION_STATUS_TYPE.FAILED_TRANSFORMED); + } + throw error; + } + } + + prepXLSX(file: any): XLSXCSV { + defaultLog.debug({ label: 'prepXLSX', message: 's3File' }); + const parsedMedia = parseUnknownMedia(file); + + // not sure how to trigger these through testing + if (!parsedMedia) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.UNSUPPORTED_FILE_TYPE); + } + + // not sure how to trigger these through testing + if (!(parsedMedia instanceof MediaFile)) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA); + } + + const xlsxCsv = new XLSXCSV(parsedMedia); + + const templateName = xlsxCsv.workbook.rawWorkbook.Custprops?.['sims_name']; + const templateVersion = xlsxCsv.workbook.rawWorkbook.Custprops?.['sims_version']; + + defaultLog.debug({ + label: 'prepXLSX', + message: 'template properties', + sims_name: templateName, + sims_version: templateVersion + }); + + if (!templateName || !templateVersion) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_TO_GET_TRANSFORM_SCHEMA); + } + + return xlsxCsv; + } + + async getTemplateMethodologySpeciesRecord(file: XLSXCSV, surveyId: number): Promise { + const templateName = file.workbook.rawWorkbook.Custprops?.['sims_name']; + const templateVersion = file.workbook.rawWorkbook.Custprops?.['sims_version']; + + const surveyData = await this.surveyService.getSurveyById(surveyId); + + const surveyFieldMethodId = surveyData.purpose_and_methodology.field_method_id; + const surveySpecies = surveyData.species.focal_species; + + return this.validationRepository.getTemplateMethodologySpeciesRecord( + templateName, + templateVersion, + surveyFieldMethodId, + surveySpecies + ); + } + + async getValidationSchema(file: XLSXCSV, surveyId: number): Promise { + const templateMethodologySpeciesRecord = await this.getTemplateMethodologySpeciesRecord(file, surveyId); + + const validationSchema = templateMethodologySpeciesRecord?.validation; + if (!validationSchema) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_GET_VALIDATION_RULES); + } + + return validationSchema; + } + + getValidationRules(schema: any): ValidationSchemaParser { + const validationSchemaParser = new ValidationSchemaParser(schema); + return validationSchemaParser; + } + + validateXLSX(file: XLSXCSV, parser: ValidationSchemaParser) { + // Run media validations + file.validateMedia(parser); + + const media_state = file.getMediaState(); + if (!media_state.isValid) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA); + } + + // Run CSV content validations + file.validateContent(parser); + const csv_state = file.getContentState(); + + return { csv_state, media_state }; + } + + /** + * Return normalized DwCA data + * + * @param {DWCArchive} dwcArchiveFile + * @return {*} {Record[]>} + * @memberof DarwinCoreService + */ + normalizeDWCArchive(dwcArchiveFile: DWCArchive): Record[]> { + const normalized: Record[]> = {}; + + Object.entries(dwcArchiveFile.worksheets).forEach(([worksheetName, worksheet]) => { + if (worksheet) { + normalized[worksheetName] = worksheet.getRowObjects(); + } + }); + + return normalized; + } + + async persistValidationResults(csvState: ICsvState[], mediaState: IMediaState): Promise { + defaultLog.debug({ label: 'persistValidationResults', message: 'validationResults' }); + + let parseError = false; + const errors: MessageError[] = []; + + mediaState.fileErrors?.forEach((fileError) => { + errors.push(new MessageError(SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA, `${fileError}`, 'Miscellaneous')); + }); + + csvState?.forEach((csvStateItem) => { + csvStateItem.headerErrors?.forEach((headerError) => { + errors.push( + new MessageError( + headerError.errorCode, + this.generateHeaderErrorMessage(csvStateItem.fileName, headerError), + headerError.errorCode + ) + ); + }); + + csvStateItem.rowErrors?.forEach((rowError) => { + errors.push( + new MessageError( + rowError.errorCode, + this.generateRowErrorMessage(csvStateItem.fileName, rowError), + rowError.errorCode + ) + ); + }); + + csvStateItem.keyErrors?.forEach((keyError) => { + errors.push( + new MessageError( + keyError.errorCode, + this.generateKeyErrorMessage(csvStateItem.fileName, keyError), + keyError.errorCode + ) + ); + }); + + if (!mediaState.isValid || csvState?.some((item) => !item.isValid)) { + // At least 1 error exists, skip remaining steps + parseError = true; + } + }); + + if (parseError) { + throw new SubmissionError({ messages: errors }); + } + + return parseError; + } + + async getTransformationSchema(file: XLSXCSV, surveyId: number): Promise { + const templateMethodologySpeciesRecord = await this.getTemplateMethodologySpeciesRecord(file, surveyId); + + const transformationSchema = templateMethodologySpeciesRecord?.transform; + + if (!transformationSchema) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_GET_TRANSFORMATION_RULES); + } + + return transformationSchema; + } + + transformXLSX(workbook: xlsx.WorkBook, transformSchema: TransformSchema): Record[]> { + const xlsxTransform = new XLSXTransform(workbook, transformSchema); + + return xlsxTransform.start(); + } + + createWorkbookFromJSON(preparedRowObjectsForJSONToSheet: Record[]>): IFileBuffer[] { + const dwcWorkbook = xlsx.utils.book_new(); + return Object.entries(preparedRowObjectsForJSONToSheet).map(([key, value]) => { + const worksheet = xlsx.utils.json_to_sheet(value); + + const newWorkbook = xlsx.utils.book_new(); + xlsx.utils.book_append_sheet(newWorkbook, worksheet, 'Sheet1'); + xlsx.utils.book_append_sheet(dwcWorkbook, worksheet, key); + + const buffer = xlsx.write(newWorkbook, { type: 'buffer', bookType: 'csv' }); + + return { + name: key, + buffer + } as IFileBuffer; + }); + } + + async uploadDwCWorkbookToS3( + submissionId: number, + fileBuffers: IFileBuffer[], + s3InputKey: string, + data: XLSXCSV | DWCArchive + ) { + defaultLog.debug({ + label: 'uploadDwCWorkbookToS3', + submissionId, + s3InputKey + }); + + // Build the archive zip file + const dwcArchiveZip = new AdmZip(); + fileBuffers.forEach((file) => dwcArchiveZip.addFile(`${file.name}.csv`, file.buffer)); + + // Remove the filename from original s3Key + // Example: project/1/survey/1/submission/file_name.txt -> project/1/survey/1/submission + const s3OutputKeyPrefix = s3InputKey.split('/').slice(0, -1).join('/'); + + const outputFileName = `${data.rawFile.name}_processed.zip`; + const s3OutputKey = `${s3OutputKeyPrefix}/${outputFileName}`; + + // Upload transformed archive to s3 + await uploadBufferToS3(dwcArchiveZip.toBuffer(), 'application/zip', s3OutputKey); + + return { outputFileName, s3OutputKey }; + } + + prepDWCArchive(s3File: any): DWCArchive { + defaultLog.debug({ label: 'prepDWCArchive', message: 's3File' }); + + const parsedMedia = parseUnknownMedia(s3File); + if (!parsedMedia) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA); + } + + if (!(parsedMedia instanceof ArchiveFile)) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.UNSUPPORTED_FILE_TYPE); + } + + const dwcArchive = new DWCArchive(parsedMedia); + return dwcArchive; + } + + validateDWCArchive(dwc: DWCArchive, parser: ValidationSchemaParser): ICsvMediaState { + defaultLog.debug({ label: 'validateDWCArchive', message: 'dwcArchive' }); + + // Run DwC media validations + dwc.validateMedia(parser); + + const media_state = dwc.getMediaState(); + if (!media_state.isValid) { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.INVALID_MEDIA); + } + + // Run DwC content validations + dwc.validateContent(parser); + const csv_state = dwc.getContentState(); + + return { csv_state, media_state }; + } + + /** + * Generates error messages relating to CSV headers. + * + * @param fileName + * @param headerError + * @returns {string} + */ + generateHeaderErrorMessage(fileName: string, headerError: IHeaderError): string { + return `${fileName} - ${headerError.message} - Column: ${headerError.col}`; + } + + /** + * Generates error messages relating to CSV rows. + * + * @param fileName + * @param rowError + * @returns {string} + */ + generateRowErrorMessage(fileName: string, rowError: IRowError): string { + return `${fileName} - ${rowError.message} - Column: ${rowError.col} - Row: ${rowError.row}`; + } + + /** + * Generates error messages relating to CSV workbook keys. + * + * @param fileName + * @param keyError + * @returns {string} + */ + generateKeyErrorMessage(fileName: string, keyError: IKeyError): string { + return `${fileName} - ${keyError.message} - Rows: ${keyError.rows.join(', ')}`; + } +} diff --git a/api/src/utils/db-constant-utils.ts b/api/src/utils/db-constant-utils.ts index 2c805586d0..960c6215c0 100644 --- a/api/src/utils/db-constant-utils.ts +++ b/api/src/utils/db-constant-utils.ts @@ -1,5 +1,5 @@ import { IDBConnection } from '../database/db'; -import { HTTP400 } from '../errors/custom-error'; +import { HTTP400 } from '../errors/http-error'; import { queries } from '../queries/queries'; /** diff --git a/api/src/utils/file-utils.test.ts b/api/src/utils/file-utils.test.ts index 557560f92d..f81c8cf3de 100644 --- a/api/src/utils/file-utils.test.ts +++ b/api/src/utils/file-utils.test.ts @@ -1,6 +1,16 @@ +import AWS from 'aws-sdk'; import { expect } from 'chai'; import { describe } from 'mocha'; -import { deleteFileFromS3, generateS3FileKey, getS3SignedURL } from './file-utils'; +import { + deleteFileFromS3, + generateS3FileKey, + getS3HostUrl, + getS3SignedURL, + _getClamAvScanner, + _getObjectStoreBucketName, + _getObjectStoreUrl, + _getS3Client +} from './file-utils'; describe('deleteFileFromS3', () => { it('returns null when no key specified', async () => { @@ -63,3 +73,111 @@ describe('generateS3FileKey', () => { expect(result).to.equal('projects/1/surveys/2/summaryresults/3/testFileName'); }); }); + +describe('getS3HostUrl', () => { + beforeEach(() => { + process.env.OBJECT_STORE_URL = 's3.host.example.com'; + process.env.OBJECT_STORE_BUCKET_NAME = 'test-bucket-name'; + }); + + it('should yield a default S3 host url', () => { + delete process.env.OBJECT_STORE_URL; + delete process.env.OBJECT_STORE_BUCKET_NAME; + + const result = getS3HostUrl(); + + expect(result).to.equal('nrs.objectstore.gov.bc.ca'); + }); + + it('should successfully produce an S3 host url', () => { + const result = getS3HostUrl(); + + expect(result).to.equal('s3.host.example.com/test-bucket-name'); + }); + + it('should successfully append a key to an S3 host url', () => { + const result = getS3HostUrl('my-test-file.txt'); + + expect(result).to.equal('s3.host.example.com/test-bucket-name/my-test-file.txt'); + }); +}); + +describe('_getS3Client', () => { + it('should return an S3 client', () => { + process.env.OBJECT_STORE_ACCESS_KEY_ID = 'aaaa'; + process.env.OBJECT_STORE_SECRET_KEY_ID = 'bbbb'; + + const result = _getS3Client(); + expect(result).to.be.instanceOf(AWS.S3); + }); +}); + +describe('_getClamAvScanner', () => { + it('should return a clamAv scanner client', () => { + process.env.ENABLE_FILE_VIRUS_SCAN = 'true'; + process.env.CLAMAV_HOST = 'host'; + process.env.CLAMAV_PORT = '1111'; + + const result = _getClamAvScanner(); + expect(result).to.not.be.null; + }); + + it('should return null if enable file virus scan is not set to true', () => { + process.env.ENABLE_FILE_VIRUS_SCAN = 'false'; + process.env.CLAMAV_HOST = 'host'; + process.env.CLAMAV_PORT = '1111'; + + const result = _getClamAvScanner(); + expect(result).to.be.null; + }); + + it('should return null if CLAMAV_HOST is not set', () => { + process.env.ENABLE_FILE_VIRUS_SCAN = 'true'; + delete process.env.CLAMAV_HOST; + process.env.CLAMAV_PORT = '1111'; + + const result = _getClamAvScanner(); + expect(result).to.be.null; + }); + + it('should return null if CLAMAV_PORT is not set', () => { + process.env.ENABLE_FILE_VIRUS_SCAN = 'true'; + process.env.CLAMAV_HOST = 'host'; + delete process.env.CLAMAV_PORT; + + const result = _getClamAvScanner(); + expect(result).to.be.null; + }); +}); + +describe('_getObjectStoreBucketName', () => { + it('should return an object store bucket name', () => { + process.env.OBJECT_STORE_BUCKET_NAME = 'test-bucket1'; + + const result = _getObjectStoreBucketName(); + expect(result).to.equal('test-bucket1'); + }); + + it('should return its default value', () => { + delete process.env.OBJECT_STORE_BUCKET_NAME; + + const result = _getObjectStoreBucketName(); + expect(result).to.equal(''); + }); +}); + +describe('_getObjectStoreUrl', () => { + it('should return an object store bucket name', () => { + process.env.OBJECT_STORE_URL = 'test-url1'; + + const result = _getObjectStoreUrl(); + expect(result).to.equal('test-url1'); + }); + + it('should return its default value', () => { + delete process.env.OBJECT_STORE_URL; + + const result = _getObjectStoreUrl(); + expect(result).to.equal('nrs.objectstore.gov.bc.ca'); + }); +}); diff --git a/api/src/utils/file-utils.ts b/api/src/utils/file-utils.ts index 23363f729d..1576dd81e5 100644 --- a/api/src/utils/file-utils.ts +++ b/api/src/utils/file-utils.ts @@ -1,26 +1,79 @@ import AWS from 'aws-sdk'; -import { DeleteObjectOutput, GetObjectOutput, ManagedUpload, Metadata } from 'aws-sdk/clients/s3'; +import { + DeleteObjectOutput, + GetObjectOutput, + HeadObjectOutput, + ListObjectsOutput, + ManagedUpload, + Metadata +} from 'aws-sdk/clients/s3'; import clamd from 'clamdjs'; import { S3_ROLE } from '../constants/roles'; +import { SUBMISSION_MESSAGE_TYPE } from '../constants/status'; +import { SubmissionErrorFromMessageType } from './submission-error'; -const ClamAVScanner = - (process.env.ENABLE_FILE_VIRUS_SCAN === 'true' && - process.env.CLAMAV_HOST && - process.env.CLAMAV_PORT && - clamd.createScanner(process.env.CLAMAV_HOST, Number(process.env.CLAMAV_PORT))) || - null; - -const OBJECT_STORE_BUCKET_NAME = process.env.OBJECT_STORE_BUCKET_NAME || ''; -const OBJECT_STORE_URL = process.env.OBJECT_STORE_URL || 'nrs.objectstore.gov.bc.ca'; -const AWS_ENDPOINT = new AWS.Endpoint(OBJECT_STORE_URL); -const S3 = new AWS.S3({ - endpoint: AWS_ENDPOINT.href, - accessKeyId: process.env.OBJECT_STORE_ACCESS_KEY_ID, - secretAccessKey: process.env.OBJECT_STORE_SECRET_KEY_ID, - signatureVersion: 'v4', - s3ForcePathStyle: true, - region: 'ca-central-1' -}); +/** + * Local getter for retrieving the ClamAV client. + * + * @returns {*} {clamd.ClamScanner | null} The ClamAV Scanner if `process.env.ENABLE_FILE_VIRUS_SCAN` is set to + * 'true' and other appropriate environment variables are set; `null` otherwise. + */ +export const _getClamAvScanner = (): clamd.ClamScanner | null => { + if (process.env.ENABLE_FILE_VIRUS_SCAN === 'true' && process.env.CLAMAV_HOST && process.env.CLAMAV_PORT) { + return clamd.createScanner(process.env.CLAMAV_HOST, Number(process.env.CLAMAV_PORT)); + } + + return null; +}; + +/** + * Local getter for retrieving the S3 client. + * + * @returns {*} {AWS.S3} The S3 client + */ +export const _getS3Client = (): AWS.S3 => { + const awsEndpoint = new AWS.Endpoint(_getObjectStoreUrl()); + + return new AWS.S3({ + endpoint: awsEndpoint.href, + accessKeyId: process.env.OBJECT_STORE_ACCESS_KEY_ID, + secretAccessKey: process.env.OBJECT_STORE_SECRET_KEY_ID, + signatureVersion: 'v4', + s3ForcePathStyle: true, + region: 'ca-central-1' + }); +}; + +/** + * Local getter for retrieving the S3 object store URL. + * + * @returns {*} {string} The object store URL + */ +export const _getObjectStoreUrl = (): string => { + return process.env.OBJECT_STORE_URL || 'nrs.objectstore.gov.bc.ca'; +}; + +/** + * Local getter for retrieving the S3 object store bucket name. + * + * @returns {*} {string} The object store bucket name + */ +export const _getObjectStoreBucketName = (): string => { + return process.env.OBJECT_STORE_BUCKET_NAME || ''; +}; + +/** + * Returns the S3 host URL. It optionally takes an S3 key as a parameter, which produces + * a full URL to the given file in S3. + * + * @export + * @param {string} [key] The key to an object in S3 + * @returns {*} {string} The s3 host URL + */ +export const getS3HostUrl = (key?: string): string => { + // Appends the given S3 object key, trimming between 0 and 2 trailing '/' characters + return `${_getObjectStoreUrl()}/${_getObjectStoreBucketName()}/${key || ''}`.replace(/\/{0,2}$/, ''); +}; /** * Delete a file from S3, based on its key. @@ -33,11 +86,12 @@ const S3 = new AWS.S3({ * @returns {Promise} the response from S3 or null if required parameters are null */ export async function deleteFileFromS3(key: string): Promise { - if (!key) { + const s3Client = _getS3Client(); + if (!key || !s3Client) { return null; } - return S3.deleteObject({ Bucket: OBJECT_STORE_BUCKET_NAME, Key: key }).promise(); + return s3Client.deleteObject({ Bucket: _getObjectStoreBucketName(), Key: key }).promise(); } /** @@ -56,14 +110,18 @@ export async function uploadFileToS3( key: string, metadata: Metadata = {} ): Promise { - return S3.upload({ - Bucket: OBJECT_STORE_BUCKET_NAME, - Body: file.buffer, - ContentType: file.mimetype, - Key: key, - ACL: S3_ROLE.AUTH_READ, - Metadata: metadata - }).promise(); + const s3Client = _getS3Client(); + + return s3Client + .upload({ + Bucket: _getObjectStoreBucketName(), + Body: file.buffer, + ContentType: file.mimetype, + Key: key, + ACL: S3_ROLE.AUTH_READ, + Metadata: metadata + }) + .promise(); } export async function uploadBufferToS3( @@ -72,14 +130,21 @@ export async function uploadBufferToS3( key: string, metadata: Metadata = {} ): Promise { - return S3.upload({ - Bucket: OBJECT_STORE_BUCKET_NAME, - Body: buffer, - ContentType: mimetype, - Key: key, - ACL: S3_ROLE.AUTH_READ, - Metadata: metadata - }).promise(); + const s3Client = _getS3Client(); + + return s3Client + .upload({ + Bucket: _getObjectStoreBucketName(), + Body: buffer, + ContentType: mimetype, + Key: key, + ACL: S3_ROLE.AUTH_READ, + Metadata: metadata + }) + .promise() + .catch(() => { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_UPLOAD_FILE_TO_S3); + }); } /** @@ -91,11 +156,45 @@ export async function uploadBufferToS3( * @return {*} {Promise} */ export async function getFileFromS3(key: string, versionId?: string): Promise { - return S3.getObject({ - Bucket: OBJECT_STORE_BUCKET_NAME, - Key: key, - VersionId: versionId - }).promise(); + const s3Client = _getS3Client(); + + return s3Client + .getObject({ + Bucket: _getObjectStoreBucketName(), + Key: key, + VersionId: versionId + }) + .promise() + .catch(() => { + throw SubmissionErrorFromMessageType(SUBMISSION_MESSAGE_TYPE.FAILED_GET_FILE_FROM_S3); + }); +} + +/** + * Fetchs a list of files in S3 at the given path + * + * @export + * @param {string} path the path (Prefix) of the directory in S3 + * @return {*} {Promise} All objects at the given path, also including + * the directory itself. + */ +export const listFilesFromS3 = async (path: string): Promise => { + const s3Client = _getS3Client(); + + return s3Client.listObjects({ Bucket: _getObjectStoreBucketName(), Prefix: path }).promise(); +}; + +/** + * Retrieves all metadata for the given S3 object, including custom HTTP headers. + * + * @export + * @param {string} key the key of the object + * @returns {*} {Promise { + const s3Client = _getS3Client(); + + return s3Client.headObject({ Bucket: _getObjectStoreBucketName(), Key: key }).promise(); } /** @@ -105,12 +204,14 @@ export async function getFileFromS3(key: string, versionId?: string): Promise} the response from S3 or null if required parameters are null */ export async function getS3SignedURL(key: string): Promise { - if (!key) { + const s3Client = _getS3Client(); + + if (!key || !s3Client) { return null; } - return S3.getSignedUrl('getObject', { - Bucket: OBJECT_STORE_BUCKET_NAME, + return s3Client.getSignedUrl('getObject', { + Bucket: _getObjectStoreBucketName(), Key: key, Expires: 300000 // 5 minutes }); @@ -160,6 +261,8 @@ export function generateS3FileKey(options: IS3FileKey): string { } export async function scanFileForVirus(file: Express.Multer.File): Promise { + const ClamAVScanner = _getClamAvScanner(); + // if virus scan is not to be performed/cannot be performed if (!ClamAVScanner) { return true; diff --git a/api/src/utils/keycloak-utils.test.ts b/api/src/utils/keycloak-utils.test.ts index f4423fa297..285adc64c0 100644 --- a/api/src/utils/keycloak-utils.test.ts +++ b/api/src/utils/keycloak-utils.test.ts @@ -1,172 +1,145 @@ import { expect } from 'chai'; import { describe } from 'mocha'; import { SYSTEM_IDENTITY_SOURCE } from '../constants/database'; -import { convertUserIdentitySource, getUserIdentifier, getUserIdentitySource } from './keycloak-utils'; +import { coerceUserIdentitySource, getUserGuid, getUserIdentifier, getUserIdentitySource } from './keycloak-utils'; -describe('getUserIdentifier', () => { - it('returns null response when null keycloakToken provided', () => { - const response = getUserIdentifier((null as unknown) as object); +describe('keycloakUtils', () => { + describe('getUserGuid', () => { + it('returns null response when null keycloakToken provided', () => { + const response = getUserGuid((null as unknown) as object); - expect(response).to.be.null; - }); - - it('returns null response when valid keycloakToken provided with no preferred_username', () => { - const response = getUserIdentifier({}); - - expect(response).to.be.null; - }); - - it('returns null response when valid keycloakToken provided with null preferred_username', () => { - const response = getUserIdentifier({ preferred_username: null }); - - expect(response).to.be.null; - }); - - it('returns null response when valid keycloakToken provided with no username', () => { - const response = getUserIdentifier({ preferred_username: '@source' }); - - expect(response).to.be.null; - }); - - it('returns non null response when valid keycloakToken provided', () => { - const response = getUserIdentifier({ preferred_username: 'username@source' }); - - expect(response).to.not.be.null; - }); -}); - -describe('getUserIdentitySource', () => { - it('returns null response when null keycloakToken provided', () => { - const response = getUserIdentitySource((null as unknown) as object); - - expect(response).to.equal(null); - }); - - it('returns null response when valid keycloakToken provided with no preferred_username', () => { - const response = getUserIdentitySource({}); - - expect(response).to.equal(null); - }); - - it('returns null response when valid keycloakToken provided with null preferred_username', () => { - const response = getUserIdentitySource({ preferred_username: null }); - - expect(response).to.equal(null); - }); - - it('returns null response when valid keycloakToken provided with no source', () => { - const response = getUserIdentitySource({ preferred_username: 'username' }); - - expect(response).to.equal(null); - }); + expect(response).to.be.null; + }); - it('returns non null response when valid keycloakToken provided with lowercase idir source', () => { - const response = getUserIdentitySource({ preferred_username: 'username@idir' }); + it('returns null response when a keycloakToken is provided with a missing preferred_username field', () => { + const response = getUserGuid({ idir_username: 'username' }); - expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.IDIR); - }); + expect(response).to.be.null; + }); - it('returns non null response when valid keycloakToken provided with lowercase bceid source', () => { - const response = getUserIdentitySource({ preferred_username: 'username@bceid' }); + it('returns their guid', () => { + const response = getUserGuid({ preferred_username: 'aaaaa@idir' }); - expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.BCEID); + expect(response).to.equal('aaaaa'); + }); }); - it('returns non null response when valid keycloakToken provided with lowercase bceid basic and business source', () => { - const response = getUserIdentitySource({ preferred_username: 'username@bceid-basic-and-business' }); + describe('getUserIdentifier', () => { + it('returns null response when null keycloakToken provided', () => { + const response = getUserIdentifier((null as unknown) as object); - expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.BCEID); - }); + expect(response).to.be.null; + }); - it('returns non null response when valid keycloakToken provided with lowercase database source', () => { - const response = getUserIdentitySource({ preferred_username: 'username@database' }); + it('returns null response when a keycloakToken is provided with a missing username field', () => { + const response = getUserIdentifier({ preferred_username: 'aaaaa@idir' }); - expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.DATABASE); - }); + expect(response).to.be.null; + }); - it('returns non null response when valid keycloakToken provided with uppercase idir source', () => { - const response = getUserIdentitySource({ preferred_username: 'username@IDIR' }); + it('returns the identifier from their IDIR username', () => { + const response = getUserIdentifier({ preferred_username: 'aaaaa@idir', idir_username: 'idiruser' }); - expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.IDIR); - }); + expect(response).to.equal('idiruser'); + }); - it('returns non null response when valid keycloakToken provided with uppercase bceid source', () => { - const response = getUserIdentitySource({ preferred_username: 'username@BCEID' }); + it('returns the identifier from their BCeID username', () => { + const response = getUserIdentifier({ preferred_username: 'aaaaa@idir', bceid_username: 'bceiduser' }); - expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.BCEID); + expect(response).to.equal('bceiduser'); + }); }); - it('returns non null response when valid keycloakToken provided with uppercase bceid basic and business source', () => { - const response = getUserIdentitySource({ preferred_username: 'username@BCEID-BASIC-AND-BUSINESS' }); + describe('getUserIdentitySource', () => { + it('returns non null response when null keycloakToken provided', () => { + const response = getUserIdentitySource((null as unknown) as object); - expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.BCEID); - }); + expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.DATABASE); + }); - it('returns non null response when valid keycloakToken provided with uppercase database source', () => { - const response = getUserIdentitySource({ preferred_username: 'username@DATABASE' }); + it('returns non null response when valid keycloakToken provided with no preferred_username', () => { + const response = getUserIdentitySource({}); - expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.DATABASE); - }); + expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.DATABASE); + }); - describe('convertUserIdentitySource', () => { - it('returns null response when null identity source provided', () => { - const response = convertUserIdentitySource((null as unknown) as string); + it('returns non null response when valid keycloakToken provided with null preferred_username', () => { + const response = getUserIdentitySource({ preferred_username: null }); - expect(response).to.equal(null); + expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.DATABASE); }); - it('returns null response when empty identity source provided', () => { - const response = convertUserIdentitySource(''); + it('returns non null response when valid keycloakToken provided with no source', () => { + const response = getUserIdentitySource({ preferred_username: 'username' }); - expect(response).to.equal(null); + expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.DATABASE); }); - it('returns non null response when lowercase idir source provided', () => { - const response = convertUserIdentitySource('idir'); + it('returns non null response when valid keycloakToken provided with idir source', () => { + const response = getUserIdentitySource({ preferred_username: 'username@idir' }); expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.IDIR); }); - it('returns non null response when lowercase bceid source provided', () => { - const response = convertUserIdentitySource('bceid'); + it('returns non null response when valid keycloakToken provided with bceid basic source', () => { + const response = getUserIdentitySource({ preferred_username: 'username@bceidbasic' }); - expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.BCEID); + expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.BCEID_BASIC); }); - it('returns non null response when lowercase bceid basic and business source provided', () => { - const response = convertUserIdentitySource('bceid-basic-and-business'); + it('returns non null response when valid keycloakToken provided with bceid business source', () => { + const response = getUserIdentitySource({ preferred_username: 'username@bceidbusiness' }); - expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.BCEID); + expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.BCEID_BUSINESS); }); - it('returns non null response when lowercase database source provided', () => { - const response = convertUserIdentitySource('database'); + it('returns non null response when valid keycloakToken provided with database source', () => { + const response = getUserIdentitySource({ preferred_username: 'username@database' }); expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.DATABASE); }); - it('returns non null response when uppercase idir source provided', () => { - const response = convertUserIdentitySource('IDIR'); + it('returns non null response when valid keycloakToken provided with system source', () => { + const response = getUserIdentitySource({ preferred_username: 'username@system' }); - expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.IDIR); + expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.SYSTEM); }); + }); - it('returns non null response when uppercase bceid source provided', () => { - const response = convertUserIdentitySource('BCEID'); + describe('coerceUserIdentitySource', () => { + it('should coerce empty string user identity to DATABASE', () => { + const response = coerceUserIdentitySource(''); + expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.DATABASE); + }); - expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.BCEID); + it('should coerce null string user identity to DATABASE', () => { + const response = coerceUserIdentitySource((null as unknown) as string); + expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.DATABASE); }); - it('returns non null response when uppercase bceid basic and business source provided', () => { - const response = convertUserIdentitySource('BCEID-BASIC-AND-BUSINESS'); + it('should coerce bceid basic user identity to BCEIDBASIC', () => { + const response = coerceUserIdentitySource('bceidbasic'); + expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.BCEID_BASIC); + }); - expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.BCEID); + it('should coerce bceid business user identity to BCEIDBUSINESS', () => { + const response = coerceUserIdentitySource('bceidbusiness'); + expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.BCEID_BUSINESS); }); - it('returns non null response when uppercase database source provided', () => { - const response = convertUserIdentitySource('DATABASE'); + it('should coerce idir user identity to IDIR', () => { + const response = coerceUserIdentitySource('idir'); + expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.IDIR); + }); + it('should coerce database user identity to DATABASE', () => { + const response = coerceUserIdentitySource('database'); expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.DATABASE); }); + + it('should coerce system user identity to SYSTEM', () => { + const response = coerceUserIdentitySource('system'); + expect(response).to.equal(SYSTEM_IDENTITY_SOURCE.SYSTEM); + }); }); }); diff --git a/api/src/utils/keycloak-utils.ts b/api/src/utils/keycloak-utils.ts index e32e7ceda6..b7869932b2 100644 --- a/api/src/utils/keycloak-utils.ts +++ b/api/src/utils/keycloak-utils.ts @@ -1,13 +1,15 @@ import { SYSTEM_IDENTITY_SOURCE } from '../constants/database'; -import { EXTERNAL_BCEID_IDENTITY_SOURCES, EXTERNAL_IDIR_IDENTITY_SOURCES } from '../constants/keycloak'; /** - * Parses out the preferred_username name from the token. + * Parses out the user's GUID from a keycloak token, which is extracted from the + * `preferred_username` property. + * + * @example getUserGuid({ preferred_username: 'aaabbaaa@idir' }) // => 'aaabbaaa' * * @param {object} keycloakToken * @return {*} {(string | null)} */ -export const getUserIdentifier = (keycloakToken: object): string | null => { +export const getUserGuid = (keycloakToken: object): string | null => { const userIdentifier = keycloakToken?.['preferred_username']?.split('@')?.[0]; if (!userIdentifier) { @@ -18,40 +20,71 @@ export const getUserIdentifier = (keycloakToken: object): string | null => { }; /** - * Parses out the preferred_username identity source (idir, bceid, etc) from the token. + * Parses out the preferred_username identity source ('idir', 'bceidbasic', etc.) from the token and maps it to a known + * `SYSTEM_IDENTITY_SOURCE`. If the `identity_provider` field in the keycloak token object is undefined, then the + * identity source is inferred from the `preferred_username` field as a contingency. + * + * @example getUserIdentitySource({ ...token, identity_provider: 'bceidbasic' }) => SYSTEM_IDENTITY_SOURCE.BCEID_BASIC + * @example getUserIdentitySource({ preferred_username: 'aaaa@idir' }) => SYSTEM_IDENTITY_SOURCE.IDIR * * @param {object} keycloakToken - * @return {*} {(SYSTEM_IDENTITY_SOURCE | null)} + * @return {*} {SYSTEM_IDENTITY_SOURCE} */ -export const getUserIdentitySource = (keycloakToken: object): SYSTEM_IDENTITY_SOURCE | null => { - const userIdentitySource = keycloakToken?.['preferred_username']?.split('@')?.[1]?.toUpperCase(); +export const getUserIdentitySource = (keycloakToken: object): SYSTEM_IDENTITY_SOURCE => { + const userIdentitySource: string = + keycloakToken?.['identity_provider'] || keycloakToken?.['preferred_username']?.split('@')?.[1]; - return convertUserIdentitySource(userIdentitySource); + return coerceUserIdentitySource(userIdentitySource); }; /** - * Converts an identity source string to a matching one supported by the database. + * Coerce the raw keycloak token identity provider value into an system identity source enum value. + * If the given user identity source string does not satisfy one of `SYSTEM_IDENTITY_SOURCE`, the return + * value defaults to `SYSTEM_IDENTITY_SOURCE.DATABASE`. * - * Why? Some identity sources ave multiple variations of their source string, which the get translated to a single - * variation so that the SIMS application doesn't have to account for every variation in its logic. + * @example coerceUserIdentitySource('idir') => 'idir' satisfies SYSTEM_IDENTITY_SOURCE.IDIR * - * @param {object} keycloakToken - * @return {*} {(SYSTEM_IDENTITY_SOURCE | null)} + * @param userIdentitySource the identity source string + * @returns {*} {SYSTEM_IDENTITY_SOURCE} the identity source belonging to type SYSTEM_IDENTITY_SOURCE */ -export const convertUserIdentitySource = (identitySource: string): SYSTEM_IDENTITY_SOURCE | null => { - const uppercaseIdentitySource = identitySource?.toUpperCase(); +export const coerceUserIdentitySource = (userIdentitySource: string | null): SYSTEM_IDENTITY_SOURCE => { + switch (userIdentitySource?.toUpperCase()) { + case SYSTEM_IDENTITY_SOURCE.BCEID_BASIC: + return SYSTEM_IDENTITY_SOURCE.BCEID_BASIC; - if (EXTERNAL_BCEID_IDENTITY_SOURCES.includes(uppercaseIdentitySource)) { - return SYSTEM_IDENTITY_SOURCE.BCEID; - } + case SYSTEM_IDENTITY_SOURCE.BCEID_BUSINESS: + return SYSTEM_IDENTITY_SOURCE.BCEID_BUSINESS; - if (EXTERNAL_IDIR_IDENTITY_SOURCES.includes(uppercaseIdentitySource)) { - return SYSTEM_IDENTITY_SOURCE.IDIR; + case SYSTEM_IDENTITY_SOURCE.IDIR: + return SYSTEM_IDENTITY_SOURCE.IDIR; + + case SYSTEM_IDENTITY_SOURCE.SYSTEM: + return SYSTEM_IDENTITY_SOURCE.SYSTEM; + + case SYSTEM_IDENTITY_SOURCE.DATABASE: + return SYSTEM_IDENTITY_SOURCE.DATABASE; + + default: + // Covers a user created directly in keycloak which wouldn't have an identity source + return SYSTEM_IDENTITY_SOURCE.DATABASE; } +}; + +/** + * Parses out the user's identifier from a keycloak token. + * + * @example getUserIdentifier({ ....token, bceid_username: 'jsmith@idir' }) => 'jsmith' + * + * @param {object} keycloakToken + * @return {*} {(string | null)} + */ +export const getUserIdentifier = (keycloakToken: object): string | null => { + const userIdentifier = + keycloakToken?.['idir_username'] || keycloakToken?.['bceid_username'] || keycloakToken?.['sims_system_username']; - if (uppercaseIdentitySource === SYSTEM_IDENTITY_SOURCE.DATABASE) { - return SYSTEM_IDENTITY_SOURCE.DATABASE; + if (!userIdentifier) { + return null; } - return null; + return userIdentifier; }; diff --git a/api/src/utils/logger.test.ts b/api/src/utils/logger.test.ts index c5f0c763d4..c5dae291a5 100644 --- a/api/src/utils/logger.test.ts +++ b/api/src/utils/logger.test.ts @@ -1,6 +1,7 @@ import { expect } from 'chai'; import { describe } from 'mocha'; -import { ApiError, ApiErrorType, HTTP500 } from '../errors/custom-error'; +import { ApiError, ApiErrorType } from '../errors/api-error'; +import { HTTP500 } from '../errors/http-error'; import { getPrintfFunction, ILoggerMessage, diff --git a/api/src/utils/logger.ts b/api/src/utils/logger.ts index c295bce782..8587486a99 100644 --- a/api/src/utils/logger.ts +++ b/api/src/utils/logger.ts @@ -1,5 +1,6 @@ import winston from 'winston'; -import { ApiError, HTTPError } from '../errors/custom-error'; +import { ApiError } from '../errors/api-error'; +import { HTTPError } from '../errors/http-error'; /** * Logger input. diff --git a/api/src/utils/media/csv/csv-file.test.ts b/api/src/utils/media/csv/csv-file.test.ts index 29c5a97750..38de369af7 100644 --- a/api/src/utils/media/csv/csv-file.test.ts +++ b/api/src/utils/media/csv/csv-file.test.ts @@ -2,7 +2,8 @@ import { expect } from 'chai'; import { describe } from 'mocha'; import sinon from 'sinon'; import xlsx from 'xlsx'; -import { CSVValidation, CSVWorkBook, CSVWorksheet, IHeaderError, IRowError } from './csv-file'; +import { SUBMISSION_MESSAGE_TYPE } from '../../../constants/status'; +import { CSVValidation, CSVWorkBook, CSVWorksheet, IHeaderError, IKeyError, IRowError } from './csv-file'; describe('CSVWorkBook', () => { it('constructs with no rawWorkbook param', () => { @@ -150,13 +151,13 @@ describe('CSVValidation', () => { expect(csvValidation).not.to.be.null; const headerError1: IHeaderError = { - errorCode: 'Duplicate Header', + errorCode: SUBMISSION_MESSAGE_TYPE.DUPLICATE_HEADER, message: 'a header error', col: 0 }; const headerError2: IHeaderError = { - errorCode: 'Unknown Header', + errorCode: SUBMISSION_MESSAGE_TYPE.UNKNOWN_HEADER, message: 'a second header error', col: 1 }; @@ -178,14 +179,14 @@ describe('CSVValidation', () => { expect(csvValidation).not.to.be.null; const rowError1: IRowError = { - errorCode: 'Missing Required Field', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_FIELD, message: 'a row error', col: 'col1', row: 1 }; const rowError2: IRowError = { - errorCode: 'Missing Required Field', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_FIELD, message: 'a second row error', col: 'col1', row: 2 @@ -210,21 +211,29 @@ describe('CSVValidation', () => { const fileError1 = 'a file error'; const headerError1: IHeaderError = { - errorCode: 'Duplicate Header', + errorCode: SUBMISSION_MESSAGE_TYPE.DUPLICATE_HEADER, message: 'a header error', col: 0 }; const rowError1: IRowError = { - errorCode: 'Missing Required Field', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_FIELD, message: 'a row error', col: 'col1', row: 1 }; + const keyError1: IKeyError = { + errorCode: SUBMISSION_MESSAGE_TYPE.DANGLING_PARENT_CHILD_KEY, + message: 'a key error', + colNames: ['col1', 'col2'], + rows: [2, 3, 4] + }; + csvValidation.addFileErrors([fileError1]); csvValidation.addHeaderErrors([headerError1]); csvValidation.addRowErrors([rowError1]); + csvValidation.addKeyErrors([keyError1]); const validationState = csvValidation.getState(); @@ -233,6 +242,7 @@ describe('CSVValidation', () => { fileErrors: [fileError1], headerErrors: [headerError1], rowErrors: [rowError1], + keyErrors: [keyError1], isValid: false }); }); diff --git a/api/src/utils/media/csv/csv-file.ts b/api/src/utils/media/csv/csv-file.ts index 4b37d1bb82..2270a55f67 100644 --- a/api/src/utils/media/csv/csv-file.ts +++ b/api/src/utils/media/csv/csv-file.ts @@ -1,7 +1,11 @@ import xlsx from 'xlsx'; +import { SUBMISSION_MESSAGE_TYPE } from '../../../constants/status'; +import { safeToLowerCase, safeTrim } from '../../string-utils'; import { IMediaState, MediaValidation } from '../media-file'; +import { getCellValue, getWorksheetRange, replaceCellDates, trimCellWhitespace } from '../xlsx/xlsx-utils'; export type CSVWorksheets = { [name: string]: CSVWorksheet }; +export type WorkBookValidators = { [name: string]: CSVValidation }; export class CSVWorkBook { rawWorkbook: xlsx.WorkBook; @@ -11,7 +15,7 @@ export class CSVWorkBook { constructor(workbook?: xlsx.WorkBook) { this.rawWorkbook = workbook || xlsx.utils.book_new(); - const worksheets = {}; + const worksheets: CSVWorksheets = {}; Object.entries(this.rawWorkbook.Sheets).forEach(([key, value]) => { worksheets[key] = new CSVWorksheet(key, value); @@ -19,6 +23,27 @@ export class CSVWorkBook { this.worksheets = worksheets; } + + /** + * Performs all of the given workbook validators on the workbook. Results of the validation + * are stored in the `csvValidation` property on each of the worksheets within the workbook. This + * method returns the corresponding validations in an object. + * + * @param {WorkBookValidator[]} validators A series of validators to be run on the workbook + * @return {*} {WorkBookValidation} A key-value pair representing all CSV validations for each worksheet, + * where the keys are the names of the worksheets and the values are the corresponding CSV validations. + * @memberof CSVWorkBook + */ + validate(validators: WorkBookValidator[]): WorkBookValidation { + validators.forEach((validator) => validator(this)); + + const validations: WorkBookValidation = {}; + Object.entries(this.worksheets).forEach(([key, value]) => { + validations[key] = value.csvValidation; + }); + + return validations; + } } export class CSVWorksheet { @@ -59,15 +84,13 @@ export class CSVWorksheet { return []; } - const ref = this.worksheet['!ref']; + const originalRange = getWorksheetRange(this.worksheet); - if (!ref) { + if (!originalRange) { return []; } if (!this._headers.length) { - const originalRange = xlsx.utils.decode_range(ref); - // Specify range to only include the 0th row (header row) const customRange: xlsx.Range = { ...originalRange, e: { ...originalRange.e, r: 0 } }; @@ -79,7 +102,7 @@ export class CSVWorksheet { if (aoaHeaders.length > 0) { // Parse the headers array from the array of arrays produced by calling `xlsx.utils.sheet_to_json` - this._headers = aoaHeaders[0].map((item) => item?.trim()); + this._headers = aoaHeaders[0].map(safeTrim); } } @@ -88,7 +111,7 @@ export class CSVWorksheet { getHeadersLowerCase(): string[] { if (!this._headersLowerCase.length) { - this._headersLowerCase = this.getHeaders().map((item) => item?.toLowerCase()); + this._headersLowerCase = this.getHeaders().map(safeToLowerCase); } return this._headersLowerCase; @@ -111,17 +134,15 @@ export class CSVWorksheet { return []; } - const ref = this.worksheet['!ref']; + const originalRange = getWorksheetRange(this.worksheet); - if (!ref) { + if (!originalRange) { return []; } if (!this._rows.length) { const rowsToReturn: string[][] = []; - const originalRange = xlsx.utils.decode_range(ref); - for (let i = 1; i <= originalRange.e.r; i++) { const row = new Array(this.getHeaders().length); let rowHasValues = false; @@ -129,17 +150,13 @@ export class CSVWorksheet { for (let j = 0; j <= originalRange.e.c; j++) { const cellAddress = { c: j, r: i }; const cellRef = xlsx.utils.encode_cell(cellAddress); - const cellValue = this.worksheet[cellRef]; + const cell = this.worksheet[cellRef]; - if (!cellValue) { + if (!cell) { continue; } - // Some cell types (like dates) store different interpretations of the raw value in different properties of - // the `cellValue`. In these cases, always try and return the string version `w`, before returning the - // raw version `v`. - // See https://www.npmjs.com/package/xlsx -> Cell Object - row[j] = cellValue.w || cellValue.v; + row[j] = getCellValue(trimCellWhitespace(replaceCellDates(cell))); rowHasValues = true; } @@ -205,6 +222,14 @@ export class CSVWorksheet { return row[headerIndex]; } + /** + * Runs all of the given validators on the worksheet, whereby the results of all validations + * are stored in `this.csvValidation`. + * + * @param {CSVValidator[]} validators A series of CSV validators to be run on the worksheet. + * @return {*} {CSVValidation} The result of all validations, namely `this.csvValidation`. + * @memberof CSVWorksheet + */ validate(validators: CSVValidator[]): CSVValidation { validators.forEach((validator) => validator(this)); @@ -213,38 +238,44 @@ export class CSVWorksheet { } export type CSVValidator = (csvWorksheet: CSVWorksheet) => CSVWorksheet; +export type WorkBookValidator = (csvWorkBook: CSVWorkBook) => CSVWorkBook; // ensure these error codes match the 'name' column in the table: submission_message_type -export type IHeaderErrorCode = - | 'Duplicate Header' - | 'Unknown Header' - | 'Missing Required Header' - | 'Missing Recommended Header' - | 'Miscellaneous'; - -export type IRowErrorCode = - | 'Missing Required Field' - | 'Unexpected Format' - | 'Out of Range' - | 'Invalid Value' - | 'Miscellaneous'; - export interface IHeaderError { - errorCode: IHeaderErrorCode; + errorCode: + | SUBMISSION_MESSAGE_TYPE.DUPLICATE_HEADER + | SUBMISSION_MESSAGE_TYPE.UNKNOWN_HEADER + | SUBMISSION_MESSAGE_TYPE.MISSING_RECOMMENDED_HEADER + | SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_HEADER; message: string; col: string | number; } export interface IRowError { - errorCode: IRowErrorCode; + errorCode: + | SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_FIELD + | SUBMISSION_MESSAGE_TYPE.OUT_OF_RANGE + | SUBMISSION_MESSAGE_TYPE.INVALID_VALUE + | SUBMISSION_MESSAGE_TYPE.UNEXPECTED_FORMAT + | SUBMISSION_MESSAGE_TYPE.NON_UNIQUE_KEY + | SUBMISSION_MESSAGE_TYPE.DANGLING_PARENT_CHILD_KEY; message: string; col: string; row: number; } + +export interface IKeyError { + errorCode: SUBMISSION_MESSAGE_TYPE.DANGLING_PARENT_CHILD_KEY; + message: string; + colNames: string[]; + rows: number[]; +} + export interface ICsvState extends IMediaState { headerErrors: IHeaderError[]; rowErrors: IRowError[]; + keyErrors: IKeyError[]; } /** @@ -257,12 +288,14 @@ export interface ICsvState extends IMediaState { export class CSVValidation extends MediaValidation { headerErrors: IHeaderError[]; rowErrors: IRowError[]; + keyErrors: IKeyError[]; constructor(fileName: string) { super(fileName); this.headerErrors = []; this.rowErrors = []; + this.keyErrors = []; } addHeaderErrors(errors: IHeaderError[]) { @@ -285,13 +318,24 @@ export class CSVValidation extends MediaValidation { } } + addKeyErrors(errors: IKeyError[]) { + this.keyErrors = this.keyErrors.concat(errors); + + if (errors?.length) { + this.isValid = false; + } + } + getState(): ICsvState { return { fileName: this.fileName, fileErrors: this.fileErrors, headerErrors: this.headerErrors, rowErrors: this.rowErrors, + keyErrors: this.keyErrors, isValid: this.isValid }; } } + +export type WorkBookValidation = { [name: string]: CSVValidation }; diff --git a/api/src/utils/media/csv/validation/csv-header-validator.test.ts b/api/src/utils/media/csv/validation/csv-header-validator.test.ts index 9ba17182de..baeec4f489 100644 --- a/api/src/utils/media/csv/validation/csv-header-validator.test.ts +++ b/api/src/utils/media/csv/validation/csv-header-validator.test.ts @@ -1,6 +1,7 @@ import { expect } from 'chai'; import { describe } from 'mocha'; import xlsx from 'xlsx'; +import { SUBMISSION_MESSAGE_TYPE } from '../../../../constants/status'; import { CSVWorksheet } from '../csv-file'; import { getDuplicateHeadersValidator, @@ -45,14 +46,14 @@ describe('getDuplicateHeadersValidator', () => { expect(csvWorkSheet.csvValidation.headerErrors).to.eql([ { - errorCode: 'Duplicate Header', + errorCode: SUBMISSION_MESSAGE_TYPE.DUPLICATE_HEADER, col: 'Header1', - message: 'Duplicate header' + message: 'Duplicate Header' }, { - errorCode: 'Duplicate Header', + errorCode: SUBMISSION_MESSAGE_TYPE.DUPLICATE_HEADER, col: 'Header2', - message: 'Duplicate header' + message: 'Duplicate Header' } ]); }); @@ -102,17 +103,17 @@ describe('hasRequiredHeadersValidator', () => { expect(csvWorkSheet.csvValidation.headerErrors).to.eql([ { - errorCode: 'Missing Required Header', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_HEADER, col: 'Header1', message: 'Missing required header' }, { - errorCode: 'Missing Required Header', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_HEADER, col: 'Header2', message: 'Missing required header' }, { - errorCode: 'Missing Required Header', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_HEADER, col: 'Header3', message: 'Missing required header' } @@ -134,12 +135,12 @@ describe('hasRequiredHeadersValidator', () => { expect(csvWorkSheet.csvValidation.headerErrors).to.eql([ { - errorCode: 'Missing Required Header', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_HEADER, col: 'Header3', message: 'Missing required header' }, { - errorCode: 'Missing Required Header', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_HEADER, col: 'Header5', message: 'Missing required header' } @@ -203,12 +204,12 @@ describe('getValidHeadersValidator', () => { expect(csvWorkSheet.csvValidation.headerErrors).to.eql([ { - errorCode: 'Unknown Header', + errorCode: SUBMISSION_MESSAGE_TYPE.UNKNOWN_HEADER, col: 'UnknownHeader2', message: 'Unsupported header' }, { - errorCode: 'Unknown Header', + errorCode: SUBMISSION_MESSAGE_TYPE.UNKNOWN_HEADER, col: 'UnknownHeader4', message: 'Unsupported header' } @@ -262,12 +263,12 @@ describe('hasRecommendedHeadersValidator', () => { expect(csvWorkSheet.csvValidation.headerErrors).to.eql([ { - errorCode: 'Missing Recommended Header', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_RECOMMENDED_HEADER, col: 'Header3', message: 'Missing recommended header' }, { - errorCode: 'Missing Recommended Header', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_RECOMMENDED_HEADER, col: 'Header5', message: 'Missing recommended header' } @@ -289,17 +290,17 @@ describe('hasRecommendedHeadersValidator', () => { expect(csvWorkSheet.csvValidation.headerErrors).to.eql([ { - errorCode: 'Missing Recommended Header', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_RECOMMENDED_HEADER, col: 'Header1', message: 'Missing recommended header' }, { - errorCode: 'Missing Recommended Header', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_RECOMMENDED_HEADER, col: 'Header2', message: 'Missing recommended header' }, { - errorCode: 'Missing Recommended Header', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_RECOMMENDED_HEADER, col: 'Header3', message: 'Missing recommended header' } diff --git a/api/src/utils/media/csv/validation/csv-header-validator.ts b/api/src/utils/media/csv/validation/csv-header-validator.ts index bc8d05a8aa..504977d211 100644 --- a/api/src/utils/media/csv/validation/csv-header-validator.ts +++ b/api/src/utils/media/csv/validation/csv-header-validator.ts @@ -1,3 +1,5 @@ +import { SUBMISSION_MESSAGE_TYPE } from '../../../../constants/status'; +import { safeToLowerCase, safeTrim } from '../../../string-utils'; import { CSVValidator } from '../csv-file'; /** @@ -19,8 +21,8 @@ export const getDuplicateHeadersValidator = (): CSVValidator => { if (seenHeaders.includes(header)) { csvWorksheet.csvValidation.addHeaderErrors([ { - errorCode: 'Duplicate Header', - message: 'Duplicate header', + errorCode: SUBMISSION_MESSAGE_TYPE.DUPLICATE_HEADER, + message: 'Duplicate Header', col: header } ]); @@ -60,25 +62,11 @@ export const hasRequiredHeadersValidator = (config?: FileRequiredHeaderValidator const headersLowerCase = csvWorksheet.getHeadersLowerCase(); - if (!headersLowerCase?.length) { - csvWorksheet.csvValidation.addHeaderErrors( - config.file_required_columns_validator.required_columns.map((requiredHeader) => { - return { - errorCode: 'Missing Required Header', - message: 'Missing required header', - col: requiredHeader - }; - }) - ); - - return csvWorksheet; - } - for (const requiredHeader of config.file_required_columns_validator.required_columns) { - if (!headersLowerCase.includes(requiredHeader.toLowerCase())) { + if (!headersLowerCase.includes(safeToLowerCase(requiredHeader))) { csvWorksheet.csvValidation.addHeaderErrors([ { - errorCode: 'Missing Required Header', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_HEADER, message: 'Missing required header', col: requiredHeader } @@ -120,7 +108,7 @@ export const hasRecommendedHeadersValidator = (config?: FileRecommendedHeaderVal csvWorksheet.csvValidation.addHeaderWarnings( config.file_recommended_columns_validator.recommended_columns.map((recommendedHeader) => { return { - errorCode: 'Missing Recommended Header', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_RECOMMENDED_HEADER, message: 'Missing recommended header', col: recommendedHeader }; @@ -131,10 +119,10 @@ export const hasRecommendedHeadersValidator = (config?: FileRecommendedHeaderVal } for (const recommendedHeader of config.file_recommended_columns_validator.recommended_columns) { - if (!headersLowerCase.includes(recommendedHeader.toLowerCase())) { + if (!headersLowerCase.includes(safeToLowerCase(recommendedHeader))) { csvWorksheet.csvValidation.addHeaderWarnings([ { - errorCode: 'Missing Recommended Header', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_RECOMMENDED_HEADER, message: 'Missing recommended header', col: recommendedHeader } @@ -175,12 +163,12 @@ export const getValidHeadersValidator = (config?: FileValidHeadersValidatorConfi for (const header of headers) { if ( !config.file_valid_columns_validator.valid_columns - .map((item) => item.toLowerCase()) - .includes(header.trim().toLowerCase()) + .map(safeToLowerCase) + .includes(safeToLowerCase(safeTrim(header))) ) { csvWorksheet.csvValidation.addHeaderWarnings([ { - errorCode: 'Unknown Header', + errorCode: SUBMISSION_MESSAGE_TYPE.UNKNOWN_HEADER, message: 'Unsupported header', col: header } diff --git a/api/src/utils/media/csv/validation/csv-row-validator.test.ts b/api/src/utils/media/csv/validation/csv-row-validator.test.ts index cf4976d857..85b1b6046c 100644 --- a/api/src/utils/media/csv/validation/csv-row-validator.test.ts +++ b/api/src/utils/media/csv/validation/csv-row-validator.test.ts @@ -1,20 +1,25 @@ import { expect } from 'chai'; import { describe } from 'mocha'; import xlsx from 'xlsx'; +import { SUBMISSION_MESSAGE_TYPE } from '../../../../constants/status'; import { CSVWorksheet } from '../csv-file'; import { + FileColumnUniqueValidatorConfig, getCodeValueFieldsValidator, getNumericFieldsValidator, getRequiredFieldsValidator, + getUniqueColumnsValidator, getValidFormatFieldsValidator, getValidRangeFieldsValidator } from './csv-row-validator'; describe('getRequiredFieldsValidator', () => { - it('adds no errors when required fields are not provided', () => { - const requiredFieldsByHeader: string[] = []; + it('adds no errors when required fields are populated', () => { + const requiredColumnsConfig = { + columnName: 'Header1' + }; - const validator = getRequiredFieldsValidator(requiredFieldsByHeader); + const validator = getRequiredFieldsValidator(requiredColumnsConfig); const xlsxWorkSheet = xlsx.utils.aoa_to_sheet([ ['Header1', 'Header2'], @@ -29,8 +34,11 @@ describe('getRequiredFieldsValidator', () => { }); it('adds no errors when header does not exist', () => { - const requiredFieldsByHeader: string[] = ['Header1', 'Header2']; // fields for these headers are required - const validator = getRequiredFieldsValidator(requiredFieldsByHeader); + const requiredColumnsConfig = { + columnName: 'Header1' + }; + + const validator = getRequiredFieldsValidator(requiredColumnsConfig); const xlsxWorkSheet = xlsx.utils.aoa_to_sheet([[], [5]]); @@ -41,10 +49,12 @@ describe('getRequiredFieldsValidator', () => { expect(csvWorkSheet.csvValidation.rowErrors).to.eql([]); }); - it('adds errors for every field if required fields are provided and there are zero data rows in the worksheet', () => { - const requiredFieldsByHeader: string[] = ['Header1', 'Header2']; // fields for these headers are required + it('adds no errors if there are zero rows in the worksheet', () => { + const requiredColumnsConfig = { + columnName: 'Header1' + }; - const validator = getRequiredFieldsValidator(requiredFieldsByHeader); + const validator = getRequiredFieldsValidator(requiredColumnsConfig); const xlsxWorkSheet = xlsx.utils.aoa_to_sheet([['Header1', 'Header2']]); // no data rows @@ -52,26 +62,15 @@ describe('getRequiredFieldsValidator', () => { validator(csvWorkSheet); - expect(csvWorkSheet.csvValidation.rowErrors).to.eql([ - { - col: 'Header1', - errorCode: 'Missing Required Field', - message: 'Missing required value for column', - row: 2 - }, - { - col: 'Header2', - errorCode: 'Missing Required Field', - message: 'Missing required value for column', - row: 2 - } - ]); + expect(csvWorkSheet.csvValidation.rowErrors).to.eql([]); }); it('adds errors for required fields that are empty', () => { - const requiredFieldsByHeader: string[] = ['Header1', 'Header2']; // fields for these headers are required + const requiredColumnsConfig = { + columnName: 'Header1' + }; - const validator = getRequiredFieldsValidator(requiredFieldsByHeader); + const validator = getRequiredFieldsValidator(requiredColumnsConfig); const xlsxWorkSheet = xlsx.utils.aoa_to_sheet([ ['Header1', 'Header2', 'Header3'], @@ -85,21 +84,23 @@ describe('getRequiredFieldsValidator', () => { expect(csvWorkSheet.csvValidation.rowErrors).to.eql([ { col: 'Header1', - errorCode: 'Missing Required Field', - message: 'Missing required value for column', + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_FIELD, + message: 'Value is required and cannot be empty', row: 2 } ]); }); - it('adds no errors if there are no invalid required fields', () => { - const requiredFieldsByHeader: string[] = ['Header1', 'Header2']; // fields for these headers are required + it('adds no errors if there are no empty required fields', () => { + const requiredColumnsConfig = { + columnName: 'Header1' + }; - const validator = getRequiredFieldsValidator(requiredFieldsByHeader); + const validator = getRequiredFieldsValidator(requiredColumnsConfig); const xlsxWorkSheet = xlsx.utils.aoa_to_sheet([ ['Header1', 'Header2', 'Header3'], - ['header2Data', 'Header2Data', ''] // valid fields + ['header1Data', 'Header2Data', ''] // valid fields ]); const csvWorkSheet = new CSVWorksheet('Sheet1', xlsxWorkSheet); @@ -181,7 +182,7 @@ describe('getCodeValueFieldsValidator', () => { expect(csvWorkSheet.csvValidation.rowErrors).to.eql([ { col: 'Header1', - errorCode: 'Invalid Value', + errorCode: SUBMISSION_MESSAGE_TYPE.INVALID_VALUE, message: 'Invalid value: invalidCode. Must be one of [Code1, Code2]', row: 2 } @@ -304,7 +305,7 @@ describe('getValidRangeFieldsValidator', () => { expect(csvWorkSheet.csvValidation.rowErrors).to.eql([ { col: 'Header1', - errorCode: 'Out of Range', + errorCode: SUBMISSION_MESSAGE_TYPE.OUT_OF_RANGE, message: 'Invalid value: 11. Value must be between 1 and 10 ', row: 2 } @@ -331,7 +332,7 @@ describe('getValidRangeFieldsValidator', () => { expect(csvWorkSheet.csvValidation.rowErrors).to.eql([ { col: 'Header1', - errorCode: 'Out of Range', + errorCode: SUBMISSION_MESSAGE_TYPE.OUT_OF_RANGE, message: 'Invalid value: 1. Value must be between 5 and 10 ', row: 2 } @@ -357,7 +358,7 @@ describe('getValidRangeFieldsValidator', () => { expect(csvWorkSheet.csvValidation.rowErrors).to.eql([ { col: 'Header1', - errorCode: 'Out of Range', + errorCode: SUBMISSION_MESSAGE_TYPE.OUT_OF_RANGE, message: 'Invalid value: 11. Value must be less than 10 ', row: 2 } @@ -383,7 +384,7 @@ describe('getValidRangeFieldsValidator', () => { expect(csvWorkSheet.csvValidation.rowErrors).to.eql([ { col: 'Header1', - errorCode: 'Out of Range', + errorCode: SUBMISSION_MESSAGE_TYPE.OUT_OF_RANGE, message: 'Invalid value: 4. Value must be greater than 5 ', row: 2 } @@ -410,7 +411,7 @@ describe('getValidRangeFieldsValidator', () => { expect(csvWorkSheet.csvValidation.rowErrors).to.eql([ { col: 'Header1', - errorCode: 'Invalid Value', + errorCode: SUBMISSION_MESSAGE_TYPE.INVALID_VALUE, message: 'Invalid value: a. Value must be a number ', row: 2 } @@ -467,7 +468,7 @@ describe('getNumericFieldsValidator', () => { expect(csvWorkSheet.csvValidation.rowErrors).to.eql([ { col: 'Header1', - errorCode: 'Invalid Value', + errorCode: SUBMISSION_MESSAGE_TYPE.INVALID_VALUE, message: 'Invalid value: a. Value must be a number ', row: 2 } @@ -571,7 +572,7 @@ describe('getValidFormatFieldsValidator', () => { expect(csvWorkSheet.csvValidation.rowErrors).to.eql([ { col: 'Header1', - errorCode: 'Unexpected Format', + errorCode: SUBMISSION_MESSAGE_TYPE.UNEXPECTED_FORMAT, message: 'Unexpected Format: WPT 1. Must be in the format "WPT X": WPT 11 (case sensitive)', row: 2 } @@ -598,10 +599,93 @@ describe('getValidFormatFieldsValidator', () => { expect(csvWorkSheet.csvValidation.rowErrors).to.eql([ { col: 'Header1', - errorCode: 'Unexpected Format', + errorCode: SUBMISSION_MESSAGE_TYPE.UNEXPECTED_FORMAT, message: 'Unexpected Format: WXT1. Must be in the format "WPT X": WPT 11 (case sensitive)', row: 2 } ]); }); + + describe('getValidFormatFieldsValidator', () => { + it('adds no errors when no config is supplied', () => { + const validator = getUniqueColumnsValidator(); + const worksheet = xlsx.utils.aoa_to_sheet([['Header1'], ['stuff']]); + const csvWorkSheet = new CSVWorksheet('Sheet', worksheet); + + validator(csvWorkSheet); + + expect(csvWorkSheet.csvValidation.rowErrors).to.be.empty; + }); + + it('adds no errors when no columns are specified in config', () => { + const config: FileColumnUniqueValidatorConfig = { + file_column_unique_validator: { + column_names: [''] + } + }; + const validator = getUniqueColumnsValidator(config); + const worksheet = xlsx.utils.aoa_to_sheet([['Header1'], ['stuff']]); + const csvWorkSheet = new CSVWorksheet('Sheet', worksheet); + + validator(csvWorkSheet); + + expect(csvWorkSheet.csvValidation.rowErrors).to.be.empty; + }); + + it('adds no errors when specified key column is missing from the worksheet', () => { + const config: FileColumnUniqueValidatorConfig = { + file_column_unique_validator: { + column_names: ['Header1', 'Header2'] + } + }; + const validator = getUniqueColumnsValidator(config); + const worksheet = xlsx.utils.aoa_to_sheet([['Header1'], ['stuff']]); + const csvWorkSheet = new CSVWorksheet('Sheet', worksheet); + + validator(csvWorkSheet); + + expect(csvWorkSheet.csvValidation.rowErrors).to.be.empty; + }); + + it('adds no errors when all keys specified are unique', () => { + const config: FileColumnUniqueValidatorConfig = { + file_column_unique_validator: { + column_names: ['Header1', 'Header2'] + } + }; + const validator = getUniqueColumnsValidator(config); + const worksheet = xlsx.utils.aoa_to_sheet([ + ['Header1', 'Header2', 'Header3'], + [1, 2, 3], + [2, 2, 3], + [3, 2, 3] + ]); + const csvWorkSheet = new CSVWorksheet('Sheet', worksheet); + + validator(csvWorkSheet); + + expect(csvWorkSheet.csvValidation.rowErrors).to.be.empty; + }); + + it('adds errors when not all keys are unique', () => { + const config: FileColumnUniqueValidatorConfig = { + file_column_unique_validator: { + column_names: ['Header1', 'Header2'] + } + }; + const validator = getUniqueColumnsValidator(config); + const worksheet = xlsx.utils.aoa_to_sheet([ + ['Header1', 'Header2', 'Header3'], + [1, 2, 3], + [2, 2, 3], + [2, 2, 3] + ]); + const csvWorkSheet = new CSVWorksheet('Sheet', worksheet); + + validator(csvWorkSheet); + + expect(csvWorkSheet.csvValidation.rowErrors).to.not.be.empty; + expect(csvWorkSheet.csvValidation.rowErrors[0].errorCode).to.be.eql(SUBMISSION_MESSAGE_TYPE.NON_UNIQUE_KEY); + }); + }); }); diff --git a/api/src/utils/media/csv/validation/csv-row-validator.ts b/api/src/utils/media/csv/validation/csv-row-validator.ts index a5ffcfd147..286347f199 100644 --- a/api/src/utils/media/csv/validation/csv-row-validator.ts +++ b/api/src/utils/media/csv/validation/csv-row-validator.ts @@ -1,57 +1,46 @@ +import { SUBMISSION_MESSAGE_TYPE } from '../../../../constants/status'; +import { safeToLowerCase } from '../../../string-utils'; import { CSVValidator } from '../csv-file'; +export type RequiredFieldsValidatorConfig = { + columnName: string; +}; + /** - * TODO needs updating to use new config style, etc. + * For a specified column, adds an error for each row whose column value is null, undefined or empty. + * + * @param {RequiredFieldsValidatorConfig} [config] + * @return {*} {CSVValidator} */ -export const getRequiredFieldsValidator = (requiredFieldsByHeader?: string[]): CSVValidator => { +export const getRequiredFieldsValidator = (config?: RequiredFieldsValidatorConfig): CSVValidator => { return (csvWorksheet) => { - if (!requiredFieldsByHeader?.length) { + if (!config) { return csvWorksheet; } const rows = csvWorksheet.getRows(); - - // If there are no rows, then add errors for all cells in the first data row based on the array of required headers - if (!rows?.length) { - csvWorksheet.csvValidation.addRowErrors( - requiredFieldsByHeader.map((requiredFieldByHeader) => { - return { - errorCode: 'Missing Required Field', - message: `Missing required value for column`, - col: requiredFieldByHeader, - row: 2 - }; - }) - ); - - return csvWorksheet; - } - const headersLowerCase = csvWorksheet.getHeadersLowerCase(); - // If there are rows, then check each cell in each row against the list of required headers, adding errors as needed rows.forEach((row, rowIndex) => { - for (const requiredFieldByHeader of requiredFieldsByHeader) { - const columnIndex = headersLowerCase.indexOf(requiredFieldByHeader.toLowerCase()); + const columnIndex = headersLowerCase.indexOf(safeToLowerCase(config.columnName)); - //if column does not exist, return csvWorksheet - if (columnIndex < 0) { - return csvWorksheet; - } + // if column does not exist, return + if (columnIndex < 0) { + return csvWorksheet; + } - const rowValueForColumn = row[columnIndex]; + const rowValueForColumn = row[columnIndex]; - // Add an error if the cell value is empty - if (rowValueForColumn === undefined || rowValueForColumn === null || rowValueForColumn === '') { - csvWorksheet.csvValidation.addRowErrors([ - { - errorCode: 'Missing Required Field', - message: `Missing required value for column`, - col: requiredFieldByHeader, - row: rowIndex + 2 - } - ]); - } + if (rowValueForColumn == undefined || rowValueForColumn === null || rowValueForColumn === '') { + // cell is empty when it is required, add an error for this cell + csvWorksheet.csvValidation.addRowErrors([ + { + errorCode: SUBMISSION_MESSAGE_TYPE.MISSING_REQUIRED_FIELD, + message: `Value is required and cannot be empty`, + col: config.columnName, + row: rowIndex + 2 + } + ]); } }); @@ -92,7 +81,7 @@ export const getCodeValueFieldsValidator = (config?: ColumnCodeValidatorConfig): const headersLowerCase = csvWorksheet.getHeadersLowerCase(); rows.forEach((row, rowIndex) => { - const columnIndex = headersLowerCase.indexOf(config.columnName.toLowerCase()); + const columnIndex = headersLowerCase.indexOf(safeToLowerCase(config.columnName)); // if column does not exist, return if (columnIndex < 0) { @@ -107,17 +96,17 @@ export const getCodeValueFieldsValidator = (config?: ColumnCodeValidatorConfig): } // compare allowed code values as lowercase strings - const allowedCodeValuesLowerCase: string[] = []; + const allowedCodeValuesLowerCase: (string | number)[] = []; const allowedCodeValues = config.column_code_validator.allowed_code_values.map((allowedCode) => { - allowedCodeValuesLowerCase.push(allowedCode.name?.toString().toLowerCase()); + allowedCodeValuesLowerCase.push(safeToLowerCase(allowedCode.name)); return allowedCode.name; }); // Add an error if the cell value is not one of the elements in the codeValues array - if (!allowedCodeValuesLowerCase.includes(rowValueForColumn?.toLowerCase())) { + if (!allowedCodeValuesLowerCase.includes(safeToLowerCase(rowValueForColumn))) { csvWorksheet.csvValidation.addRowErrors([ { - errorCode: 'Invalid Value', + errorCode: SUBMISSION_MESSAGE_TYPE.INVALID_VALUE, message: `Invalid value: ${rowValueForColumn}. Must be one of [${allowedCodeValues.join(', ')}]`, col: config.columnName, row: rowIndex + 2 @@ -159,7 +148,7 @@ export const getValidRangeFieldsValidator = (config?: ColumnRangeValidatorConfig const headersLowerCase = csvWorksheet.getHeadersLowerCase(); rows.forEach((row, rowIndex) => { - const columnIndex = headersLowerCase.indexOf(config.columnName.toLowerCase()); + const columnIndex = headersLowerCase.indexOf(safeToLowerCase(config.columnName)); // if column does not exist, return if (columnIndex < 0) { @@ -168,10 +157,15 @@ export const getValidRangeFieldsValidator = (config?: ColumnRangeValidatorConfig const rowValueForColumn = Number(row[columnIndex]); - if (isNaN(rowValueForColumn)) { + if (rowValueForColumn === undefined || rowValueForColumn === null) { + // cell is empty, use the getRequiredFieldsValidator to assert required fields + return csvWorksheet; + } + + if (isNaN(rowValueForColumn) && typeof row[columnIndex] === 'string') { csvWorksheet.csvValidation.addRowErrors([ { - errorCode: 'Invalid Value', + errorCode: SUBMISSION_MESSAGE_TYPE.INVALID_VALUE, message: `Invalid value: ${row[columnIndex]}. Value must be a number `, col: config.columnName, row: rowIndex + 2 @@ -188,7 +182,7 @@ export const getValidRangeFieldsValidator = (config?: ColumnRangeValidatorConfig // Add an error if the cell value is not in the correct range provided in the array csvWorksheet.csvValidation.addRowErrors([ { - errorCode: 'Out of Range', + errorCode: SUBMISSION_MESSAGE_TYPE.OUT_OF_RANGE, message: `Invalid value: ${rowValueForColumn}. Value must be between ${config.column_range_validator.min_value} and ${config.column_range_validator.max_value} `, col: config.columnName, row: rowIndex + 2 @@ -201,7 +195,7 @@ export const getValidRangeFieldsValidator = (config?: ColumnRangeValidatorConfig // Add an error if the cell value is not in the correct range provided in the array csvWorksheet.csvValidation.addRowErrors([ { - errorCode: 'Out of Range', + errorCode: SUBMISSION_MESSAGE_TYPE.OUT_OF_RANGE, message: `Invalid value: ${rowValueForColumn}. Value must be less than ${config.column_range_validator.max_value} `, col: config.columnName, row: rowIndex + 2 @@ -214,7 +208,7 @@ export const getValidRangeFieldsValidator = (config?: ColumnRangeValidatorConfig // Add an error if the cell value is not in the correct range provided in the array csvWorksheet.csvValidation.addRowErrors([ { - errorCode: 'Out of Range', + errorCode: SUBMISSION_MESSAGE_TYPE.OUT_OF_RANGE, message: `Invalid value: ${rowValueForColumn}. Value must be greater than ${config.column_range_validator.min_value} `, col: config.columnName, row: rowIndex + 2 @@ -255,7 +249,7 @@ export const getNumericFieldsValidator = (config?: ColumnNumericValidatorConfig) const headersLowerCase = csvWorksheet.getHeadersLowerCase(); rows.forEach((row, rowIndex) => { - const columnIndex = headersLowerCase.indexOf(config.columnName.toLowerCase()); + const columnIndex = headersLowerCase.indexOf(safeToLowerCase(config.columnName)); // if column does not exist, return if (columnIndex < 0) { @@ -271,7 +265,7 @@ export const getNumericFieldsValidator = (config?: ColumnNumericValidatorConfig) if (isNaN(rowValueForColumn)) { csvWorksheet.csvValidation.addRowErrors([ { - errorCode: 'Invalid Value', + errorCode: SUBMISSION_MESSAGE_TYPE.INVALID_VALUE, message: `Invalid value: ${row[columnIndex]}. Value must be a number `, col: config.columnName, row: rowIndex + 2 @@ -318,7 +312,7 @@ export const getValidFormatFieldsValidator = (config?: ColumnFormatValidatorConf const headersLowerCase = csvWorksheet.getHeadersLowerCase(); rows.forEach((row, rowIndex) => { - const columnIndex = headersLowerCase.indexOf(config.columnName.toLowerCase()); + const columnIndex = headersLowerCase.indexOf(safeToLowerCase(config.columnName)); // if column does not exist, return if (columnIndex < 0) { @@ -339,7 +333,7 @@ export const getValidFormatFieldsValidator = (config?: ColumnFormatValidatorConf if (!regex.test(rowValueForColumn)) { csvWorksheet.csvValidation.addRowErrors([ { - errorCode: 'Unexpected Format', + errorCode: SUBMISSION_MESSAGE_TYPE.UNEXPECTED_FORMAT, message: `Unexpected Format: ${rowValueForColumn}. ${config.column_format_validator.expected_format}`, col: config.columnName, row: rowIndex + 2 @@ -351,3 +345,58 @@ export const getValidFormatFieldsValidator = (config?: ColumnFormatValidatorConf return csvWorksheet; }; }; + +export type FileColumnUniqueValidatorConfig = { + file_column_unique_validator: { + column_names: string[]; + }; +}; + +export const getUniqueColumnsValidator = (config?: FileColumnUniqueValidatorConfig): CSVValidator => { + return (csvWorksheet) => { + if (!config) { + return csvWorksheet; + } + + if (config.file_column_unique_validator.column_names.length < 1) { + return csvWorksheet; + } + + const keySet = new Set(); + const rows = csvWorksheet.getRowObjects(); + const lowercaseHeaders = csvWorksheet.getHeadersLowerCase(); + + // find the indices of all provided column names in the worksheet + const columnIndices = config.file_column_unique_validator.column_names.map((column) => + lowercaseHeaders.indexOf(safeToLowerCase(column)) + ); + + // checks list of column indices if any are missing (-1) and returns early + if (columnIndices.includes(-1)) { + return csvWorksheet; + } + + rows.forEach((row, rowIndex) => { + const key = config.file_column_unique_validator.column_names + .map((columnIndex) => `${row[columnIndex] || ''}`.trim().toLowerCase()) + .join(', '); + // check if key exists already + if (!keySet.has(key)) { + keySet.add(key); + } else { + // duplicate key found + csvWorksheet.csvValidation.addRowErrors([ + { + errorCode: SUBMISSION_MESSAGE_TYPE.NON_UNIQUE_KEY, + message: `Duplicate key(s): ${key} found in column(s): ${config.file_column_unique_validator.column_names.join( + ', ' + )}. Keys must be unique for proper template transformation`, + col: key, + row: rowIndex + 2 + } + ]); + } + }); + return csvWorksheet; + }; +}; diff --git a/api/src/utils/media/dwc/dwc-archive-file.ts b/api/src/utils/media/dwc/dwc-archive-file.ts index 2cec9a2519..5b32a5a574 100644 --- a/api/src/utils/media/dwc/dwc-archive-file.ts +++ b/api/src/utils/media/dwc/dwc-archive-file.ts @@ -1,20 +1,23 @@ import xlsx from 'xlsx'; -import { CSVWorksheet, ICsvState } from '../csv/csv-file'; +import { CSVWorkBook, CSVWorksheet, ICsvState } from '../csv/csv-file'; import { ArchiveFile, IMediaState, MediaValidation } from '../media-file'; import { ValidationSchemaParser } from '../validation/validation-schema-parser'; export enum DWC_CLASS { + RECORD = 'record', EVENT = 'event', + LOCATION = 'location', OCCURRENCE = 'occurrence', MEASUREMENTORFACT = 'measurementorfact', RESOURCERELATIONSHIP = 'resourcerelationship', TAXON = 'taxon', - META = 'meta' + META = 'meta', + EML = 'eml' } export const DEFAULT_XLSX_SHEET = 'Sheet1'; -export type DWCWorksheets = { [name in DWC_CLASS]?: CSVWorksheet }; +export type DWCWorksheets = Partial<{ [name in DWC_CLASS]: CSVWorksheet }>; /** * Supports Darwin Core Archive CSV files. @@ -50,12 +53,24 @@ export class DWCArchive { _initArchiveFiles() { for (const rawFile of this.rawFile.mediaFiles) { switch (rawFile.name) { + case DWC_CLASS.RECORD: + this.worksheets[DWC_CLASS.RECORD] = new CSVWorksheet( + rawFile.name, + xlsx.read(rawFile.buffer).Sheets[DEFAULT_XLSX_SHEET] + ); + break; case DWC_CLASS.EVENT: this.worksheets[DWC_CLASS.EVENT] = new CSVWorksheet( rawFile.name, xlsx.read(rawFile.buffer).Sheets[DEFAULT_XLSX_SHEET] ); break; + case DWC_CLASS.LOCATION: + this.worksheets[DWC_CLASS.LOCATION] = new CSVWorksheet( + rawFile.name, + xlsx.read(rawFile.buffer).Sheets[DEFAULT_XLSX_SHEET] + ); + break; case DWC_CLASS.OCCURRENCE: this.worksheets[DWC_CLASS.OCCURRENCE] = new CSVWorksheet( rawFile.name, @@ -82,40 +97,85 @@ export class DWCArchive { break; case DWC_CLASS.META: this.extra[DWC_CLASS.META] = rawFile; + break; + case DWC_CLASS.EML: + this.extra[DWC_CLASS.EML] = rawFile; + break; } } } - isMediaValid(validationSchemaParser: ValidationSchemaParser): IMediaState { - const validators = validationSchemaParser.getSubmissionValidations(); + /** + * Makes a CSV workbook from the worksheets included in the DwC archive file, enabling us + * to run workbook validation on them. + * + * @return {*} {xlsx.WorkBook} The workbook made from all worksheets. + * @memberof DWCArchive + */ + _workbookFromWorksheets(): xlsx.WorkBook { + const workbook = xlsx.utils.book_new(); - const mediaValidation = this.validate(validators as DWCArchiveValidator[]); + Object.entries(this.worksheets).forEach(([key, worksheet]) => { + if (worksheet) { + xlsx.utils.book_append_sheet(workbook, worksheet, key); + } + }); - return mediaValidation.getState(); + return workbook; } - isContentValid(validationSchemaParser: ValidationSchemaParser): ICsvState[] { - const csvStates: ICsvState[] = []; + /** + * Runs all media-related validation for this DwC archive, based on given validation schema parser. + * @param validationSchemaParser The validation schema + * @returns {*} {void} + * @memberof DWCArchive + */ + validateMedia(validationSchemaParser: ValidationSchemaParser): void { + const validators = validationSchemaParser.getSubmissionValidations(); + + this.validate(validators as DWCArchiveValidator[]); + } - Object.keys(this.worksheets).forEach((fileName) => { + /** + * Runs all content and workbook-related validation for this DwC archive, based on the given validation + * schema parser. + * @param {ValidationSchemaParser} validationSchemaParser The validation schema + * @returns {*} {void} + * @memberof DWCArchive + */ + validateContent(validationSchemaParser: ValidationSchemaParser): void { + // Run workbook validators + const workbookValidators = validationSchemaParser.getWorkbookValidations(); + const csvWorkbook = new CSVWorkBook(this._workbookFromWorksheets()); + csvWorkbook.validate(workbookValidators); + + // Run content validators + Object.entries(this.worksheets).forEach(([fileName, worksheet]) => { const fileValidators = validationSchemaParser.getFileValidations(fileName); - const columnValidators = validationSchemaParser.getAllColumnValidations(fileName); - const validators = [...fileValidators, ...columnValidators]; - - const worksheet: CSVWorksheet = this.worksheets[fileName]; - - if (!worksheet) { - return; + if (worksheet) { + worksheet.validate([...fileValidators, ...columnValidators]); } - - const csvValidation = worksheet.validate(validators); - - csvStates.push(csvValidation.getState()); }); + } - return csvStates; + /** + * Returns the current media state belonging to the DwC archive file. + * @returns {*} {IMediaState} The state of the DwC archive media. + */ + getMediaState(): IMediaState { + return this.mediaValidation.getState(); + } + + /** + * Returns the current CSV states belonging to all worksheets in the DwC archive file. + * @returns {*} {ICsvState[]} The state of each worksheet in the archive file. + */ + getContentState(): ICsvState[] { + return Object.values(this.worksheets) + .filter((worksheet: CSVWorksheet | undefined): worksheet is CSVWorksheet => Boolean(worksheet)) + .map((worksheet: CSVWorksheet) => worksheet.csvValidation.getState()); } /** diff --git a/api/src/utils/media/media-utils.ts b/api/src/utils/media/media-utils.ts index c7e85bbe02..52f57fc049 100644 --- a/api/src/utils/media/media-utils.ts +++ b/api/src/utils/media/media-utils.ts @@ -56,7 +56,7 @@ export const parseUnknownS3File = (rawMedia: GetObjectOutput): null | MediaFile * Note: Ignores any directory structures, flattening all nested files into a single array. * * @param {Buffer} zipFile - * @return {*} {ArchiveFile} + * @return {*} {MediaFile[]} */ export const parseUnknownZipFile = (zipFile: Buffer): MediaFile[] => { const unzippedFile = new AdmZip(zipFile); diff --git a/api/src/utils/media/validation/file-type-and-content-validator.ts b/api/src/utils/media/validation/file-type-and-content-validator.ts index d697e50415..cbceca2f9d 100644 --- a/api/src/utils/media/validation/file-type-and-content-validator.ts +++ b/api/src/utils/media/validation/file-type-and-content-validator.ts @@ -1,3 +1,4 @@ +import { safeToLowerCase } from '../../string-utils'; import { DWCArchive, DWCArchiveValidator } from '../dwc/dwc-archive-file'; import { MediaValidator } from '../media-file'; import { XLSXCSV, XLSXCSVValidator } from '../xlsx/xlsx-file'; @@ -93,7 +94,7 @@ const checkRequiredFieldsInDWCArchive = (dwcArchive: DWCArchive, config: Submiss const fileNames = dwcArchive.rawFile.mediaFiles.map((mediaFile) => mediaFile.name); config.submission_required_files_validator.required_files.forEach((requiredFile) => { - if (!fileNames.includes(requiredFile.toLowerCase())) { + if (!fileNames.includes(safeToLowerCase(requiredFile))) { dwcArchive.mediaValidation.addFileErrors([`Missing required file: ${requiredFile}`]); } }); @@ -112,10 +113,10 @@ const checkRequiredFieldsInXLSXCSV = (xlsxCsv: XLSXCSV, config: SubmissionRequir return xlsxCsv; } - const worksheetNames = Object.keys(xlsxCsv.workbook.worksheets).map((item) => item.toLowerCase()); + const worksheetNames = Object.keys(xlsxCsv.workbook.worksheets).map(safeToLowerCase); config.submission_required_files_validator.required_files.forEach((requiredFile) => { - if (!worksheetNames.includes(requiredFile.toLowerCase())) { + if (!worksheetNames.includes(safeToLowerCase(requiredFile))) { xlsxCsv.mediaValidation.addFileErrors([`Missing required sheet: ${requiredFile}`]); } }); diff --git a/api/src/utils/media/validation/validation-schema-parser.test.ts b/api/src/utils/media/validation/validation-schema-parser.test.ts index 051913603d..e6dfb85a2b 100644 --- a/api/src/utils/media/validation/validation-schema-parser.test.ts +++ b/api/src/utils/media/validation/validation-schema-parser.test.ts @@ -27,6 +27,9 @@ const sampleValidationSchema = { }, { column_range_validator: {} + }, + { + column_required_validator: {} } ] } @@ -126,11 +129,12 @@ describe('ValidationSchemaParser', () => { const validators = validationSchemaParser.getAllColumnValidations('testFile1'); - expect(validators.length).to.equal(3); + expect(validators.length).to.equal(4); expect(typeof validators[0]).to.equal('function'); expect(typeof validators[1]).to.equal('function'); expect(typeof validators[2]).to.equal('function'); + expect(typeof validators[3]).to.equal('function'); }); }); @@ -150,7 +154,7 @@ describe('ValidationSchemaParser', () => { it('returns an array of validation schemas', () => { const validationSchemaParser = new ValidationSchemaParser(sampleValidationSchema); - const validationSchemas = validationSchemaParser.getSubmissionValidationSChemas(); + const validationSchemas = validationSchemaParser.getSubmissionValidationSchemas(); expect(validationSchemas).to.eql([ { mimetype_validator: {} }, diff --git a/api/src/utils/media/validation/validation-schema-parser.ts b/api/src/utils/media/validation/validation-schema-parser.ts index 472d22e6c8..c56b8ccad1 100644 --- a/api/src/utils/media/validation/validation-schema-parser.ts +++ b/api/src/utils/media/validation/validation-schema-parser.ts @@ -1,5 +1,5 @@ import jsonpath from 'jsonpath'; -import { CSVValidator } from '../csv/csv-file'; +import { CSVValidator, WorkBookValidator } from '../csv/csv-file'; import { getDuplicateHeadersValidator, getValidHeadersValidator, @@ -10,10 +10,12 @@ import { getCodeValueFieldsValidator, getNumericFieldsValidator, getRequiredFieldsValidator, + getUniqueColumnsValidator, getValidFormatFieldsValidator, getValidRangeFieldsValidator } from '../csv/validation/csv-row-validator'; import { DWCArchiveValidator } from '../dwc/dwc-archive-file'; +import { getParentChildKeyMatchValidator } from '../xlsx/validation/xlsx-validation'; import { XLSXCSVValidator } from '../xlsx/xlsx-file'; import { getFileEmptyValidator, @@ -35,6 +37,10 @@ export const ValidationRulesRegistry = { name: 'submission_required_files_validator', generator: getRequiredFilesValidator }, + { + name: 'workbook_parent_child_key_match_validator', + generator: getParentChildKeyMatchValidator + }, { name: 'file_duplicate_columns_validator', generator: getDuplicateHeadersValidator @@ -52,7 +58,7 @@ export const ValidationRulesRegistry = { generator: getValidHeadersValidator }, { - name: '', + name: 'column_required_validator', generator: getRequiredFieldsValidator }, { @@ -70,6 +76,10 @@ export const ValidationRulesRegistry = { { name: 'column_numeric_validator', generator: getNumericFieldsValidator + }, + { + name: 'file_column_unique_validator', + generator: getUniqueColumnsValidator } ], findMatchingRule(name: string): any { @@ -89,7 +99,7 @@ export class ValidationSchemaParser { } getSubmissionValidations(): (DWCArchiveValidator | XLSXCSVValidator)[] { - const validationSchemas = this.getSubmissionValidationSChemas(); + const validationSchemas = this.getSubmissionValidationSchemas(); const rules: (DWCArchiveValidator | XLSXCSVValidator)[] = []; @@ -144,6 +154,39 @@ export class ValidationSchemaParser { return rules; } + /** + * Retreives all validation rules for workbooks. Workbook validations differ from submission + * validations in that they alter the validation state of each worksheet within the workbook. + * @returns {*} {WorkBookValidator[]} All workbook validation rules for the given submission. + */ + getWorkbookValidations(): WorkBookValidator[] { + const validationSchemas = this.getWorkbookValidationSchemas(); + + const rules: WorkBookValidator[] = []; + + validationSchemas.forEach((validationSchema) => { + const keys = Object.keys(validationSchema); + + if (keys.length !== 1) { + return; + } + + const key = keys[0]; + + const generatorFunction = ValidationRulesRegistry.findMatchingRule(key); + + if (!generatorFunction) { + return; + } + + const rule = generatorFunction(validationSchema); + + rules.push(rule); + }); + + return rules; + } + getAllColumnValidations(fileName: string): CSVValidator[] { const columnNames = this.getColumnNames(fileName); @@ -186,10 +229,14 @@ export class ValidationSchemaParser { return rules; } - getSubmissionValidationSChemas(): object[] { + getSubmissionValidationSchemas(): object[] { return jsonpath.query(this.validationSchema, this.getSubmissionValidationsJsonPath())?.[0] || []; } + getWorkbookValidationSchemas(): object[] { + return jsonpath.query(this.validationSchema, this.getWorkbookValidationsJsonPath())?.[0] || []; + } + getFileValidationSchemas(fileName: string): object[] { let validationSchemas = jsonpath.query(this.validationSchema, this.getFileValidationsJsonPath(fileName))?.[0] || []; @@ -242,6 +289,10 @@ export class ValidationSchemaParser { return '$.validations'; } + getWorkbookValidationsJsonPath(): string { + return '$.workbookValidations'; + } + getFileValidationsJsonPath(fileName: string): string { return `$.files[?(@.name == '${fileName}')].validations`; } diff --git a/api/src/utils/media/xlsx/transformation/transformation-schema-parser.ts b/api/src/utils/media/xlsx/transformation/transformation-schema-parser.ts deleted file mode 100644 index ac670a34da..0000000000 --- a/api/src/utils/media/xlsx/transformation/transformation-schema-parser.ts +++ /dev/null @@ -1,106 +0,0 @@ -import jsonpath from 'jsonpath'; - -export type FlattenSchema = { - fileName: string; - uniqueId: string[]; - parent?: { fileName: string; uniqueId: string[] }; -}; - -export type TransformationFieldSchema = { - columns?: string[]; - separator?: string; - value?: any; - unique?: string; - condition?: Condition; -}; - -export type TransformationFieldsSchema = { - [key: string]: TransformationFieldSchema; -}; - -export type Condition = { - if: { - columns: string[]; - not?: boolean; - }; -}; - -export type PostTransformationRelatopnshipSchema = { - condition?: Condition; - relationship: { - spreadColumn: string; - uniqueIdColumn: 'string'; - }; -}; - -export type TransformSchema = { - condition?: Condition; - transformations: { - condition?: Condition; - fields: TransformationFieldsSchema; - }[]; - postTransformations?: PostTransformationRelatopnshipSchema[]; -}; - -export type ParseColumnSchema = { source: { columns?: string[]; value?: any }; target: string }; - -export type ParseSchema = { - fileName: string; - columns: ParseColumnSchema[]; - condition?: Condition; -}; -export class TransformationSchemaParser { - transformationSchema: object; - - constructor(transformationSchema: string | object) { - if (typeof transformationSchema === 'string') { - this.transformationSchema = this.parseJson(transformationSchema); - } else { - this.transformationSchema = transformationSchema; - } - } - - getAllFlattenSchemas(): FlattenSchema[] | [] { - return jsonpath.query(this.transformationSchema, this.getFlattenJsonPath())?.[0] || []; - } - - getFlattenSchemas(fileName: string): FlattenSchema | null { - return jsonpath.query(this.transformationSchema, this.getFlattenJsonPathByFileName(fileName))?.[0] || null; - } - - getTransformSchemas(): TransformSchema[] { - return jsonpath.query(this.transformationSchema, this.getTransformationJsonPath())?.[0] || []; - } - - getParseSchemas(): ParseSchema[] { - return jsonpath.query(this.transformationSchema, this.getParseJsonPath())?.[0] || []; - } - - getFlattenJsonPath(): string { - return `$.flatten`; - } - - getFlattenJsonPathByFileName(fileName: string): string { - return `$.flatten[?(@.fileName == '${fileName}')]`; - } - - getTransformationJsonPath(): string { - return '$.transform'; - } - - getParseJsonPath(): string { - return '$.parse'; - } - - parseJson(json: any): object { - let parsedJson; - - try { - parsedJson = JSON.parse(json); - } catch { - throw Error('TransformationSchemaParser - provided validationSchema was not valid JSON'); - } - - return parsedJson; - } -} diff --git a/api/src/utils/media/xlsx/transformation/xlsx-transform-json-path-queries.ts b/api/src/utils/media/xlsx/transformation/xlsx-transform-json-path-queries.ts new file mode 100644 index 0000000000..677fd2c974 --- /dev/null +++ b/api/src/utils/media/xlsx/transformation/xlsx-transform-json-path-queries.ts @@ -0,0 +1,60 @@ +import { + DWCColumnName, + JSONPathString, + MapFieldSchema, + TemplateColumnName, + TemplateSheetName +} from './xlsx-transform-schema-parser'; + +/** + * Get a json path query string that fetches one or more values within an element where `_name=`. + * + * @param {TemplateSheetName} templateSheetName + * @param {TemplateColumnName[]} templateColumnNames + * @return {*} {JSONPathString} + */ +export const getValuesByName = ( + templateSheetName: TemplateSheetName, + templateColumnNames: TemplateColumnName[] +): JSONPathString => `$..[?(@._name === '${templateSheetName}')]..['${templateColumnNames.join(',')}']`; + +/** + * Create a DWC map `MapFieldSchema` object from a static value. + * + * @param {DWCColumnName} dwcColumnName + * @param {string} staticValue + * @return {*} {MapFieldSchema} + */ +export const createValueField = (dwcColumnName: DWCColumnName, staticValue: string): MapFieldSchema => { + return { + columnName: dwcColumnName, + columnValue: [ + { + static: staticValue + } + ] + }; +}; + +/** + * Create a DWC map `MapFieldSchema` object from a single JSONPathString. + * + * @param {DWCColumnName} dwcColumnName + * @param {TemplateSheetName} templateSheetName + * @param {TemplateColumnName[]} templateSheetColumns + * @return {*} {MapFieldSchema} + */ +export const createPathField = ( + dwcColumnName: DWCColumnName, + templateSheetName: TemplateSheetName, + templateSheetColumns: TemplateColumnName[] +): MapFieldSchema => { + return { + columnName: dwcColumnName, + columnValue: [ + { + paths: templateSheetColumns.map((item) => getValuesByName(templateSheetName, [item])) + } + ] + }; +}; diff --git a/api/src/utils/media/xlsx/transformation/xlsx-transform-schema-parser.ts b/api/src/utils/media/xlsx/transformation/xlsx-transform-schema-parser.ts new file mode 100644 index 0000000000..c1af9e0d55 --- /dev/null +++ b/api/src/utils/media/xlsx/transformation/xlsx-transform-schema-parser.ts @@ -0,0 +1,339 @@ +import { JSONPath } from 'jsonpath-plus'; + +export type TemplateSheetName = string; +export type TemplateColumnName = string; + +export type DWCSheetName = string; +export type DWCColumnName = string; + +export type JSONPathString = string; + +export type ConditionSchema = { + type: 'and' | 'or'; + checks: IfNotEmptyCheck[]; +}; + +export type IfNotEmptyCheck = { + ifNotEmpty: JSONPathString; +}; + +export type TemplateMetaForeignKeySchema = { + sheetName: string; + primaryKey: string[]; +}; + +export type TemplateMetaSchema = { + /** + * The name of the template sheet. + * + * @type {string} + */ + sheetName: TemplateSheetName; + /** + * An array of json path query strings. + * + * @type {string[]} + */ + primaryKey: string[]; + parentKey: string[]; + type: 'root' | 'leaf' | ''; + foreignKeys: TemplateMetaForeignKeySchema[]; +}; + +export type MapColumnValuePostfixSchema = { + /** + * An array of json path query strings. + * + * If multiple query strings are provided, they will be fetched in order, and the first one that returns a non-empty + * value will be used. + * + * A single query string may return one value, or an array of values. + * + * @type {JSONPathString[]} + */ + paths?: JSONPathString[]; + /** + * A static value to append to the end of the final `paths` value. + * + * Note: + * - `unique` - If `static` is set to the string `unique`, at transformation time this will be replaced with a unique + * number. This number will be distinct from all other `unique` values, but not necessarily unique from other values + * in the transformed data (it is not a guid). + * + * If `static` is set in addition to `paths`, the `paths` will be ignored. + * + * @type {(string | 'unique')} + */ + static?: string | 'unique'; +}; + +export type MapColumnValueSchema = { + /** + * An array of json path query strings. + * + * If multiple query strings are provided, they will be fetched in order, and the first one that returns a non-empty + * value will be used. + * + * A single query string may return one value, or an array of values. + * + * If an array of values is returned, they will be joined using the specified `join` string. + * If `join` string is not specified, a colon `:` will be used as the default `join` string. + * + * @type {JSONPathString[]} + */ + paths?: JSONPathString[]; + /** + * A static value to be used, in place of any `paths`. + * + * If `static` is set in addition to `paths`, the `paths` will be ignored. + * + * @type {string} + */ + static?: string; + /** + * A string used to join multiple path values (if the `paths` query string returns multiple values that need joining). + * + * Defaults to a colon `:` if not provided. + * + * @type {string} + */ + join?: string; + /** + * A value to append to the end of the final `paths` value. + * + * Will be joined using the `join` value. + * + * @type {MapColumnValuePostfixSchema} + */ + postfix?: MapColumnValuePostfixSchema; + /** + * A condition, which contains one or more checks that must be met in order to proceed processing the schema element. + * + * @type {ConditionSchema} + */ + condition?: ConditionSchema; + /** + * An array of additional Schemas to add to process. Used to create additional records. + * + * @type {MapSchema[]} + */ + add?: MapSchema[]; +}; + +export type MapFieldSchema = { + /** + * The name of the DWC column (term). + * + * @type {DWCColumnName} + */ + columnName: DWCColumnName; + /** + * The schema that defines how the value of the column is produced. + * + * If multiple values are provided, the first one that passes all conditions (if any) and returns a non-empty path + * result will be used. and the remaining values will be skipped. + * + * @type {MapColumnValueSchema[]} + */ + columnValue: MapColumnValueSchema[]; +}; + +export type MapSchema = { + /** + * The name of the DWC sheet. + * + * @type {DWCSheetName} + */ + sheetName: DWCSheetName; + /** + * A condition, which contains one or more checks that must be met in order to proceed processing the schema element. + * + * @type {ConditionSchema} + */ + condition?: ConditionSchema; + /** + * An array of additional Schemas to add to process. Used to create additional records. + * + * @type {MapSchema[]} + */ + add?: MapSchema[]; + /** + * The schema tht defines all of the columns the be produced under this sheet. + * + * @type {MapFieldSchema[]} + */ + fields: MapFieldSchema[]; +}; + +export type DwcSchema = { + sheetName: string; + primaryKey: string[]; +}; + +export type TransformSchema = { + /** + * Defines the structure of the template, and any other relevant meta. + * + * The template, and the corresponding templateMeta definition, must correspond to a valid tree structure, with no loops. + * + * @type {TemplateMetaSchema[]} + */ + templateMeta: TemplateMetaSchema[]; + /** + * Defines the mapping from parsed raw template data to DarwinCore (DWC) templateMeta. + * + * @type {MapSchema[]} + */ + map: MapSchema[]; + /** + * Defines DWC specific meta needed by various steps of the transformation. + * + * @type {DwcSchema[]} + */ + dwcMeta: DwcSchema[]; +}; + +export type PreparedTransformSchema = TransformSchema & { + templateMeta: (TemplateMetaSchema & { distanceToRoot: number })[]; +}; + +/** + * Wraps a raw template transform config, modifying the config in preparation for use by the transformation engine, and + * providing additional helper functions for retrieving information from the config. + * + * @class XLSXTransformSchemaParser + */ +class XLSXTransformSchemaParser { + preparedTransformSchema: PreparedTransformSchema = { + templateMeta: [], + map: [], + dwcMeta: [] + }; + + /** + * Creates an instance of XLSXTransformSchemaParser. + * + * @param {TransformSchema} transformSchema + * @memberof XLSXTransformSchemaParser + */ + constructor(transformSchema: TransformSchema) { + this._processRawTransformSchema(transformSchema); + } + + /** + * Process the original transform schema, building a modified version that contains additional generated data used by + * the transform process. + * + * @param {TransformSchema} transformSchema + * @memberof XLSXTransformSchemaParser + */ + _processRawTransformSchema(transformSchema: TransformSchema) { + // prepare the `templateMeta` portion of the original transform schema + this.preparedTransformSchema.templateMeta = this._processTemplateMeta(transformSchema.templateMeta); + this.preparedTransformSchema.map = transformSchema.map; + this.preparedTransformSchema.dwcMeta = transformSchema.dwcMeta; + } + + /** + * Prepare the `templateMeta` portion of the transform schema. + * + * Recurse through the 'templateMeta' portion of the transform schema and build a modified version which has all items + * arranged in processing order (example: the root element is at index=0 in the array, etc) and where each item + * includes a new value `distanceToRoot` which indicates which tier of the tree that item is at (example: the root + * element is at `distanceToRoot=0`, its direct children are at `distanceToRoot=1`, etc) + * + * Note: This step could in be removed if the order of the transform schema was assumed to be correct by default and + * the `distanceToRoot` field was added to the type as a required field, and assumed to be set correctly. + * + * @param {TransformSchema['templateMeta']} templateMeta + * @return {*} {PreparedTransformSchema['templateMeta']} + * @memberof XLSXTransformSchemaParser + */ + _processTemplateMeta(templateMeta: TransformSchema['templateMeta']): PreparedTransformSchema['templateMeta'] { + const preparedTemplateMeta = []; + + const rootSheetSchema = Object.values(templateMeta).find((sheet) => sheet.type === 'root'); + + if (!rootSheetSchema) { + throw Error('No root template meta schema was defined'); + } + + preparedTemplateMeta.push({ ...rootSheetSchema, distanceToRoot: 0 }); + + const loop = (sheetNames: string[], distanceToRoot: number) => { + let nextSheetNames: string[] = []; + + sheetNames.forEach((sheetName) => { + const sheetSchema = Object.values(templateMeta).find((sheet) => sheet.sheetName === sheetName); + + if (!sheetSchema) { + return; + } + + preparedTemplateMeta.push({ ...sheetSchema, distanceToRoot: distanceToRoot }); + + nextSheetNames = nextSheetNames.concat(sheetSchema.foreignKeys.map((item) => item.sheetName)); + }); + + if (!nextSheetNames.length) { + return; + } + + loop(nextSheetNames, distanceToRoot + 1); + }; + + loop( + rootSheetSchema.foreignKeys.map((item) => item.sheetName), + 1 + ); + + return preparedTemplateMeta; + } + + /** + * Find and return the template meta config for a template sheet. + * + * Note: parses the `templateMeta` portion of the transform config. + * + * @param {string} sheetName + * @return {*} {(TemplateMetaSchema | undefined)} + * @memberof XLSXTransformSchemaParser + */ + getTemplateMetaConfigBySheetName(sheetName: string): TemplateMetaSchema | undefined { + return Object.values(this.preparedTransformSchema.templateMeta).find((sheet) => sheet.sheetName === sheetName); + } + + /** + * Get a list of all unique DWC sheet names. + * + * Note: parses the `map` portion of the transform config. + * + * @return {*} {string[]} + * @memberof XLSXTransformSchemaParser + */ + getDWCSheetNames(): string[] { + const names = JSONPath({ path: '$.[sheetName]', json: this.preparedTransformSchema.map }); + + return Array.from(new Set(names)); + } + + /** + * Find and return the dwc sheet keys for a DWC sheet. + * + * Note: parses the `dwcMeta` portion of the transform config. + * + * @param {string} sheetName + * @return {*} {string[]} + * @memberof XLSXTransformSchemaParser + */ + getDWCSheetKeyBySheetName(sheetName: string): string[] { + const result = JSONPath({ + path: `$..[?(@.sheetName === '${sheetName}' )][primaryKey]`, + json: this.preparedTransformSchema.dwcMeta + }); + + return result[0]; + } +} + +export default XLSXTransformSchemaParser; diff --git a/api/src/utils/media/xlsx/transformation/xlsx-transform-schema.test.ts b/api/src/utils/media/xlsx/transformation/xlsx-transform-schema.test.ts new file mode 100644 index 0000000000..9ba0e7ee5a --- /dev/null +++ b/api/src/utils/media/xlsx/transformation/xlsx-transform-schema.test.ts @@ -0,0 +1,12 @@ +import Ajv from 'ajv'; +import { expect } from 'chai'; +import { describe } from 'mocha'; +import { transformationConfigJSONSchema } from './xlsx-transform-schema'; + +describe('transformationJSONSchema', () => { + const ajv = new Ajv(); + + it('is valid json schema', () => { + expect(ajv.validateSchema(transformationConfigJSONSchema)).to.be.true; + }); +}); diff --git a/api/src/utils/media/xlsx/transformation/xlsx-transform-schema.ts b/api/src/utils/media/xlsx/transformation/xlsx-transform-schema.ts new file mode 100644 index 0000000000..a232b092b4 --- /dev/null +++ b/api/src/utils/media/xlsx/transformation/xlsx-transform-schema.ts @@ -0,0 +1,238 @@ +/** + * A JSON-Schema definition for a `TransformSchema`. + */ +export const transformationConfigJSONSchema = { + title: 'Transformation Schema', + type: 'object', + required: ['templateMeta', 'map', 'dwcMeta'], + properties: { + templateMeta: { + type: 'array', + description: + 'Defines the hierarchical structure of the template, which columns represent keys, and the parent-child relationship of the sheets. Used to de-normalize the template data.', + items: { + $ref: '#/$defs/TemplateMetaSchema' + } + }, + map: { + type: 'array', + description: + 'Defines the mapping operations that are executed against each flattened row of the template. Used to transform the template data into its corresponding DWC representation.', + items: { + $ref: '#/$defs/MapSchema' + } + }, + dwcMeta: { + type: 'array', + description: 'Defines the unique keys for each DWC sheet. Used to normalize the DWC data.', + items: { + $ref: '#/$defs/DwcMeta' + } + } + }, + $defs: { + TemplateMetaSchema: { + title: 'Sheet Schema', + type: 'object', + required: ['sheetName', 'primaryKey', 'parentKey', 'type', 'foreignKeys'], + properties: { + sheetName: { + type: 'string', + description: 'The name of the template sheet' + }, + primaryKey: { + type: 'array', + description: + 'An array of template column names which combined represent a unique key for rows in this sheet.', + items: { + type: 'string' + } + }, + parentKey: { + type: 'array', + description: + 'An array of template column names which combined represent a unique key for the parent row of rows in this sheet.', + items: { + type: 'string' + } + }, + type: { + type: 'string', + enum: ['root', 'leaf', ''] + }, + foreignKeys: { + type: 'array', + items: { + type: 'object', + description: 'An array of child template sheet objects.', + properties: { + sheetName: { + type: 'string', + description: 'The name of a child template sheet' + }, + primaryKey: { + type: 'array', + description: + 'An array of template column names which combined represent a unique key for child rows of this sheet.', + items: { + type: 'string', + description: 'A template column name.' + } + } + }, + additionalProperties: false + } + } + }, + additionalProperties: false + }, + MapSchema: { + title: 'Map Schema', + type: 'object', + required: ['sheetName', 'fields'], + properties: { + sheetName: { + type: 'string', + description: 'The name of the DWC sheet' + }, + condition: { + type: 'object', + description: + 'Defines a condition, which contains one or more checks that must be met in order to proceed processing this `MapSchema` item.', + properties: { + type: { + type: 'string', + enum: ['and', 'or'] + }, + checks: { + type: 'array', + items: { + type: 'object', + properties: { + ifNotEmpty: { + type: 'string' + } + }, + additionalProperties: false + } + } + }, + additionalProperties: false + }, + fields: { + type: 'array', + items: { + type: 'object', + properties: { + columnName: { + type: 'string' + }, + columnValue: { + type: 'array', + items: { + $ref: '#/$defs/MapColumnValueSchema' + } + } + }, + additionalProperties: false + } + }, + add: { + type: 'array', + items: { + $ref: '#/$defs/MapSchema' + } + } + }, + additionalProperties: false + }, + MapColumnValueSchema: { + title: 'MapColumnValueSchema', + type: 'object', + oneOf: [{ required: ['paths'] }, { required: ['static'] }], + properties: { + paths: { + type: 'array', + items: { + type: 'string' + } + }, + static: { + type: 'string' + }, + join: { + type: 'string', + description: 'A string used when concatenating columns to create keys.', + default: ':' + }, + postfix: { + type: 'object', + properties: { + paths: { + type: 'array', + items: { + type: 'string' + } + }, + static: { + type: 'string' + } + }, + additionalProperties: false + }, + condition: { + type: 'object', + description: + 'Defines a condition, which contains one or more checks that must be met in order to proceed processing this `MapColumnValueSchema` item.', + properties: { + type: { + type: 'string', + enum: ['and', 'or'] + }, + checks: { + type: 'array', + items: { + type: 'object', + properties: { + ifNotEmpty: { + type: 'string' + } + } + } + } + }, + additionalProperties: false + }, + add: { + type: 'array', + description: + 'An array of additional schemas to add to the process queue. Used to create additional records from within the context of the current schema being processed.', + items: { + $ref: '#/$defs/MapSchema' + } + } + }, + additionalProperties: false + }, + DwcMeta: { + title: 'Dwc Schema', + type: 'object', + properties: { + sheetName: { + type: 'string', + description: 'The name of the DWC sheet' + }, + primaryKey: { + type: 'array', + description: 'An array of DWC column names which combined represent a unique key for rows in this sheet.', + items: { + type: 'string', + description: 'A DWC column name.' + } + } + }, + additionalProperties: false + } + }, + additionalProperties: false +}; diff --git a/api/src/utils/media/xlsx/transformation/xlsx-transform-utils.ts b/api/src/utils/media/xlsx/transformation/xlsx-transform-utils.ts new file mode 100644 index 0000000000..277d68e28a --- /dev/null +++ b/api/src/utils/media/xlsx/transformation/xlsx-transform-utils.ts @@ -0,0 +1,91 @@ +/** + * Iterates over an object and returns an array of all unique combinations of values. + * + * @example + * const obj = { + * 'type1': [1, 2] + * 'type2': [A, B] + * } + * + * const result = getCombinations(obj); + * + * // result = [ + * // [ 1,A ], + * // [ 1,B ], + * // [ 2,A ], + * // [ 2,B ] + * // ] + * + * @example + * const obj = { + * 'type1': [1, 2] + * 'type2': [A] + * } + * + * const result = getCombinations(obj); + * + * // result = [ + * // [ 1,A ], + * // [ 2,A ], + * // ] + * + * @param {Record>} obj + * @returns An array of all combinations of the incoming `obj` values. + */ +export function getCombinations>(obj: O) { + let combos: { [k in keyof O]: O[k][number] }[] = []; + for (const key in obj) { + const values = obj[key]; + const all: typeof combos = []; + for (const value of values) { + for (let j = 0; j < (combos.length || 1); j++) { + const newCombo = { ...combos[j], [key]: value }; + all.push(newCombo); + } + } + combos = all; + } + return combos; +} + +/** + * Filters objects from an array based on the keys provided. + * + * @example + * const arrayOfObjects = [ + * {key: 1, name: 1, value: 1}, + * {key: 1, name: 2, value: 2}, + * {key: 1, name: 2, value: 3}, + * {key: 2, name: 3, value: 4} + * ] + * + * const result = filterDuplicateKeys(arrayOfObjects, ['key']); + * + * // result = [ + * // {key: 1, name: 2, value: 3}, + * // {key: 2, name: 3, value: 4} + * // ] + * + * const result = filterDuplicateKeys(arrayOfObjects, ['key', 'name']); + * + * // result = [ + * // {key: 1, name: 1, value: 1}, + * // {key: 1, name: 2, value: 3}, + * // {key: 2, name: 3, value: 4} + * // ] + * + * @param {Record[]} arrayOfObjects + * @param {string[]} keys + * @return {*} {object[]} + * @memberof XLSXTransform + */ +export function filterDuplicateKeys(arrayOfObjects: Record[], keys: string[]): object[] { + const keyValues: [string, any][] = arrayOfObjects.map((value) => { + const key = keys.map((k) => value[k]).join('|'); + return [key, value]; + }); + + const kvMap = new Map(keyValues); + + return [...kvMap.values()]; +} diff --git a/api/src/utils/media/xlsx/transformation/xlsx-transform.ts b/api/src/utils/media/xlsx/transformation/xlsx-transform.ts new file mode 100644 index 0000000000..451eae2682 --- /dev/null +++ b/api/src/utils/media/xlsx/transformation/xlsx-transform.ts @@ -0,0 +1,596 @@ +import jsonpatch, { Operation } from 'fast-json-patch'; +import { JSONPath, JSONPathOptions } from 'jsonpath-plus'; +import xlsx from 'xlsx'; +import { getWorksheetByName, getWorksheetRange, prepareWorksheetCells } from '../xlsx-utils'; +import XLSXTransformSchemaParser, { + ConditionSchema, + DWCColumnName, + DWCSheetName, + IfNotEmptyCheck, + JSONPathString, + TemplateColumnName, + TemplateMetaSchema, + TemplateSheetName, + TransformSchema +} from './xlsx-transform-schema-parser'; +import { filterDuplicateKeys, getCombinations } from './xlsx-transform-utils'; + +/** + * Defines a type that indicates a `Partial` value, but with some exceptions. + * + * @example + * type MyType = { + * val1: string, // required + * val2: number, // required + * val3: boolean // required + * } + * + * Partial = { + * val1?: string, // optional + * val2?: number, // optional + * val3?: noolean, // optional + * } + * + * AtLeast = { + * val1: string, // required + * val2: number, // required + * val3?: boolean // optional + * } + */ +type AtLeast = Partial & Pick; + +export type NonObjectPrimitive = string | number | null | boolean; + +export type RowObject = { + _data: { [key: string]: NonObjectPrimitive }; + _name: string; + _key: string; + _parentKey: string | ''; + _type: 'root' | 'leaf' | ''; + _row: number; + _childKeys: string[]; + _children: RowObject[]; +}; + +export class XLSXTransform { + workbook: xlsx.WorkBook; + schemaParser: XLSXTransformSchemaParser; + + _uniqueIncrement = 0; + + constructor(workbook: xlsx.WorkBook, schema: TransformSchema) { + this.workbook = workbook; + this.schemaParser = new XLSXTransformSchemaParser(schema); + } + + /** + * Run the transformation process. + * + * @memberof XLSXTransform + */ + start() { + // Prepare the raw data, by adding keys and other dwcMeta to the raw row objects + const preparedRowObjects = this.prepareRowObjects(); + + // Recurse through the data, and create a hierarchical structure for each logical record + const hierarchicalRowObjects = this.buildRowObjectsHierarchy(preparedRowObjects); + + // Iterate over the hierarchical row objects, mapping original values to their DWC equivalents + const processedHierarchicalRowObjects = this.processHierarchicalRowObjects(hierarchicalRowObjects); + + // Iterate over the Darwin Core records, group them by DWC sheet name, and remove duplicate records in each sheet + return this.prepareRowObjectsForJSONToSheet(processedHierarchicalRowObjects); + } + + /** + * Modifies the raw row objects returned by xlsx, and adds additional data (row numbers, keys, etc) that will be used + * in later steps of the transformation process. + * + * @return {*} {Record} + * @memberof XLSXTransform + */ + prepareRowObjects(): Record { + const output: Record = {}; + + this.workbook.SheetNames.forEach((sheetName) => { + const templateMetaSchema = this.schemaParser.getTemplateMetaConfigBySheetName(sheetName); + + if (!templateMetaSchema) { + // Skip worksheet as no transform schema was provided + return; + } + + const worksheet = getWorksheetByName(this.workbook, sheetName); + + // Trim all whitespace on string values + prepareWorksheetCells(worksheet); + + const range = getWorksheetRange(worksheet); + + if (!range) { + throw Error('Worksheet range is undefined'); + } + + const worksheetJSON = xlsx.utils.sheet_to_json>(worksheet, { + blankrows: false, + raw: true, + rawNumbers: false + }); + + const numberOfRows = range['e']['r']; + + const preparedRowObjects = this._prepareRowObjects(worksheetJSON, templateMetaSchema, numberOfRows); + + output[sheetName] = preparedRowObjects; + }); + + return output; + } + + _prepareRowObjects( + worksheetJSON: Record[], + templateMetaSchema: TemplateMetaSchema, + numberOfRows: number + ): RowObject[] { + const worksheetJSONWithKey: RowObject[] = []; + + for (let i = 0; i < numberOfRows; i++) { + const primaryKey = this._getKeyForRowObject(worksheetJSON[i], templateMetaSchema.primaryKey); + + if (!primaryKey) { + continue; + } + + const parentKey = this._getKeyForRowObject(worksheetJSON[i], templateMetaSchema.parentKey); + + const childKeys = templateMetaSchema.foreignKeys + .map((foreignKeys: { sheetName: TemplateColumnName; primaryKey: string[] }) => { + return this._getKeyForRowObject(worksheetJSON[i], foreignKeys.primaryKey); + }) + .filter((item): item is string => !!item); + + worksheetJSONWithKey.push({ + _data: { ...worksheetJSON[i] }, + _name: templateMetaSchema.sheetName, + _key: primaryKey, + _parentKey: parentKey, + _type: templateMetaSchema.type, + _row: i, + _childKeys: childKeys || [], + _children: [] + }); + } + + return worksheetJSONWithKey; + } + + _getKeyForRowObject(RowObject: Record, keyColumnNames: string[]): string { + if (!keyColumnNames.length) { + return ''; + } + + if (!RowObject || Object.getPrototypeOf(RowObject) !== Object.prototype || Object.keys(RowObject).length === 0) { + return ''; + } + + const primaryKey: string = keyColumnNames + .map((columnName: string) => { + return RowObject[columnName]; + }) + .filter((value) => !isNaN || value) + .join(':'); + + return primaryKey; + } + + /** + * De-normalize the original template data into a nested hierarchical object structure, based on the `templateMeta` + * portion of the transform config. + * + * @param {Record} preparedRowObjects + * @return {*} {{ _children: RowObject[] }} + * @memberof XLSXTransform + */ + buildRowObjectsHierarchy(preparedRowObjects: Record): { _children: RowObject[] } { + const hierarchicalRowObjects: { _children: RowObject[] } = { _children: [] }; + + for (const templateMetaItem of this.schemaParser.preparedTransformSchema.templateMeta) { + const sheetName = templateMetaItem.sheetName; + + const rowObjects = preparedRowObjects[sheetName]; + + if (!rowObjects) { + // No row objects for sheet + continue; + } + + const distanceToRoot = templateMetaItem.distanceToRoot; + if (distanceToRoot === 0) { + // These are root row objects, and can be added to the `hierarchicalRowObjects` array directly as they have no + // parent to be nested under + hierarchicalRowObjects._children = rowObjects; + + continue; + } + + // Add non-root row objects + for (const rowObjectsItem of rowObjects) { + const pathsToPatch: string[] = JSONPath({ + json: hierarchicalRowObjects, + path: `$${'._children[*]'.repeat(distanceToRoot - 1)}._children[?(@._childKeys.indexOf("${ + rowObjectsItem._parentKey + }") != -1)]`, + resultType: 'pointer' + }); + + if (pathsToPatch.length === 0) { + // Found no parent row object, even though this row object is a non-root row object + // This could indicate a possible error in the transform schema or the raw data + continue; + } + + const patchOperations: Operation[] = pathsToPatch.map((pathToPatch) => { + return { op: 'add', path: `${pathToPatch}/_children/`, value: rowObjectsItem }; + }); + + jsonpatch.applyPatch(hierarchicalRowObjects, patchOperations); + } + } + + return hierarchicalRowObjects; + } + + /** + * Map the original template data to their corresponding DWC terms, based on the operations in the `map` portion + * of the transform config. + * + * @param {{ + * _children: RowObject[]; + * }} hierarchicalRowObjects + * @return {*} {Record[]>[]} + * @memberof XLSXTransform + */ + processHierarchicalRowObjects(hierarchicalRowObjects: { + _children: RowObject[]; + }): Record[]>[] { + const mapRowObjects: Record[]>[] = []; + + // For each hierarchicalRowObjects + for (const hierarchicalRowObjectsItem of hierarchicalRowObjects._children) { + const flattenedRowObjects = this._flattenHierarchicalRowObject(hierarchicalRowObjectsItem); + + for (const flattenedRowObjectsItem of flattenedRowObjects) { + const result = this._mapFlattenedRowObject(flattenedRowObjectsItem as RowObject[]); + + mapRowObjects.push(result); + } + } + + return mapRowObjects; + } + + _flattenHierarchicalRowObject(hierarchicalRowObject: RowObject) { + const flattenedRowObjects: AtLeast[][] = [ + // Wrap the root element in `_children` so that the looping logic doesn't have to distinguish between the root + // element and subsequent children elements, it can just always grab the `_children`, of which the first one + // just so happens to only contain the root element. + [{ _children: [{ ...hierarchicalRowObject }] }] + ]; + + const prepGetCombinations = (source: AtLeast[]): Record => { + const prepGetCombinations: Record = {}; + + for (const sourceItem of source) { + if (sourceItem._type === 'leaf') { + // This node is marked as a leaf, so do not descend into its children. + continue; + } + + const children = sourceItem._children; + + for (const childrenItem of children) { + if (!prepGetCombinations[childrenItem._name]) { + prepGetCombinations[childrenItem._name] = []; + } + + prepGetCombinations[childrenItem._name].push(childrenItem); + } + } + + return prepGetCombinations; + }; + + const loop = (index: number, source: AtLeast[]) => { + // Grab all of the children of the current `source` and build an object in the format needed by the `getCombinations` + // function. + const preppedForGetCombinations = prepGetCombinations(source); + + // Loop over the prepped records, and build an array of objects which contain all of the possible combinations + // of the records. See function for more details. + const combinations = getCombinations(preppedForGetCombinations); + + if (combinations.length === 0) { + // No combinations elements, which means there were no children to process, indicating we've reached the end of + // the tree + return; + } + + if (combinations.length > 1) { + // This for loop is intentionally looping backwards, and stopping 1 element short of the 0'th element. + // This is because we only want to process the additional elements, pushing them onto the array, and leaving + // the code further below to handle the 0'th element, which will be set at the current `index` + for (let getCombinationsIndex = combinations.length - 1; getCombinationsIndex > 0; getCombinationsIndex--) { + let newSource: AtLeast[] = []; + for (const sourceItem of source) { + if (Object.keys(sourceItem).length <= 1) { + continue; + } + newSource.push({ ...sourceItem, _children: [] }); + } + newSource = newSource.concat(Object.values(combinations[getCombinationsIndex])); + flattenedRowObjects.push(newSource); + } + } + + // Handle the 0'th element of `combinations`, setting the `newSource` at whatever the current `index` is + let newSource: AtLeast[] = []; + for (const sourceItem of source) { + if (Object.keys(sourceItem).length <= 1) { + continue; + } + newSource.push({ ...sourceItem, _children: [] }); + } + newSource = newSource.concat(Object.values(combinations[0])); + flattenedRowObjects[index] = newSource; + + // Recurse into the newSource + loop(index, newSource); + }; + + // For each element in `flattenedRowObjects`, recursively descend through its children, flattening them as we + // go. If 2 children are of the same type, then push a copy of the current `flattenedRowObjects` element onto + // the `flattenedRowObjects` array, which will be processed on the next iteration of the for loop. + for (const [flatIndex, flattenedRowObjectsItem] of flattenedRowObjects.entries()) { + loop(flatIndex, flattenedRowObjectsItem); + } + + return flattenedRowObjects; + } + + _mapFlattenedRowObject(flattenedRow: RowObject[]) { + const output: Record[]> = {}; + + const indexBySheetName: Record = {}; + + const mapSchema = [...this.schemaParser.preparedTransformSchema.map]; + + // For each sheet + for (const mapSchemaItem of mapSchema) { + // Check conditions, if any + const sheetCondition = mapSchemaItem.condition; + if (sheetCondition) { + if (!this._processCondition(sheetCondition, flattenedRow)) { + // Conditions not met, skip processing this item + continue; + } + } + + const sheetName = mapSchemaItem.sheetName; + + if (!output[sheetName]) { + output[sheetName] = []; + indexBySheetName[sheetName] = 0; + } else { + indexBySheetName[sheetName] = indexBySheetName[sheetName] + 1; + } + + const fields = mapSchemaItem.fields; + + if (fields && fields.length) { + // For each item in the `fields` array + for (const fieldsItem of fields) { + // The final computed cell value for this particular schema field element + let cellValue = ''; + + const columnName = fieldsItem.columnName; + const columnValue = fieldsItem.columnValue; + + // For each item in the `columnValue` array + for (const columnValueItem of columnValue) { + // Check conditions, if any + const columnValueItemCondition = columnValueItem.condition; + if (columnValueItemCondition) { + if (!this._processCondition(columnValueItemCondition, flattenedRow)) { + // Conditions not met, skip processing this item + continue; + } + } + + // Check for static value + const columnValueItemValue = columnValueItem.static; + if (columnValueItemValue) { + // cell value is a static value + cellValue = columnValueItemValue; + } + + // Check for path value(s) + const columnValueItemPaths = columnValueItem.paths; + if (columnValueItemPaths) { + const pathValues = this._processPaths(columnValueItemPaths, flattenedRow); + + let pathValue = ''; + if (Array.isArray(pathValues)) { + // cell value is the concatenation of multiple values + pathValue = (pathValues.length && pathValues.flat(Infinity).join(columnValueItem.join || ':')) || ''; + } else { + // cell value is a single value + pathValue = pathValues || ''; + } + + cellValue = pathValue; + + // Add the optional postfix + const columnValueItemPostfix = columnValueItem.postfix; + if (cellValue && columnValueItemPostfix) { + let postfixValue = ''; + + if (columnValueItemPostfix.static) { + postfixValue = columnValueItemPostfix.static; + + if (columnValueItemPostfix.static === 'unique') { + postfixValue = String(this._getNextUniqueNumber()); + } + } + + if (columnValueItemPostfix.paths) { + const postfixPathValues = this._processPaths(columnValueItemPostfix.paths, flattenedRow); + + if (Array.isArray(postfixPathValues)) { + // postfix value is the concatenation of multiple values + postfixValue = + (postfixPathValues.length && postfixPathValues.join(columnValueItem.join || ':')) || ''; + } else { + // postfix value is a single value + postfixValue = postfixPathValues || ''; + } + } + + cellValue = `${cellValue}${columnValueItem.join || ':'}${postfixValue}`; + } + } + + // Check for `add` additions at the field level + const columnValueItemAdd = columnValueItem.add; + if (columnValueItemAdd && columnValueItemAdd.length) { + for (const columnValueItemAddItem of columnValueItemAdd) { + mapSchema.push(columnValueItemAddItem); + } + } + + if (cellValue) { + // One of the columnValue array items yielded a non-empty cell value, skip any remaining columnValue items. + break; + } + } + + // add the cell key value + output[sheetName][indexBySheetName[sheetName]] = { + ...output[sheetName][indexBySheetName[sheetName]], + [columnName]: cellValue + }; + } + } + + // Check for additions at the sheet level + const sheetAdds = mapSchemaItem.add; + if (sheetAdds && sheetAdds.length) { + for (const sheetAddsItem of sheetAdds) { + mapSchema.push(sheetAddsItem); + } + } + } + + return output; + } + + /** + * Process a transform config `condition`, returning `true` if the condition passed and `false` otherwise. + * + * @param {ConditionSchema} condition + * @param {RowObject[]} rowObjects + * @return {*} {boolean} `true` if the condition passed, `false` otherwise + * @memberof XLSXTransform + */ + _processCondition(condition: ConditionSchema, rowObjects: RowObject[]): boolean { + if (!condition) { + // No conditions to process + return true; + } + + const conditionsMet = new Set(); + + for (const checksItem of condition.checks) { + if (checksItem.ifNotEmpty) { + conditionsMet.add(this._processIfNotEmptyCondition(checksItem, rowObjects)); + } + } + + if (condition.type === 'or') { + return conditionsMet.has(true); + } + + return !conditionsMet.has(false); + } + + _processIfNotEmptyCondition(check: IfNotEmptyCheck, rowObjects: RowObject[]): boolean { + const pathValues = this._processPaths([check.ifNotEmpty], rowObjects); + + if (!pathValues || !pathValues.length) { + // condition failed + return false; + } + + return true; + } + + _processPaths(paths: JSONPathString[], json: JSONPathOptions['json']): string | string[] | string[][] { + if (paths.length === 0) { + return ''; + } + + if (paths.length === 1) { + return JSONPath({ path: paths[0], json: json }) || ''; + } + + const values = []; + + for (const pathsItem of paths) { + const value = JSONPath({ path: pathsItem, json: json }) || ''; + + if (value) { + values.push(value); + } + } + + return values; + } + + /** + * Groups all of the DWC records based on DWC sheet name. + * + * @param {Record[]>[]} processedHierarchicalRowObjects + * @return {*} {Record[]>} + * @memberof XLSXTransform + */ + prepareRowObjectsForJSONToSheet( + processedHierarchicalRowObjects: Record[]>[] + ): Record[]> { + const groupedByDWCSheetName: Record[]> = {}; + const uniqueGroupedByDWCSheetName: Record[]> = {}; + + const dwcSheetNames = this.schemaParser.getDWCSheetNames(); + + dwcSheetNames.forEach((sheetName) => { + groupedByDWCSheetName[sheetName] = []; + uniqueGroupedByDWCSheetName[sheetName] = []; + }); + + processedHierarchicalRowObjects.forEach((item) => { + const entries = Object.entries(item); + for (const [key, value] of entries) { + groupedByDWCSheetName[key] = groupedByDWCSheetName[key].concat(value); + } + }); + + Object.entries(groupedByDWCSheetName).forEach(([key, value]) => { + const keys = this.schemaParser.getDWCSheetKeyBySheetName(key); + uniqueGroupedByDWCSheetName[key] = filterDuplicateKeys(value, keys) as any; + }); + + return uniqueGroupedByDWCSheetName; + } + + _getNextUniqueNumber(): number { + return this._uniqueIncrement++; + } +} diff --git a/api/src/utils/media/xlsx/transformation/xlsx-transformation.ts b/api/src/utils/media/xlsx/transformation/xlsx-transformation.ts deleted file mode 100644 index 3347a1c2d6..0000000000 --- a/api/src/utils/media/xlsx/transformation/xlsx-transformation.ts +++ /dev/null @@ -1,633 +0,0 @@ -import equal from 'fast-deep-equal'; -import { uniqWith } from 'lodash'; -import xlsx from 'xlsx'; -import { CSVWorksheet } from '../../csv/csv-file'; -import { XLSXCSV } from '../xlsx-file'; -import { - Condition, - FlattenSchema, - PostTransformationRelatopnshipSchema, - TransformationFieldSchema, - TransformationSchemaParser, - TransformSchema -} from './transformation-schema-parser'; - -export type FlattenedRowPartsBySourceFile = { - sourceFile: string; - uniqueId: any; - row: object; -}; - -export type RowObject = { [key: string]: any }; - -export type RowsObjectsByFileName = { [key: string]: RowObject[] }; - -export type XLSXWorksheetByFileName = { [key: string]: xlsx.WorkSheet }; - -/** - * Applies transformations to an `XLSXCSV` instance. - * - * @export - * @class XLSXTransformation - */ -export class XLSXTransformation { - transformationSchemaParser: TransformationSchemaParser; - xlsxCsv: XLSXCSV; - - constructor(transformationSchemaParser: TransformationSchemaParser, xlsxCsv: XLSXCSV) { - this.transformationSchemaParser = transformationSchemaParser; - this.xlsxCsv = xlsxCsv; - } - - /** - * Transform the raw XLSX data. - * - * @return {*} {RowsObjectsByFileName} - * @memberof XLSXTransformation - */ - async transform(): Promise { - const flattenedData = this._flattenData(); - - const mergedFlattenedData = this._mergedFlattenedRows(flattenedData); - - const transformedMergedFlattenedData = this._transformFlattenedData(mergedFlattenedData); - - const parsedTransformedMergedFlattenedData = this._parseTransformedData(transformedMergedFlattenedData); - - return this._mergeParsedData(parsedTransformedMergedFlattenedData); - } - - /** - * Flattens the worksheet data into arrays of objects. - * - * @return {*} {FlattenedRowPartsBySourceFile[][]} - * @memberof XLSXTransformation - */ - _flattenData(): FlattenedRowPartsBySourceFile[][] { - let rowsBySourceFileArray: FlattenedRowPartsBySourceFile[][] = []; - - // Get all flatten schemas - const flattenSchemas = this.transformationSchemaParser.getAllFlattenSchemas(); - - // Build an array of [worksheetName, worksheet] based on the order of the flatten schemas. This is necessary - // because the flattening process requires parsing the worksheets in a specific order, as specified by the flatten - // section of the transformation schema. - const orderedWorksheetsByFlattenSchema: [string, CSVWorksheet][] = []; - flattenSchemas.forEach((flattenSchema) => { - const worksheet = this.xlsxCsv.workbook.worksheets[flattenSchema.fileName]; - - if (worksheet) { - orderedWorksheetsByFlattenSchema.push([flattenSchema.fileName, worksheet]); - } - }); - - // Iterate over each worksheet in the ordered array of worksheets - orderedWorksheetsByFlattenSchema.forEach(([worksheetName, worksheet]) => { - // Get the flatten file structure schema for the worksheet, based on the worksheet name - const flattenSchema = this.transformationSchemaParser.getFlattenSchemas(worksheetName); - - if (!flattenSchema) { - // No schema for this worksheet, skip it - return; - } - - // Get all worksheet rows as an array of objects - const rowObjects = worksheet.getRowObjects(); - - if (!flattenSchema.parent) { - // Handle root records, that have no parent record - const flattenedRootRecords = this._flattenRootRecords(flattenSchema, worksheet, rowObjects); - rowsBySourceFileArray = rowsBySourceFileArray.concat(flattenedRootRecords); - } else { - // Handle child records, that have a parent record - const parentFileName = flattenSchema.parent.fileName.toLowerCase(); - const parentUniqueIdColumns = flattenSchema.parent.uniqueId; - - const childFileName = flattenSchema.fileName; - const childUniqueIdColumns = flattenSchema.uniqueId; - - rowObjects.forEach((rowObject, rowIndex) => { - const parentUniqueId = this._buildMultiColumnID(worksheet, rowIndex, parentUniqueIdColumns).toLowerCase(); - - const uniqueId = this._buildMultiColumnID(worksheet, rowIndex, childUniqueIdColumns); - - const newRecord = { - sourceFile: childFileName, - uniqueId: uniqueId, - row: rowObject - }; - - // An array of indexes that tracks which records to add `newRecord` to, and which records should be duplicated - // before adding `newRecord` to them. - const recordsToModify: { matchingParentIndex: number; matchingChildIndex: number }[] = []; - let recordsToModifyIndex = 0; - - let foundRowToModify = false; - - // For each parent array of child arrays of objects - rowsBySourceFileArray.forEach((rowsBySourceFile, rowsBySourceFileIndex) => { - if (foundRowToModify) { - return; - } - - let foundRecordToModify = false; - - /* - * Compare the `newRecord` to each object in the child array - * If a matching parent record is found - * - mark this parent array index - * If a matching child record from the same sourceFile is found - * - mark this child array index - */ - rowsBySourceFile.forEach((rowBySourceFile, rowBySourceFileIndex) => { - const existingRowFileName = rowBySourceFile.sourceFile.toLowerCase(); - const existingRowUniqueId = rowBySourceFile.uniqueId.toLowerCase(); - - if (existingRowFileName === parentFileName && existingRowUniqueId === parentUniqueId) { - // This array may need a copy of `newRecord` - recordsToModify[recordsToModifyIndex] = { - ...recordsToModify[recordsToModifyIndex], - matchingParentIndex: rowsBySourceFileIndex - }; - - foundRecordToModify = true; - } else if (existingRowFileName === childFileName.toLowerCase()) { - // This array already contains a record from the same file as `newRecord` and will need to be duplicated - recordsToModify[recordsToModifyIndex] = { - ...recordsToModify[recordsToModifyIndex], - matchingChildIndex: rowBySourceFileIndex - }; - - foundRecordToModify = true; - } - }); - - if (foundRecordToModify) { - if ( - recordsToModify[recordsToModifyIndex].matchingParentIndex >= 0 && - recordsToModify[recordsToModifyIndex].matchingChildIndex >= 0 - ) { - // A matching parent row with matching child was found, don't continue checking other rows - foundRowToModify = true; - } - // A record was found after iterating over the previous array, increase the index before we loop over - // the next array. - recordsToModifyIndex++; - } - }); - - // For each `recordsToModify` - // Apply updates to the existing records based on the `recordsToModify` array. - recordsToModify.forEach((recordToModify) => { - if (recordToModify.matchingParentIndex >= 0 && recordToModify.matchingChildIndex >= 0) { - /* - * `recordToModify` indicates that a matching parent was found AND a matching child from the same - * sourceFile was found. Duplicate the array, and in the duplicated array, overwrite the existing - * matching child with the `newRecord`. - * - * Example: - * - * Initial state: - * - * newRecord = {sourceFile: 'file2', uniqueId: 3, row: {...}}; - * - * rowsBySourceFileArray = [ - * [ - * {sourceFile: 'file1', uniqueId: 1, row: {...}}, // matching parent of `newRecord` - * {sourceFile: 'file2', uniqueId: 2, row: {...}} // matching child from same sourceFile as `newRecord` - * ] - * ] - * - * Final state: - * - * rowsBySourceFileArray = [ - * [ - * {sourceFile: 'file1', uniqueId: 1, row: {...}}, - * {sourceFile: 'file2', uniqueId: 2, row: {...}} - * ], - * [ - * {sourceFile: 'file1', uniqueId: 1, row: {...}}, - * {sourceFile: 'file2', uniqueId: 3, row: {...}} - * ] - * ] - */ - - // Copy the existing items into a new array - const newRowRecord = [...rowsBySourceFileArray[recordToModify.matchingParentIndex]]; - - // Overwrite the matching item at index `matchingChildIndex` with our new record - newRowRecord[recordToModify.matchingChildIndex] = newRecord; - - // Append this new duplicated record to the parent array - rowsBySourceFileArray.push(newRowRecord); - } else if (recordToModify.matchingParentIndex >= 0) { - /* - * `recordToModify` indicates that a matching parent was found. Add the `newRecord` to this existing - * array. - * - * Example: - * - * Initial state: - * - * newRecord = {sourceFile: 'file2', uniqueId: 3, row: {...}}; - * - * rowsBySourceFileArray = [ - * [ - * {sourceFile: 'file1', uniqueId: 1, row: {...}} // matching parent of `newRecord` - * ] - * ] - * - * Final state: - * - * rowsBySourceFileArray = [ - * [ - * {sourceFile: 'file1', uniqueId: 1, row: {...}}, - * {sourceFile: 'file2', uniqueId: 3, row: {...}} - * ] - * ] - */ - rowsBySourceFileArray[recordToModify.matchingParentIndex].push(newRecord); - } - }); - }); - } - }); - - return rowsBySourceFileArray; - } - - _flattenRootRecords( - flattenSchema: FlattenSchema, - worksheet: CSVWorksheet, - rowObjects: object[] - ): FlattenedRowPartsBySourceFile[][] { - const newRecords: FlattenedRowPartsBySourceFile[][] = []; - - rowObjects.forEach((rowObject, rowIndex) => { - const uniqueId = this._buildMultiColumnID(worksheet, rowIndex, flattenSchema.uniqueId); - - const newRecord = { - sourceFile: flattenSchema.fileName, - uniqueId: uniqueId, - row: rowObject - }; - - newRecords.push([newRecord]); - }); - - return newRecords; - } - - _buildMultiColumnID(worksheet: CSVWorksheet, rowIndex: number, columnNames: string[]) { - return this._buildMultiColumnValue(worksheet, rowIndex, columnNames, ':'); - } - - _buildMultiColumnValue(worksheet: CSVWorksheet, rowIndex: number, columnNames: string[], separator?: string) { - return columnNames.map((columnName) => worksheet.getCell(columnName, rowIndex)).join(separator || ' '); - } - - /** - * Merges the arrays of objects into an array of objects. - * - * @param {FlattenedRowPartsBySourceFile[][]} flattenedData - * @return {*} {object[][]} - * @memberof XLSXTransformation - */ - _mergedFlattenedRows(flattenedData: FlattenedRowPartsBySourceFile[][]): object[] { - const mergedAndFlattenedRows: object[] = []; - - flattenedData.forEach((rowsBySourceFile, index) => { - rowsBySourceFile.forEach((rowPart) => { - mergedAndFlattenedRows[index] = { ...mergedAndFlattenedRows[index], ...rowPart.row }; - }); - }); - - return mergedAndFlattenedRows; - } - - /** - * Applies transformation logic to the flattened array of objects, creating a new array of objects (which may contain - * duplicate items) - * - * @param {object[]} mergedFlattenedData - * @return {*} {object[]} - * @memberof XLSXTransformation - */ - _transformFlattenedData(mergedFlattenedData: object[]): object[] { - const transformSchemasArray = this.transformationSchemaParser.getTransformSchemas(); - - let transformedDWCData: object[] = []; - - mergedFlattenedData.forEach((rowObject, rowObjectIndex) => { - transformSchemasArray.forEach((transformationSchema, transformationSchemaIndex) => { - const newDWCRowObjects = this._applyFileTransformations( - rowObject, - transformationSchema, - rowObjectIndex, - transformationSchemaIndex - ); - - transformedDWCData = transformedDWCData.concat(newDWCRowObjects); - }); - }); - - return transformedDWCData; - } - - _applyFileTransformations( - rowObject: object, - transformationSchema: TransformSchema, - rowObjectIndex: number, - transformationSchemaIndex: number - ): object[] { - if (!this._isConditionMet(rowObject, transformationSchema?.condition)) { - // condition not met, return an empty array (contains no new records) - return []; - } - - let newDWCRowObjects: object[] = []; - - transformationSchema.transformations.forEach((transformation) => { - const newDWCRowObject = {}; - - if (!this._isConditionMet(rowObject, transformation?.condition)) { - return; - } - - Object.entries(transformation.fields).forEach(([fieldName, config]) => { - if (!this._isConditionMet(rowObject, config?.condition)) { - return; - } - - let columnValue = this._getColumnValue(rowObject, config); - - if (config.unique) { - // Append `config.unique` + indexes to ensure this column value is unique - columnValue = `${columnValue}:${config.unique}-${rowObjectIndex}-${transformationSchemaIndex}`; - } - - newDWCRowObject[fieldName] = columnValue; - }); - - newDWCRowObjects.push(newDWCRowObject); - }); - - transformationSchema?.postTransformations?.forEach((postTransformation) => { - if (!this._isConditionMet(rowObject, postTransformation.condition)) { - return; - } - - if (postTransformation.relationship) { - newDWCRowObjects = this._postTransformRelationships( - postTransformation as PostTransformationRelatopnshipSchema, - newDWCRowObjects - ); - } - }); - - return newDWCRowObjects; - } - - _postTransformRelationships = ( - postTransformRelationshipSchema: PostTransformationRelatopnshipSchema, - originalDWCRowObjects: object[] - ) => { - // Spread the parent/child row objects and update relationship fields - - const spreadColumn = postTransformRelationshipSchema.relationship.spreadColumn; - const uniqueIdColumn = postTransformRelationshipSchema.relationship.uniqueIdColumn; - - let spreadDWCRowObjects: object[] = []; - - if (spreadColumn) { - const originalParentRecord = originalDWCRowObjects[0]; - const originalChildRecord = originalDWCRowObjects[1]; - - const spreadColumnValue = Number(originalParentRecord[spreadColumn]); - - if (spreadColumnValue) { - for (let i = 0; i < spreadColumnValue; i++) { - const newParentRecord = { - ...originalParentRecord, - [spreadColumn]: 1, - [uniqueIdColumn]: `${originalParentRecord[uniqueIdColumn]}-${i}-0` - }; - const newChildRecord = { - ...originalChildRecord, - [uniqueIdColumn]: `${originalChildRecord[uniqueIdColumn]}-${i}-1` - }; - - newParentRecord['resourceID'] = newParentRecord[uniqueIdColumn]; - newParentRecord['relatedResourceID'] = newChildRecord[uniqueIdColumn]; - - newChildRecord['resourceID'] = newChildRecord[uniqueIdColumn]; - newChildRecord['relatedResourceID'] = newParentRecord[uniqueIdColumn]; - - spreadDWCRowObjects = spreadDWCRowObjects.concat([newParentRecord, newChildRecord]); - } - } - } - - return spreadDWCRowObjects; - }; - - /** - * Builds a new value from the `rowObject`, based on the config. - * - * This may involve returning a single `rowObject` value, concatenating multiple `rowObject` values together, or - * returning a static value. - * - * @param {object} rowObject - * @param {(TransformationFieldSchema | undefined)} config - * @return {*} {*} - * @memberof XLSXTransformation - */ - _getColumnValue(rowObject: object, config: TransformationFieldSchema | undefined): any { - if (!config) { - return; - } - - let columnValue = undefined; - - if (config.columns) { - const columnsValues = this._getColumnValueParts(rowObject, config.columns); - - if (columnsValues && columnsValues.length) { - columnValue = columnsValues.join(config?.separator || ' '); - } - } - - if (config.value) { - columnValue = config.value; - } - - return columnValue; - } - - /** - * Given an array of column names, return an array of matching column values. - * - * @param {object} rowObject - * @param {string[]} columnNames - * @return {*} {((string | number)[] | undefined)} - * @memberof XLSXTransformation - */ - _getColumnValueParts(rowObject: object, columnNames: string[]): (string | number)[] | undefined { - if (!rowObject || !columnNames || !columnNames.length) { - return undefined; - } - - const columnValueParts: any[] = []; - - columnNames.forEach((columnName) => { - const columnValue = rowObject[columnName]; - - if (columnValue !== undefined && columnValue !== null && columnValue !== '') { - columnValueParts.push(columnValue); - } - }); - - return columnValueParts; - } - - /** - * Returns true if the `condition` is met. - * - * @param {object} rowObject - * @param {(Condition | undefined)} condition - * @return {*} {boolean} - * @memberof XLSXTransformation - */ - _isConditionMet(rowObject: object, condition?: Condition): boolean { - if (!condition) { - // no conditions specified - return true; - } - - const columnValueParts = this._getColumnValueParts(rowObject, condition.if.columns); - - if (!columnValueParts || !columnValueParts.length) { - if (condition.if.not) { - // return true if no condition column values are defined, when condition is inverted - return true; - } - - // return false if no condition column values are defined - return false; - } - - if (condition.if.not) { - // return false if any condition column values are defined, when condition is inverted - return false; - } - - // return true if all condition columns are defined - return !columnValueParts.every( - (columnValuePart) => columnValuePart === undefined || columnValuePart === null || columnValuePart === '' - ); - } - - /** - * Parses the array of objects into separate arrays of objects (which may contain duplicate items), based on fileName. - * - * @param {object[]} transformedMergedFlattenedData - * @return {*} {RowsObjectsByFileName} - * @memberof XLSXTransformation - */ - _parseTransformedData(transformedMergedFlattenedData: object[]): RowsObjectsByFileName { - const parseSchemas = this.transformationSchemaParser.getParseSchemas(); - - const parsedDWCData: RowsObjectsByFileName = {}; - - parseSchemas.forEach((parseSchema) => { - const fileName = parseSchema.fileName; - const columns = parseSchema.columns; - - if (!parsedDWCData[fileName]) { - // initialize an empty array for the current fileName if one does not yet exist - parsedDWCData[fileName] = []; - } - - transformedMergedFlattenedData.forEach((rowObject) => { - if (!this._isConditionMet(rowObject, parseSchema?.condition)) { - // A conditional field was undefined or null, skip this record - return; - } - - const newRowObject = {}; - - for (const column of columns) { - if (column.source.columns && column.source.columns?.length) { - // iterate over source columns - for (const sourceColumn of column.source.columns) { - const sourceValue = rowObject[sourceColumn]; - - if (sourceValue) { - // use the first source column that has a defined value - newRowObject[column.target] = sourceValue; - break; - } - } - } else if (column.source.value) { - newRowObject[column.target] = column.source.value; - } - } - - if (!Object.keys(newRowObject).length) { - // row object is empty, skip - return; - } - - parsedDWCData[fileName].push(newRowObject); - }); - }); - - return parsedDWCData; - } - - /** - * Merges the array of objects for each fileName, removing duplicate items. - * - * @param {RowsObjectsByFileName} parsedDWCData - * @return {*} {RowsObjectsByFileName} - * @memberof XLSXTransformation - */ - async _mergeParsedData(parsedTransformedMergedFlattenedData: RowsObjectsByFileName): Promise { - // For each entry (based on fileName), do a deep equality check on each of its row objects, removing any duplicates. - - const breathSpace = async (delaySeconds: number) => - new Promise((resolve) => setTimeout(() => resolve(), delaySeconds * 1000)); - - const entries = Object.entries(parsedTransformedMergedFlattenedData); - - const duplicatesRemovedResponse = parsedTransformedMergedFlattenedData; - - for (const entry of entries) { - const [fileName, rowObjects] = entry; - - duplicatesRemovedResponse[fileName] = uniqWith(rowObjects, equal); - await breathSpace(1); - } - - return duplicatesRemovedResponse; - } - - /** - * Converts an object (whose keys are file names, and whose value is an array of objects) into a new object (whose - * keys are file names, and whose value is an `xlsx.Worksheet`). - * - * @param {RowsObjectsByFileName} mergedParsedData - * @return {*} {XLSXWorksheetByFileName} - * @memberof XLSXTransformation - */ - dataToSheet(mergedParsedTransformedData: RowsObjectsByFileName): XLSXWorksheetByFileName { - const sheets: XLSXWorksheetByFileName = {}; - - Object.entries(mergedParsedTransformedData).forEach(([fileName, rowObjects]) => { - const worksheet = xlsx.utils.json_to_sheet(rowObjects); - sheets[fileName] = worksheet; - }); - - return sheets; - } -} diff --git a/api/src/utils/media/xlsx/validation/xlsx-validation.test.ts b/api/src/utils/media/xlsx/validation/xlsx-validation.test.ts new file mode 100644 index 0000000000..2fb3ffd81c --- /dev/null +++ b/api/src/utils/media/xlsx/validation/xlsx-validation.test.ts @@ -0,0 +1,269 @@ +import { expect } from 'chai'; +import { describe } from 'mocha'; +import XLSX from 'xlsx'; +import { CSVWorkBook } from '../../csv/csv-file'; +import { getParentChildKeyMatchValidator } from './xlsx-validation'; + +const makeMockWorkbook = () => { + const mockWorkbook = XLSX.utils.book_new(); + // First sheet + XLSX.utils.book_append_sheet( + mockWorkbook, + XLSX.utils.json_to_sheet([ + { column1: 'column1-row1', column2: 'column2-row1', column4: 'A', column5: 'A' }, + { column1: 'column1-row2', column2: 'column2-row2', column4: 'B', column5: 'B' }, + { column1: 'column1-row3', column2: 'column2-row3', column4: 'C', column5: 'C' }, + { column1: 'column1-row4', column2: 'column2-row4', column4: 'D', column5: 'D' } + ]), + 'parent_sheet' + ); + + // Second sheet + XLSX.utils.book_append_sheet( + mockWorkbook, + XLSX.utils.json_to_sheet([ + { column1: 'column1-row1', column2: 'column2-row1', column3: 'column3-row1', column4: 'A' }, + { column1: 'column1-row2', column2: 'column2-row2', column3: 'column3-row2', column4: 'D' }, + { column1: 'column1-row3', column2: 'column2-row3', column3: 'column3-row3', column4: 'E' } + ]), + 'child_sheet' + ); + + return new CSVWorkBook(mockWorkbook); +}; + +describe('getParentChildKeyMatchValidator', async () => { + it('should not add errors when config is not provided', async () => { + const validator = getParentChildKeyMatchValidator(); + const mockWorkbook = makeMockWorkbook(); + validator(mockWorkbook); + + const { child_sheet } = mockWorkbook.worksheets; + + expect(child_sheet.csvValidation.keyErrors).to.eql([]); + }); + + it('should not add errors if no column names provided', async () => { + const validator = getParentChildKeyMatchValidator({ + workbook_parent_child_key_match_validator: { + child_worksheet_name: 'child_sheet', + parent_worksheet_name: 'parent_sheet', + column_names: [] + } + }); + const mockWorkbook = makeMockWorkbook(); + validator(mockWorkbook); + + const { child_sheet } = mockWorkbook.worksheets; + + expect(child_sheet.csvValidation.keyErrors).to.eql([]); + }); + + it('should not add errors if empty child sheet string is provided', async () => { + const validator = getParentChildKeyMatchValidator({ + workbook_parent_child_key_match_validator: { + child_worksheet_name: '', + parent_worksheet_name: 'parent_sheet', + column_names: ['column1'] + } + }); + const mockWorkbook = makeMockWorkbook(); + validator(mockWorkbook); + + const { child_sheet } = mockWorkbook.worksheets; + + expect(child_sheet.csvValidation.keyErrors).to.eql([]); + }); + + it('should not add errors if empty parent sheet string is provided', async () => { + const validator = getParentChildKeyMatchValidator({ + workbook_parent_child_key_match_validator: { + child_worksheet_name: 'child_sheet', + parent_worksheet_name: '', + column_names: ['column1'] + } + }); + const mockWorkbook = makeMockWorkbook(); + validator(mockWorkbook); + + const { child_sheet } = mockWorkbook.worksheets; + + expect(child_sheet.csvValidation.keyErrors).to.eql([]); + }); + + it('should not add errors if the provided parent sheet name is not found in the workbook', async () => { + const validator = getParentChildKeyMatchValidator({ + workbook_parent_child_key_match_validator: { + child_worksheet_name: 'child_sheet', + parent_worksheet_name: 'unknown_sheet_name', + column_names: ['column1'] + } + }); + const mockWorkbook = makeMockWorkbook(); + validator(mockWorkbook); + + const { child_sheet } = mockWorkbook.worksheets; + + expect(child_sheet.csvValidation.keyErrors).to.eql([]); + }); + + it('should not add errors if the provided child sheet name is not found in the workbook', async () => { + const validator = getParentChildKeyMatchValidator({ + workbook_parent_child_key_match_validator: { + child_worksheet_name: 'unknown_sheet_name', + parent_worksheet_name: 'parent_sheet', + column_names: ['column1'] + } + }); + const mockWorkbook = makeMockWorkbook(); + validator(mockWorkbook); + + const { child_sheet } = mockWorkbook.worksheets; + + expect(child_sheet.csvValidation.keyErrors).to.eql([]); + }); + + it('should not add errors if no dangling indices are found for a single column', async () => { + const validator = getParentChildKeyMatchValidator({ + workbook_parent_child_key_match_validator: { + child_worksheet_name: 'child_sheet', + parent_worksheet_name: 'parent_sheet', + column_names: ['column2'] + } + }); + const mockWorkbook = makeMockWorkbook(); + validator(mockWorkbook); + + const { child_sheet } = mockWorkbook.worksheets; + + expect(child_sheet.csvValidation.keyErrors).to.eql([]); + }); + + it('should not add errors if no dangling indices are found for multiple columns', async () => { + const validator = getParentChildKeyMatchValidator({ + workbook_parent_child_key_match_validator: { + child_worksheet_name: 'child_sheet', + parent_worksheet_name: 'parent_sheet', + column_names: ['column1', 'column2'] + } + }); + const mockWorkbook = makeMockWorkbook(); + validator(mockWorkbook); + + const { child_sheet } = mockWorkbook.worksheets; + + expect(child_sheet.csvValidation.keyErrors).to.eql([]); + }); + + it('should not add errors if parent column happens to contain serialized child column values', async () => { + const workbook = XLSX.utils.book_new(); + // First sheet + XLSX.utils.book_append_sheet(workbook, XLSX.utils.json_to_sheet([{ column1: 'A|B', column2: '' }]), 'parent_sheet'); + + // Second sheet + XLSX.utils.book_append_sheet(workbook, XLSX.utils.json_to_sheet([{ column1: 'A', column2: 'B|' }]), 'child_sheet'); + + const mockWorkbook = new CSVWorkBook(workbook); + + const validator = getParentChildKeyMatchValidator({ + workbook_parent_child_key_match_validator: { + child_worksheet_name: 'child_sheet', + parent_worksheet_name: 'parent_sheet', + column_names: ['column1', 'column2'] + } + }); + validator(mockWorkbook); + + const { child_sheet } = mockWorkbook.worksheets; + expect(child_sheet.csvValidation.keyErrors).to.eql([ + { + errorCode: 'Missing Child Key from Parent', + colNames: ['column1', 'column2'], + message: 'child_sheet[column1, column2] must have matching value in parent_sheet[column1, column2].', + rows: [2] + } + ]); + }); + + it('should add errors if a column name is absent from the parent sheet but present in the child sheet', async () => { + const validator = getParentChildKeyMatchValidator({ + workbook_parent_child_key_match_validator: { + child_worksheet_name: 'child_sheet', + parent_worksheet_name: 'parent_sheet', + column_names: ['column2', 'column3'] + } + }); + const mockWorkbook = makeMockWorkbook(); + validator(mockWorkbook); + + const { child_sheet } = mockWorkbook.worksheets; + expect(child_sheet.csvValidation.keyErrors).to.eql([ + { + colNames: ['column2', 'column3'], + errorCode: 'Missing Child Key from Parent', + message: 'child_sheet[column2, column3] must have matching value in parent_sheet[column2, column3].', + rows: [2, 3, 4] + } + ]); + }); + + it('should not add errors if a column name is absent from the child sheet but present in the parent sheet', async () => { + const validator = getParentChildKeyMatchValidator({ + workbook_parent_child_key_match_validator: { + child_worksheet_name: 'child_sheet', + parent_worksheet_name: 'parent_sheet', + column_names: ['column2', 'column5'] + } + }); + const mockWorkbook = makeMockWorkbook(); + validator(mockWorkbook); + + const { child_sheet } = mockWorkbook.worksheets; + expect(child_sheet.csvValidation.keyErrors).to.eql([]); + }); + + it('should only add a given error to the child sheet and not the parent', async () => { + const validator = getParentChildKeyMatchValidator({ + workbook_parent_child_key_match_validator: { + child_worksheet_name: 'child_sheet', + parent_worksheet_name: 'parent_sheet', + column_names: ['column3'] + } + }); + const mockWorkbook = makeMockWorkbook(); + validator(mockWorkbook); + + const { child_sheet, parent_sheet } = mockWorkbook.worksheets; + expect(parent_sheet.csvValidation.keyErrors).to.eql([]); + expect(child_sheet.csvValidation.keyErrors).to.eql([ + { + colNames: ['column3'], + errorCode: 'Missing Child Key from Parent', + message: 'child_sheet[column3] must have matching value in parent_sheet[column3].', + rows: [2, 3, 4] + } + ]); + }); + + it('should only include rows containing a dangling key in the child sheet in key errors', async () => { + const validator = getParentChildKeyMatchValidator({ + workbook_parent_child_key_match_validator: { + child_worksheet_name: 'child_sheet', + parent_worksheet_name: 'parent_sheet', + column_names: ['column4'] + } + }); + const mockWorkbook = makeMockWorkbook(); + validator(mockWorkbook); + + const { child_sheet } = mockWorkbook.worksheets; + expect(child_sheet.csvValidation.keyErrors).to.eql([ + { + colNames: ['column4'], + errorCode: 'Missing Child Key from Parent', + message: 'child_sheet[column4] must have matching value in parent_sheet[column4].', + rows: [4] + } + ]); + }); +}); diff --git a/api/src/utils/media/xlsx/validation/xlsx-validation.ts b/api/src/utils/media/xlsx/validation/xlsx-validation.ts new file mode 100644 index 0000000000..2cd9c186b6 --- /dev/null +++ b/api/src/utils/media/xlsx/validation/xlsx-validation.ts @@ -0,0 +1,111 @@ +import { SUBMISSION_MESSAGE_TYPE } from '../../../../constants/status'; +import { safeTrim } from '../../../string-utils'; +import { CSVWorkBook, WorkBookValidator } from '../../csv/csv-file'; + +export type ParentChildKeyMatchValidatorConfig = { + workbook_parent_child_key_match_validator: { + description?: string; + child_worksheet_name: string; + parent_worksheet_name: string; + column_names: string[]; + }; +}; + +/** + * For a specified parent sheet, child sheet, and set of parent and child columns, adds an error on each cell in the + * child sheet whose key in the corresponding row belonging to the parent sheet cannot be found. + * + * Note: If the cell is empty, this check will be skipped. Use the `getRequiredFieldsValidator` validator to assert + * required fields. + * + * @param {ParentChildKeyMatchValidatorConfig} [config] The validator config + * @return {*} {WorkBookValidator} The workbook validator + * + */ +export const getParentChildKeyMatchValidator = (config?: ParentChildKeyMatchValidatorConfig): WorkBookValidator => { + return (csvWorkbook: CSVWorkBook) => { + if (!config) { + return csvWorkbook; + } + const { + child_worksheet_name, + parent_worksheet_name, + column_names + } = config.workbook_parent_child_key_match_validator; + + const parentWorksheet = csvWorkbook.worksheets[parent_worksheet_name]; + const childWorksheet = csvWorkbook.worksheets[child_worksheet_name]; + + if (!parentWorksheet || !childWorksheet) { + return csvWorkbook; + } + + const parentRowObjects = parentWorksheet.getRowObjects(); + const childRowObjects = childWorksheet.getRowObjects(); + + // If there are no children rows found, leave early + if (!childRowObjects.length) { + return csvWorkbook; + } + + // Filter column names to only check key violation on columns included in the child sheet + const filteredColumnNames = column_names.filter((columnName) => Boolean(childRowObjects[0][columnName])); + + /** + * Encodes the column values for a worksheet at a given row into a string, which is used for comparison with another worksheet + * @param {object} rowObject A record reflecting a row in a tbale + * @returns {*} {string} The row objected encoded as a string + */ + const serializer = (rowObject: object): string => { + return ( + filteredColumnNames + // Retrieve the value from each column + .map((columnName: string) => String(rowObject[columnName])) + + // Remove empty column values + .filter(Boolean) + + // Trim whitespace + .map(safeTrim) + + // Deliminate column values + .join('|') + ); + }; + + const parentSerializedRows = parentRowObjects.map(serializer); + + // Add an error for each cell containing a dangling key reference in the child worksheet + const danglingRowIndices = childRowObjects + // Serialize each row in order to match column values + .map(serializer) + + // Maps a row index to `-1`, if and only if the given row has a matching row in the parent + .map((serializedRow: string, rowIndex: number) => { + return !serializedRow || parentSerializedRows.includes(serializedRow) ? -1 : rowIndex; + }) + + // Filter any row indices which have a matching row in the parent + .filter((rowIndex: number) => rowIndex >= 0) + + // Add +2 to the index to reflect the actual row number in the file + .map((index: number) => index + 2); + + if (danglingRowIndices.length === 0) { + return csvWorkbook; + } + + // For any and all of the remaining 'dangling' row indices, insert a single key error reflecting the missing keys from the parent. + const columnNameIndexString = `[${column_names.join(', ')}]`; + childWorksheet.csvValidation.addKeyErrors([ + { + errorCode: SUBMISSION_MESSAGE_TYPE.DANGLING_PARENT_CHILD_KEY, + message: `${child_worksheet_name}${columnNameIndexString} must have matching value in ${parent_worksheet_name}${columnNameIndexString}.`, + colNames: column_names, + rows: danglingRowIndices + } + ]); + + return csvWorkbook; + }; +}; diff --git a/api/src/utils/media/xlsx/xlsx-file.ts b/api/src/utils/media/xlsx/xlsx-file.ts index 28cd313346..c3ce15e527 100644 --- a/api/src/utils/media/xlsx/xlsx-file.ts +++ b/api/src/utils/media/xlsx/xlsx-file.ts @@ -22,39 +22,68 @@ export class XLSXCSV { this.mediaValidation = new MediaValidation(this.rawFile.fileName); - this.workbook = new CSVWorkBook(xlsx.read(this.rawFile.buffer, { ...options })); + this.workbook = new CSVWorkBook( + // See https://www.npmjs.com/package/xlsx#parsing-options for details on parsing options + xlsx.read(this.rawFile.buffer, { cellDates: true, cellNF: true, cellHTML: false, ...options }) + ); } - isMediaValid(validationSchemaParser: ValidationSchemaParser): IMediaState { + /** + * Runs all media-related validation for this CSV file, based on given validation schema parser. + * @param validationSchemaParser The validation schema + * @returns {*} {void} + * @memberof XLSXCSV + */ + validateMedia(validationSchemaParser: ValidationSchemaParser): void { const validators = validationSchemaParser.getSubmissionValidations(); - const mediaValidation = this.validate(validators as XLSXCSVValidator[]); - - return mediaValidation.getState(); + this.validate(validators as XLSXCSVValidator[]); } - isContentValid(validationSchemaParser: ValidationSchemaParser): ICsvState[] { - const csvStates: ICsvState[] = []; + /** + * Runs all content and workbook-related validation for this CSV file, based on the given validation + * schema parser. + * + * @param {ValidationSchemaParser} validationSchemaParser The validation schema + * @return {*} {void} + * @memberof XLSXCSV + */ + validateContent(validationSchemaParser: ValidationSchemaParser): void { + // Run workbook validators. + const workbookValidators = validationSchemaParser.getWorkbookValidations(); + this.workbook.validate(workbookValidators); - Object.keys(this.workbook.worksheets).forEach((fileName) => { + // Run content validators. + Object.entries(this.workbook.worksheets).forEach(([fileName, worksheet]) => { const fileValidators = validationSchemaParser.getFileValidations(fileName); - const columnValidators = validationSchemaParser.getAllColumnValidations(fileName); - const validators = [...fileValidators, ...columnValidators]; + const validationRules = [...fileValidators, ...columnValidators]; - const worksheet: CSVWorksheet = this.workbook.worksheets[fileName]; - - if (!worksheet) { - return; + if (validationRules.length) { + worksheet.validate(validationRules); } - - const csvValidation = worksheet.validate(validators); - - csvStates.push(csvValidation.getState()); }); + } + + /** + * Returns the current media state belonging to the CSV file. + * @returns {*} {IMediaState} The state of the CSV media. + * @memberof XLSXCSV + */ + getMediaState(): IMediaState { + return this.mediaValidation.getState(); + } - return csvStates; + /** + * Returns the current CSV states belonging to all worksheets in the CSV file. + * @returns {*} {ICsvState[]} The state of each worksheet in the CSV file. + * @memberof XLSXCSV + */ + getContentState(): ICsvState[] { + return Object.values(this.workbook.worksheets) + .map((worksheet: CSVWorksheet) => worksheet.csvValidation.getState()) + .filter(Boolean); } worksheetToBuffer(worksheet: xlsx.WorkSheet): Buffer { diff --git a/api/src/utils/media/xlsx/xlsx-utils.ts b/api/src/utils/media/xlsx/xlsx-utils.ts new file mode 100644 index 0000000000..8def99bf7e --- /dev/null +++ b/api/src/utils/media/xlsx/xlsx-utils.ts @@ -0,0 +1,89 @@ +import xlsx, { CellObject } from 'xlsx'; +import { safeTrim } from '../../string-utils'; + +/** + * Get a worksheet by name. + * + * @export + * @param {xlsx.WorkBook} workbook + * @param {string} sheetName + * @return {*} {xlsx.WorkSheet} + */ +export function getWorksheetByName(workbook: xlsx.WorkBook, sheetName: string): xlsx.WorkSheet { + return workbook.Sheets[sheetName]; +} + +/** + * Get a worksheets decoded range object, or return undefined if the worksheet is missing range information. + * + * @export + * @param {xlsx.WorkSheet} worksheet + * @return {*} {(xlsx.Range | undefined)} + */ +export function getWorksheetRange(worksheet: xlsx.WorkSheet): xlsx.Range | undefined { + const ref = worksheet['!ref']; + + if (!ref) { + return undefined; + } + + return xlsx.utils.decode_range(ref); +} + +/** + * Iterates over the cells in the worksheet and: + * - Trims whitespace from cell values. + * - Converts `Date` objects to ISO strings. + * + * https://stackoverflow.com/questions/61789174/how-can-i-remove-all-the-spaces-in-the-cells-of-excelsheet-using-nodejs-code + * @param worksheet + */ +export function prepareWorksheetCells(worksheet: xlsx.WorkSheet) { + const range = getWorksheetRange(worksheet); + + if (!range) { + return undefined; + } + + for (let r = range.s.r; r < range.e.r; r++) { + for (let c = range.s.c; c < range.e.c; c++) { + const coord = xlsx.utils.encode_cell({ r, c }); + let cell: CellObject = worksheet[coord]; + + if (!cell || !cell.v) { + continue; + } + + cell = replaceCellDates(cell); + + cell = trimCellWhitespace(cell); + } + } +} + +export function trimCellWhitespace(cell: CellObject) { + // check and clean raw strings + if (cell.t === 's') { + cell.v = safeTrim(cell.v); + } + + // check and clean formatted strings + if (cell.w) { + cell.w = safeTrim(cell.w); + } + + return cell; +} + +export function replaceCellDates(cell: CellObject) { + if (cell.t === 'd' && cell.v instanceof Date) { + cell.v = (cell.v as Date).toISOString(); + } + + return cell; +} + +export function getCellValue(cell: CellObject) { + // See https://www.npmjs.com/package/xlsx#cell-object for details on cell fields + return cell.v; +} diff --git a/api/src/utils/shared-api-docs.test.ts b/api/src/utils/shared-api-docs.test.ts index 2567e070ee..32cb16f378 100644 --- a/api/src/utils/shared-api-docs.test.ts +++ b/api/src/utils/shared-api-docs.test.ts @@ -1,10 +1,6 @@ import { expect } from 'chai'; import { describe } from 'mocha'; -import { - addFundingSourceApiDocObject, - attachmentApiDocObject, - deleteFundingSourceApiDocObject -} from './shared-api-docs'; +import { addFundingSourceApiDocObject, attachmentApiDocObject } from './shared-api-docs'; describe('attachmentApiResponseObject', () => { it('returns a valid response object', () => { @@ -15,15 +11,6 @@ describe('attachmentApiResponseObject', () => { }); }); -describe('deleteFundingSourceApiDocObject', () => { - it('returns a valid response object', () => { - const result = deleteFundingSourceApiDocObject('basic', 'success'); - - expect(result).to.not.be.null; - expect(result?.description).to.equal('basic'); - }); -}); - describe('addFundingSourceApiDocObject', () => { it('returns a valid response object', () => { const result = addFundingSourceApiDocObject('basic', 'success'); diff --git a/api/src/utils/shared-api-docs.ts b/api/src/utils/shared-api-docs.ts index ecfa06ad83..f36e440c84 100644 --- a/api/src/utils/shared-api-docs.ts +++ b/api/src/utils/shared-api-docs.ts @@ -48,54 +48,6 @@ export const attachmentApiDocObject = (basicDescription: string, successDescript }; }; -export const deleteFundingSourceApiDocObject = (basicDescription: string, successDescription: string) => { - return { - description: basicDescription, - tags: ['funding-sources'], - security: [ - { - Bearer: [] - } - ], - parameters: [ - { - in: 'path', - name: 'projectId', - schema: { - type: 'number' - }, - required: true - }, - { - in: 'path', - name: 'pfsId', - schema: { - type: 'number' - }, - required: true - } - ], - responses: { - 200: { - description: successDescription, - content: { - 'text/plain': { - schema: { - type: 'number' - } - } - } - }, - 401: { - $ref: '#/components/responses/401' - }, - default: { - $ref: '#/components/responses/default' - } - } - }; -}; - export const addFundingSourceApiDocObject = (basicDescription: string, successDescription: string) => { return { description: basicDescription, diff --git a/api/src/utils/spatial-utils.test.ts b/api/src/utils/spatial-utils.test.ts index ebe26b659f..d37e670e71 100644 --- a/api/src/utils/spatial-utils.test.ts +++ b/api/src/utils/spatial-utils.test.ts @@ -1,6 +1,6 @@ import { expect } from 'chai'; import { describe } from 'mocha'; -import { parseLatLongString, parseUTMString } from './spatial-utils'; +import { parseLatLongString, parseUTMString, utmToLatLng } from './spatial-utils'; describe('parseUTMString', () => { it('returns null when no UTM string provided', async () => { @@ -17,7 +17,7 @@ describe('parseUTMString', () => { }); it('returns null when UTM easting is too small', async () => { - const result = parseUTMString('9N 0 6114170'); + const result = parseUTMString('9N -1 6114170'); expect(result).to.be.null; }); @@ -141,3 +141,30 @@ describe('parseLatLongString', () => { expect(result).to.eql({ lat: 49.123, long: -120.123 }); }); }); + +describe('utmToLatLng', () => { + it('returns lat, long when zone_letter is provided', async () => { + const verbatimCoordinates = { + easting: 638449, + northing: 5460230, + zone_number: 11, + zone_letter: 'U', + zone_srid: 1 + }; + + expect(utmToLatLng(verbatimCoordinates)).to.eql({ latitude: 49.2791347287819, longitude: -115.09642191895463 }); + }); + + it('returns lat, long when zone_letter is NOT provided', async () => { + const verbatimCoordinates = { + easting: 638449, + northing: 5460230, + zone_number: 11, + zone_letter: undefined, + zone_srid: 1, + northern: true + }; + + expect(utmToLatLng(verbatimCoordinates)).to.eql({ latitude: 49.2791347287819, longitude: -115.09642191895463 }); + }); +}); diff --git a/api/src/utils/spatial-utils.ts b/api/src/utils/spatial-utils.ts index ee347d45be..c25d6cb12d 100644 --- a/api/src/utils/spatial-utils.ts +++ b/api/src/utils/spatial-utils.ts @@ -1,3 +1,7 @@ +import { Feature } from 'geojson'; +import SQL, { SQLStatement } from 'sql-template-strings'; +import { toLatLon } from 'utm'; + export interface IUTM { easting: number; northing: number; @@ -12,7 +16,7 @@ const SOPUTH_UTM_BASE_ZONE_NUMBER = 32700; const NORTH_UTM_ZONE_LETTERS = ['N', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X']; const SOUTH_UTM_ZONE_LETTERS = ['C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'L', 'M']; -const UTM_STRING_FORMAT = RegExp(/^[1-9]\d?[NPQRSTUVWXCDEFGHJKLM]? \d+ \d+$/i); +const UTM_STRING_FORMAT = RegExp(/^[1-9]\d?[NPQRSTUVWXCDEFGHJKLM]? \d{0,8}\.?\d{0,12} \d{0,8}\.?\d{0,12}$/i); const UTM_ZONE_WITH_LETTER_FORMAT = RegExp(/^[1-9]\d?[NPQRSTUVWXCDEFGHJKLM]$/i); /** @@ -117,3 +121,60 @@ export function parseLatLongString(latLong: string): ILatLong | null { return { lat, long }; } + +/** + * Function to generate the SQL for insertion of a geometry collection + * + * @export + * @param {(Feature | Feature[])} geometry + * @return {*} {SQLStatement} + */ +export function generateGeometryCollectionSQL(geometry: Feature | Feature[]): SQLStatement { + if (!Array.isArray(geometry)) { + const geo = JSON.stringify(geometry.geometry); + + return SQL`public.ST_Force2D(public.ST_GeomFromGeoJSON(${geo}))`; + } + + if (geometry.length === 1) { + const geo = JSON.stringify(geometry[0].geometry); + + return SQL`public.ST_Force2D(public.ST_GeomFromGeoJSON(${geo}))`; + } + + const sqlStatement: SQLStatement = SQL`public.ST_AsText(public.ST_Collect(array[`; + + geometry.forEach((geom: Feature, index: number) => { + const geo = JSON.stringify(geom.geometry); + + // as long as it is not the last geometry, keep adding to the ST_collect + if (index !== geometry.length - 1) { + sqlStatement.append(SQL` + public.ST_Force2D(public.ST_GeomFromGeoJSON(${geo})),`); + } else { + sqlStatement.append(SQL` + public.ST_Force2D(public.ST_GeomFromGeoJSON(${geo}))]))`); + } + }); + + return sqlStatement; +} + +export function utmToLatLng(verbatimCoordinates: IUTM): { latitude: number; longitude: number } { + if (verbatimCoordinates.zone_letter) { + return toLatLon( + verbatimCoordinates?.easting, + verbatimCoordinates?.northing, + verbatimCoordinates?.zone_number, + verbatimCoordinates?.zone_letter + ); + } else { + return toLatLon( + verbatimCoordinates?.easting, + verbatimCoordinates?.northing, + verbatimCoordinates?.zone_number, + undefined, + true + ); + } +} diff --git a/api/src/utils/string-utils.test.ts b/api/src/utils/string-utils.test.ts new file mode 100644 index 0000000000..80d4555b3f --- /dev/null +++ b/api/src/utils/string-utils.test.ts @@ -0,0 +1,120 @@ +import { expect } from 'chai'; +import { safeToLowerCase, safeTrim } from './string-utils'; + +describe('safeToLowerCase', () => { + describe('returns value lowercase', () => { + it('when value is a lowercase string', () => { + expect(safeToLowerCase('string')).to.equal('string'); + }); + + it('when value is an uppercase string', () => { + expect(safeToLowerCase('STRING')).to.equal('string'); + }); + + it('when value is a mixed case string', () => { + expect(safeToLowerCase('sTRiNG')).to.equal('string'); + }); + }); + + describe('returns value unaltered', () => { + it('when value is a negative number', () => { + expect(safeToLowerCase(-123)).to.equal(-123); + }); + + it('when value is a zero', () => { + expect(safeToLowerCase(0)).to.equal(0); + }); + + it('when value is a positive number', () => { + expect(safeToLowerCase(123)).to.equal(123); + }); + + it('when value is `false`', () => { + expect(safeToLowerCase(false)).to.equal(false); + }); + + it('when value is `true`', () => { + expect(safeToLowerCase(true)).to.equal(true); + }); + + it('when value is an empty object', () => { + expect(safeToLowerCase({})).to.eql({}); + }); + + it('when value is an empty array', () => { + expect(safeToLowerCase([])).to.eql([]); + }); + + it('when value is a non-empty array of numbers', () => { + expect(safeToLowerCase([1, 2, 3])).to.eql([1, 2, 3]); + }); + + it('when value is a non-empty array of strings', () => { + expect(safeToLowerCase(['1', 'string', 'false'])).to.eql(['1', 'string', 'false']); + }); + + it('when value is a function', () => { + const fn = (a: number, b: number) => a * b; + expect(safeToLowerCase(fn)).to.equal(fn); + }); + }); +}); + +describe('safeTrim', () => { + describe('returns value trimmed', () => { + it('when value is a lowercase string', () => { + expect(safeTrim(' string ')).to.equal('string'); + }); + + it('when value is an uppercase string', () => { + expect(safeTrim(' STRING ')).to.equal('STRING'); + }); + + it('when value is a mixed case string', () => { + expect(safeTrim(' sTRiNG ')).to.equal('sTRiNG'); + }); + }); + + describe('returns value unaltered', () => { + it('when value is a negative number', () => { + expect(safeTrim(-123)).to.equal(-123); + }); + + it('when value is a zero', () => { + expect(safeTrim(0)).to.equal(0); + }); + + it('when value is a positive number', () => { + expect(safeTrim(123)).to.equal(123); + }); + + it('when value is `false`', () => { + expect(safeTrim(false)).to.equal(false); + }); + + it('when value is `true`', () => { + expect(safeTrim(true)).to.equal(true); + }); + + it('when value is an empty object', () => { + expect(safeTrim({})).to.eql({}); + }); + + it('when value is an empty array', () => { + expect(safeTrim([])).to.eql([]); + }); + + it('when value is a non-empty array of numbers', () => { + expect(safeTrim([1, 2, 3])).to.eql([1, 2, 3]); + }); + + it('when value is a non-empty array of strings', () => { + expect(safeTrim([' 1 ', ' string ', ' false '])).to.eql([' 1 ', ' string ', ' false ']); + }); + + it('when value is a function', () => { + const fn = (a: number, b: number) => a * b; + expect(safeTrim(fn)).to.equal(fn); + }); + }); +}); diff --git a/api/src/utils/string-utils.ts b/api/src/utils/string-utils.ts new file mode 100644 index 0000000000..e0e9fa4619 --- /dev/null +++ b/api/src/utils/string-utils.ts @@ -0,0 +1,37 @@ +import { isString } from 'lodash'; + +/** + * Safely apply `.toLowerCase()` to a value of unknown type. + * + * If the value is not a string, then the original unaltered value will be returned. + * + * @export + * @template T + * @param {T} value + * @return {*} {T} + */ +export function safeToLowerCase(value: T): T { + if (isString(value)) { + return (value.toLowerCase() as unknown) as T; + } + + return value; +} + +/** + * Safely apply `.trim()` to a value of unknown type. + * + * If the value is not a string, then the original unaltered value will be returned. + * + * @export + * @template T + * @param {T} value + * @return {*} {T} + */ +export function safeTrim(value: T): T { + if (isString(value)) { + return (value.trim() as unknown) as T; + } + + return value; +} diff --git a/api/src/utils/submission-error.ts b/api/src/utils/submission-error.ts new file mode 100644 index 0000000000..3ecee340c8 --- /dev/null +++ b/api/src/utils/submission-error.ts @@ -0,0 +1,62 @@ +import { SUBMISSION_MESSAGE_TYPE, SUBMISSION_STATUS_TYPE, SUMMARY_SUBMISSION_MESSAGE_TYPE } from '../constants/status'; + +export const SubmissionErrorFromMessageType = (type: SUBMISSION_MESSAGE_TYPE): SubmissionError => { + const message = new MessageError(type); + return new SubmissionError({ messages: [message] }); +}; + +export const SummarySubmissionErrorFromMessageType = ( + type: SUMMARY_SUBMISSION_MESSAGE_TYPE +): SummarySubmissionError => { + const message = new MessageError(type); + return new SummarySubmissionError({ messages: [message] }); +}; + +export class MessageError extends Error { + type: T; + description: string; + errorCode: string; + + constructor(type: T, description?: string, errorCode?: string) { + super(type); + this.type = type; + this.description = type; + this.errorCode = type; + + if (description) { + this.description = description; + } + + if (errorCode) { + this.errorCode = errorCode; + } + } +} + +export class SubmissionError extends Error { + status: SUBMISSION_STATUS_TYPE; + submissionMessages: MessageError[]; + + constructor(params: { status?: SUBMISSION_STATUS_TYPE; messages?: MessageError[] }) { + const { status, messages } = params; + super(status || SUBMISSION_STATUS_TYPE.REJECTED); + + this.status = status || SUBMISSION_STATUS_TYPE.REJECTED; + this.submissionMessages = messages || []; + } + + setStatus(status: SUBMISSION_STATUS_TYPE) { + this.status = status; + } +} + +export class SummarySubmissionError extends Error { + summarySubmissionMessages: MessageError[]; + + constructor(params: { messages?: MessageError[] }) { + super(SUBMISSION_MESSAGE_TYPE.FAILED_PARSE_SUBMISSION); + const { messages } = params; + + this.summarySubmissionMessages = messages || []; + } +} diff --git a/app/.pipeline/.eslintrc b/app/.pipeline/.eslintrc new file mode 100644 index 0000000000..1a9658bb1a --- /dev/null +++ b/app/.pipeline/.eslintrc @@ -0,0 +1,21 @@ +{ + "root": true, + "extends": ["eslint:recommended", "plugin:prettier/recommended"], + "parserOptions": { + "ecmaVersion": 2018, + "sourceType": "module" + }, + "plugins": ["prettier"], + "env": { + "es6": true, + "node": true, + "mongo": true, + "mocha": true, + "jest": true, + "jasmine": true + }, + "rules": { + "prettier/prettier": ["warn"], + "no-var": "error" + } +} diff --git a/app/.pipeline/.prettierrc b/app/.pipeline/.prettierrc new file mode 100644 index 0000000000..a064d97523 --- /dev/null +++ b/app/.pipeline/.prettierrc @@ -0,0 +1,16 @@ +{ + "printWidth": 120, + "tabWidth": 2, + "useTabs": false, + "semi": true, + "singleQuote": true, + "trailingComma": "none", + "bracketSpacing": true, + "jsxBracketSameLine": true, + "requirePragma": false, + "insertPragma": false, + "proseWrap": "never", + "endOfLine": "lf", + "arrowParens": "always", + "htmlWhitespaceSensitivity": "ignore" +} \ No newline at end of file diff --git a/app/.pipeline/app.build.js b/app/.pipeline/app.build.js deleted file mode 100644 index 12e8b58640..0000000000 --- a/app/.pipeline/app.build.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict'; -const buildTask = require('./lib/app.build.js'); -const config = require('./config.js'); - -const settings = { ...config, phase: 'build' }; - -buildTask(settings); diff --git a/app/.pipeline/app.deploy.js b/app/.pipeline/app.deploy.js deleted file mode 100644 index b3e617e7c3..0000000000 --- a/app/.pipeline/app.deploy.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict'; -const config = require('./config.js'); -const deployAppTask = require('./lib/app.deploy.js'); - -const settings = { ...config, phase: config.options.env }; - -deployAppTask(settings); diff --git a/app/.pipeline/clean.js b/app/.pipeline/clean.js deleted file mode 100644 index 7bb8c8239f..0000000000 --- a/app/.pipeline/clean.js +++ /dev/null @@ -1,7 +0,0 @@ -'use strict'; -const config = require('./config.js'); -const cleanTask = require('./lib/clean.js'); - -const settings = { ...config, phase: config.options.env }; - -cleanTask(settings); diff --git a/app/.pipeline/config.js b/app/.pipeline/config.js index cf60906e24..282183572c 100644 --- a/app/.pipeline/config.js +++ b/app/.pipeline/config.js @@ -1,35 +1,33 @@ 'use strict'; + +let process = require('process'); + let options = require('pipeline-cli').Util.parseArguments(); // The root config for common values const config = require('../../.config/config.json'); -const defaultHost = 'biohubbc-af2668-dev.apps.silver.devops.gov.bc.ca'; -const defaultHostAPI = 'biohubbc-af2668-api-dev.apps.silver.devops.gov.bc.ca'; +const name = config.module.app; +const apiName = config.module.api; -const name = (config.module && config.module['app']) || 'biohubbc-app'; -const apiName = (config.module && config.module['api']) || 'biohubbc-api'; +const version = config.version; -const changeId = options.pr || `${Math.floor(Date.now() * 1000) / 60.0}`; // aka pull-request or branch -const version = config.version || '1.0.0'; +const changeId = options.pr; // pull-request number or branch name // A static deployment is when the deployment is updating dev, test, or prod (rather than a temporary PR) +// See `--type=static` in the `deployStatic.yml` git workflow const isStaticDeployment = options.type === 'static'; const deployChangeId = (isStaticDeployment && 'deploy') || changeId; const branch = (isStaticDeployment && options.branch) || null; const tag = (branch && `build-${version}-${changeId}-${branch}`) || `build-${version}-${changeId}`; -const staticBranches = config.staticBranches || []; const staticUrls = config.staticUrls || {}; const staticUrlsAPI = config.staticUrlsAPI || {}; -const staticUrlsN8N = config.staticUrlsN8N || {}; const maxUploadNumFiles = 10; const maxUploadFileSize = 52428800; // (bytes) -const sso = config.sso; - const processOptions = (options) => { const result = { ...options }; @@ -65,7 +63,11 @@ const phases = { version: `${version}-${changeId}`, tag: tag, env: 'build', - branch: branch + branch: branch, + cpuRequest: '100m', + cpuLimit: '1000m', + memoryRequest: '512Mi', + memoryLimit: '5Gi' }, dev: { namespace: 'af2668-dev', @@ -76,20 +78,20 @@ const phases = { instance: `${name}-dev-${deployChangeId}`, version: `${deployChangeId}-${changeId}`, tag: `dev-${version}-${deployChangeId}`, - host: - (isStaticDeployment && (staticUrls.dev || defaultHost)) || - `${name}-${changeId}-af2668-dev.apps.silver.devops.gov.bc.ca`, + host: (isStaticDeployment && staticUrls.dev) || `${name}-${changeId}-af2668-dev.apps.silver.devops.gov.bc.ca`, apiHost: - (isStaticDeployment && (staticUrlsAPI.dev || defaultHostAPI)) || - `${apiName}-${changeId}-af2668-dev.apps.silver.devops.gov.bc.ca`, - n8nHost: '', // staticUrlsN8N.dev, // Disable until nginx is setup: https://quartech.atlassian.net/browse/BHBC-1435 + (isStaticDeployment && staticUrlsAPI.dev) || `${apiName}-${changeId}-af2668-dev.apps.silver.devops.gov.bc.ca`, siteminderLogoutURL: config.siteminderLogoutURL.dev, maxUploadNumFiles, maxUploadFileSize, env: 'dev', - sso: sso.dev, - replicas: 1, - maxReplicas: 1 + sso: config.sso.dev, + cpuRequest: '50m', + cpuLimit: (isStaticDeployment && '300m') || '200m', + memoryRequest: '50Mi', + memoryLimit: (isStaticDeployment && '300Mi') || '200Mi', + replicas: '1', + replicasMax: (isStaticDeployment && '2') || '1' }, test: { namespace: 'af2668-test', @@ -101,15 +103,19 @@ const phases = { version: `${version}`, tag: `test-${version}`, host: staticUrls.test, - apiHost: staticUrlsAPI.test || defaultHostAPI, + apiHost: staticUrlsAPI.test, n8nHost: '', // staticUrlsN8N.test, // Disable until nginx is setup: https://quartech.atlassian.net/browse/BHBC-1435 siteminderLogoutURL: config.siteminderLogoutURL.test, maxUploadNumFiles, maxUploadFileSize, env: 'test', - sso: sso.test, - replicas: 3, - maxReplicas: 5 + sso: config.sso.test, + cpuRequest: '100m', + cpuLimit: '400m', + memoryRequest: '100Mi', + memoryLimit: '400Mi', + replicas: '2', + replicasMax: '3' }, prod: { namespace: 'af2668-prod', @@ -121,22 +127,25 @@ const phases = { version: `${version}`, tag: `prod-${version}`, host: staticUrls.prod, - apiHost: staticUrlsAPI.prod || defaultHostAPI, - n8nHost: '', // staticUrlsN8N.prod, // Disable until nginx is setup: https://quartech.atlassian.net/browse/BHBC-1435 + apiHost: staticUrlsAPI.prod, siteminderLogoutURL: config.siteminderLogoutURL.prod, maxUploadNumFiles, maxUploadFileSize, env: 'prod', - sso: sso.prod, - replicas: 3, - maxReplicas: 6 + sso: config.sso.prod, + cpuRequest: '100m', + cpuLimit: '400m', + memoryRequest: '100Mi', + memoryLimit: '400Mi', + replicas: '2', + replicasMax: '4' } }; // This callback forces the node process to exit as failure. -process.on('unhandledRejection', (reason) => { - console.log(reason); +process.on('unhandledRejection', (reason, promise) => { + console.log('Unhandled Rejection at:', promise, 'reason:', reason); process.exit(1); }); -module.exports = exports = { phases, options, staticBranches }; +module.exports = exports = { phases, options }; diff --git a/app/.pipeline/lib/app.build.js b/app/.pipeline/lib/app.build.js index c83449cfe3..f9fa3eb5d0 100644 --- a/app/.pipeline/lib/app.build.js +++ b/app/.pipeline/lib/app.build.js @@ -1,15 +1,16 @@ 'use strict'; + const { OpenShiftClientX } = require('pipeline-cli'); const path = require('path'); -module.exports = (settings) => { +const appBuild = (settings) => { const phases = settings.phases; const options = settings.options; const phase = 'build'; - const oc = new OpenShiftClientX(Object.assign({ namespace: phases.build.namespace }, options)); + const oc = new OpenShiftClientX(Object.assign({ namespace: phases[phase].namespace }, options)); - const templatesLocalBaseUrl = oc.toFileUrl(path.resolve(__dirname, '../../openshift')); + const templatesLocalBaseUrl = oc.toFileUrl(path.resolve(__dirname, '../templates')); let objects = []; @@ -20,7 +21,11 @@ module.exports = (settings) => { SUFFIX: phases[phase].suffix, VERSION: phases[phase].tag, SOURCE_REPOSITORY_URL: oc.git.http_url, - SOURCE_REPOSITORY_REF: phases[phase].branch || oc.git.ref + SOURCE_REPOSITORY_REF: phases[phase].branch || oc.git.ref, + CPU_REQUEST: phases[phase].cpuRequest, + CPU_LIMIT: phases[phase].cpuLimit, + MEMORY_REQUEST: phases[phase].memoryRequest, + MEMORY_LIMIT: phases[phase].memoryLimit } }) ); @@ -29,3 +34,5 @@ module.exports = (settings) => { console.log(`${JSON.stringify(objects, null, 2)}`); oc.applyAndBuild(objects); }; + +module.exports = { appBuild }; diff --git a/app/.pipeline/lib/app.deploy.js b/app/.pipeline/lib/app.deploy.js index 00b9aef0be..fe401a756c 100644 --- a/app/.pipeline/lib/app.deploy.js +++ b/app/.pipeline/lib/app.deploy.js @@ -1,15 +1,16 @@ 'use strict'; + const { OpenShiftClientX } = require('pipeline-cli'); const path = require('path'); -module.exports = (settings) => { +const appDeploy = (settings) => { const phases = settings.phases; const options = settings.options; const phase = options.env; const oc = new OpenShiftClientX(Object.assign({ namespace: phases[phase].namespace }, options)); - const templatesLocalBaseUrl = oc.toFileUrl(path.resolve(__dirname, '../../openshift')); + const templatesLocalBaseUrl = oc.toFileUrl(path.resolve(__dirname, '../templates')); const changeId = phases[phase].changeId; @@ -30,11 +31,15 @@ module.exports = (settings) => { REACT_APP_MAX_UPLOAD_FILE_SIZE: phases[phase].maxUploadFileSize, NODE_ENV: phases[phase].env || 'dev', REACT_APP_NODE_ENV: phases[phase].env || 'dev', - SSO_URL: phases[phase].sso.url, - SSO_CLIENT_ID: phases[phase].sso.clientId, - SSO_REALM: phases[phase].sso.realm, - REPLICAS: phases[phase].replicas || 1, - REPLICA_MAX: phases[phase].maxReplicas || 1 + REACT_APP_KEYCLOAK_HOST: phases[phase].sso.url, + REACT_APP_KEYCLOAK_REALM: phases[phase].sso.realm, + REACT_APP_KEYCLOAK_CLIENT_ID: phases[phase].sso.clientId, + CPU_REQUEST: phases[phase].cpuRequest, + CPU_LIMIT: phases[phase].cpuLimit, + MEMORY_REQUEST: phases[phase].memoryRequest, + MEMORY_LIMIT: phases[phase].memoryLimit, + REPLICAS: phases[phase].replicas, + REPLICAS_MAX: phases[phase].replicasMax } }) ); @@ -44,3 +49,5 @@ module.exports = (settings) => { oc.applyAndDeploy(objects, phases[phase].instance); }; + +module.exports = { appDeploy }; diff --git a/app/.pipeline/lib/clean.js b/app/.pipeline/lib/clean.js index 9d0ab2cfa4..5f5423386d 100644 --- a/app/.pipeline/lib/clean.js +++ b/app/.pipeline/lib/clean.js @@ -1,4 +1,5 @@ 'use strict'; + const { OpenShiftClientX } = require('pipeline-cli'); /** @@ -6,7 +7,7 @@ const { OpenShiftClientX } = require('pipeline-cli'); * * @param {*} settings */ -module.exports = (settings) => { +const clean = (settings) => { const phases = settings.phases; const options = settings.options; const target_phase = options.env; @@ -14,7 +15,7 @@ module.exports = (settings) => { const oc = new OpenShiftClientX(Object.assign({ namespace: phases.build.namespace }, options)); for (let phaseKey in phases) { - if (!phases.hasOwnProperty(phaseKey)) { + if (!Object.prototype.hasOwnProperty.call(phases, phaseKey)) { continue; } @@ -73,3 +74,5 @@ module.exports = (settings) => { }); } }; + +module.exports = { clean }; diff --git a/app/.pipeline/npmw b/app/.pipeline/npmw deleted file mode 100644 index 1eed7c9536..0000000000 --- a/app/.pipeline/npmw +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/sh -set +x -type -t nvm && nvm deactivate -export NVM_DIR="$(git rev-parse --show-toplevel)/.nvm" -if [ ! -f "$NVM_DIR/nvm.sh" ]; then - mkdir -p "${NVM_DIR}" - curl -sSL -o- https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash &>/dev/null -fi -source "$NVM_DIR/nvm.sh" &>/dev/null -METHOD=script nvm install --no-progress &>/dev/null -nvm use &>/dev/null -exec npm "$@" diff --git a/app/.pipeline/package-lock.json b/app/.pipeline/package-lock.json index 4438b4d725..a31f39821e 100644 --- a/app/.pipeline/package-lock.json +++ b/app/.pipeline/package-lock.json @@ -5,9 +5,9 @@ "requires": true, "dependencies": { "debug": { - "version": "4.3.1", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", - "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "requires": { "ms": "2.1.2" } @@ -15,7 +15,7 @@ "lodash.isempty": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/lodash.isempty/-/lodash.isempty-4.4.0.tgz", - "integrity": "sha1-b4bL7di+TsmHvpqvM8loTbGzHn4=" + "integrity": "sha512-oKMuF3xEeqDltrGMfDxAPGIVMSSRv8tbRSODbrs4KGsRRLEhrW8N8Rd4DRgB2+621hY8A8XwwrTVhXWpxFvMzg==" }, "lodash.isfunction": { "version": "3.0.9", @@ -25,12 +25,12 @@ "lodash.isplainobject": { "version": "4.0.6", "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", - "integrity": "sha1-fFJqUtibRcRcxpC4gWO+BJf1UMs=" + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" }, "lodash.isstring": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", - "integrity": "sha1-1SfftUVuynzJu5XV2ur4i6VKVFE=" + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==" }, "ms": { "version": "2.1.2", @@ -38,8 +38,8 @@ "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" }, "pipeline-cli": { - "version": "git+https://github.com/BCDevOps/pipeline-cli.git#256319700bc36145fea8511aa9a66f8f8f577926", - "from": "git+https://github.com/BCDevOps/pipeline-cli.git", + "version": "github:NickPhura/pipeline-cli#ef145da35393eb92f71f19aef6aad768f3534a5e", + "from": "github:NickPhura/pipeline-cli#master", "requires": { "debug": "^4.2.0", "lodash.isempty": "^4.0.1", @@ -47,6 +47,12 @@ "lodash.isplainobject": "^4.0.6", "lodash.isstring": "^4.0.1" } + }, + "prettier": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.3.2.tgz", + "integrity": "sha512-lnJzDfJ66zkMy58OL5/NY5zp70S7Nz6KqcKkXYzn2tMVrNxvbqaBpg7H3qHaLxCJ5lNMsGuM8+ohS7cZrthdLQ==", + "dev": true } } } diff --git a/app/.pipeline/package.json b/app/.pipeline/package.json index 3541a1ed06..9d9a8341f2 100644 --- a/app/.pipeline/package.json +++ b/app/.pipeline/package.json @@ -12,17 +12,23 @@ "url": "git+https://github.com/bcgov/ocp-sso.git" }, "scripts": { - "build": "node app.build.js", - "deploy": "node app.deploy.js", - "clean": "node clean.js", - "version": "echo \"node@$(node --version) ($(which node))\" && echo \"npm@$(npm --version) ($(which npm))\" && npm ls" + "build": "node scripts/app.build.js", + "deploy": "node scripts/app.deploy.js", + "clean": "node scripts/clean.js", + "lint": "eslint . --ignore-pattern 'node_modules' --ext .js,.ts", + "lint-fix": "eslint . --fix --ignore-pattern 'node_modules' --ext .js,.ts", + "format": "prettier --check \"./**/*.{js,jsx,ts,tsx,css,scss}\"", + "format-fix": "prettier --write \"./**/*.{js,jsx,ts,tsx,json,css,scss}\"" }, "dependencies": { - "pipeline-cli": "git+https://github.com/BCDevOps/pipeline-cli.git", "debug": "^4.2.0", "lodash.isempty": "^4.0.1", "lodash.isfunction": "^3.0.9", "lodash.isplainobject": "^4.0.6", - "lodash.isstring": "^4.0.1" + "lodash.isstring": "^4.0.1", + "pipeline-cli": "NickPhura/pipeline-cli#master" + }, + "devDependencies": { + "prettier": "~2.3.2" } } diff --git a/app/.pipeline/scripts/app.build.js b/app/.pipeline/scripts/app.build.js new file mode 100644 index 0000000000..aeafd09c2a --- /dev/null +++ b/app/.pipeline/scripts/app.build.js @@ -0,0 +1,8 @@ +'use strict'; + +const config = require('../config.js'); +const { appBuild } = require('../lib/app.build.js'); + +const settings = { ...config, phase: 'build' }; + +appBuild(settings); diff --git a/app/.pipeline/scripts/app.deploy.js b/app/.pipeline/scripts/app.deploy.js new file mode 100644 index 0000000000..28c6eb4791 --- /dev/null +++ b/app/.pipeline/scripts/app.deploy.js @@ -0,0 +1,8 @@ +'use strict'; + +const { appDeploy } = require('../lib/app.deploy.js'); +const config = require('../config.js'); + +const settings = { ...config, phase: config.options.env }; + +appDeploy(settings); diff --git a/app/.pipeline/scripts/clean.js b/app/.pipeline/scripts/clean.js new file mode 100644 index 0000000000..62c6a35451 --- /dev/null +++ b/app/.pipeline/scripts/clean.js @@ -0,0 +1,9 @@ +'use strict'; + +const { clean } = require('../lib/clean.js'); +const config = require('../config.js'); + +const settings = { ...config, phase: config.options.env }; + +// Cleans all build and deployment artifacts (pods, etc) +clean(settings); diff --git a/app/openshift/app.bc.yaml b/app/.pipeline/templates/app.bc.yaml similarity index 86% rename from app/openshift/app.bc.yaml rename to app/.pipeline/templates/app.bc.yaml index 9469d3c07a..14aaeec846 100644 --- a/app/openshift/app.bc.yaml +++ b/app/.pipeline/templates/app.bc.yaml @@ -1,10 +1,9 @@ apiVersion: template.openshift.io/v1 kind: Template metadata: - creationTimestamp: null + name: biohubbc-app-bc labels: build: biohubbc-app - name: biohubbc-app parameters: - name: NAME displayName: Name @@ -37,6 +36,14 @@ parameters: - name: SOURCE_IMAGE_TAG required: true value: 14-ubi8 + - name: CPU_REQUEST + value: '100m' + - name: CPU_LIMIT + value: '1000m' + - name: MEMORY_REQUEST + value: '512Mi' + - name: MEMORY_LIMIT + value: '5Gi' objects: - kind: ImageStream apiVersion: image.openshift.io/v1 @@ -59,6 +66,7 @@ objects: importPolicy: {} referencePolicy: type: Local + - kind: ImageStream apiVersion: image.openshift.io/v1 metadata: @@ -71,8 +79,9 @@ objects: spec: lookupPolicy: local: false - - apiVersion: v1 - kind: BuildConfig + + - kind: BuildConfig + apiVersion: build.openshift.io/v1 metadata: name: '${NAME}${SUFFIX}' creationTimestamp: @@ -87,12 +96,12 @@ objects: name: '${NAME}:${VERSION}' postCommit: {} resources: - limits: - cpu: 1000m - memory: 5Gi requests: - cpu: 100m - memory: 512Mi + cpu: ${CPU_REQUEST} + memory: ${MEMORY_REQUEST} + limits: + cpu: ${CPU_LIMIT} + memory: ${MEMORY_LIMIT} runPolicy: Serial source: contextDir: '${SOURCE_CONTEXT_DIR}' @@ -110,5 +119,3 @@ objects: triggers: - type: ConfigChange - type: ImageChange - status: - lastVersion: 7 diff --git a/app/openshift/app.dc.yaml b/app/.pipeline/templates/app.dc.yaml similarity index 79% rename from app/openshift/app.dc.yaml rename to app/.pipeline/templates/app.dc.yaml index c7af3ad6a9..9ff3b76da6 100644 --- a/app/openshift/app.dc.yaml +++ b/app/.pipeline/templates/app.dc.yaml @@ -1,9 +1,9 @@ apiVersion: template.openshift.io/v1 kind: Template metadata: - resourceVersion: '' - selfLink: '' name: biohubbc-app-dc + labels: + build: biohubbc-app parameters: - name: NAME value: biohubbc-app @@ -44,17 +44,23 @@ parameters: - name: APP_PORT_DEFAULT_NAME description: Default port resource name value: '7100-tcp' - - name: SSO_URL + - name: REACT_APP_KEYCLOAK_HOST description: Key clock login url - value: 'https://oidc.gov.bc.ca/auth' - - name: SSO_CLIENT_ID - description: Client Id for application - value: 'lucy' - - name: SSO_REALM + required: true + - name: REACT_APP_KEYCLOAK_REALM description: Realm identifier or name - value: 35r1iman + required: true + - name: REACT_APP_KEYCLOAK_CLIENT_ID + description: Client Id for application + required: true + - name: REACT_APP_KEYCLOAK_INTEGRATION_ID + description: integration Id + - name: REACT_APP_KEYCLOAK_ADMIN_HOST + description: admin host + - name: REACT_APP_KEYCLOAK_API_HOST + description: api host - name: CPU_REQUEST - value: '10m' + value: '50m' - name: CPU_LIMIT value: '200m' - name: MEMORY_REQUEST @@ -63,12 +69,11 @@ parameters: value: '200Mi' - name: REPLICAS value: '1' - - name: REPLICA_MAX - required: true + - name: REPLICAS_MAX value: '1' objects: - - apiVersion: image.openshift.io/v1 - kind: ImageStream + - kind: ImageStream + apiVersion: image.openshift.io/v1 metadata: annotations: description: Nodejs Runtime Image @@ -80,10 +85,11 @@ objects: spec: lookupPolicy: local: false - status: - dockerImageRepository: null - - apiVersion: apps.openshift.io/v1 - kind: DeploymentConfig + status: + dockerImageRepository: null + + - kind: DeploymentConfig + apiVersion: apps.openshift.io/v1 metadata: annotations: openshift.io/generated-by: OpenShiftWebConsole @@ -93,7 +99,7 @@ objects: role: app name: ${NAME}${SUFFIX} spec: - replicas: '${{REPLICAS}}' + replicas: ${{REPLICAS}} revisionHistoryLimit: 10 selector: deploymentConfig: ${NAME}${SUFFIX} @@ -102,12 +108,12 @@ objects: recreateParams: timeoutSeconds: 600 resources: - limits: - cpu: ${CPU_LIMIT} - memory: ${MEMORY_LIMIT} requests: cpu: ${CPU_REQUEST} memory: ${MEMORY_REQUEST} + limits: + cpu: ${CPU_LIMIT} + memory: ${MEMORY_LIMIT} type: Rolling template: metadata: @@ -147,12 +153,14 @@ objects: value: ${REACT_APP_NODE_ENV} - name: VERSION value: ${VERSION} - - name: SSO_URL - value: ${SSO_URL} - - name: SSO_CLIENT_ID - value: ${SSO_CLIENT_ID} - - name: SSO_REALM - value: ${SSO_REALM} + - name: REACT_APP_KEYCLOAK_HOST + value: ${REACT_APP_KEYCLOAK_HOST} + - name: REACT_APP_KEYCLOAK_CLIENT_ID + value: ${REACT_APP_KEYCLOAK_CLIENT_ID} + - name: REACT_APP_KEYCLOAK_REALM + value: ${REACT_APP_KEYCLOAK_REALM} + - name: REACT_APP_KEYCLOAK_INTEGRATION_ID + value: ${REACT_APP_KEYCLOAK_INTEGRATION_ID} image: ' ' imagePullPolicy: Always name: app @@ -160,32 +168,32 @@ objects: - containerPort: ${{APP_PORT_DEFAULT}} protocol: TCP resources: - limits: - cpu: ${CPU_LIMIT} - memory: ${MEMORY_LIMIT} requests: cpu: ${CPU_REQUEST} memory: ${MEMORY_REQUEST} + limits: + cpu: ${CPU_LIMIT} + memory: ${MEMORY_LIMIT} readinessProbe: - failureThreshold: 10 httpGet: path: / port: ${{APP_PORT_DEFAULT}} scheme: HTTP - initialDelaySeconds: 65 - periodSeconds: 20 + initialDelaySeconds: 60 + periodSeconds: 30 + timeoutSeconds: 15 successThreshold: 1 - timeoutSeconds: 20 + failureThreshold: 20 livenessProbe: - failureThreshold: 10 httpGet: path: / port: ${{APP_PORT_DEFAULT}} scheme: HTTP - initialDelaySeconds: 65 + initialDelaySeconds: 60 periodSeconds: 30 + timeoutSeconds: 15 successThreshold: 1 - timeoutSeconds: 20 + failureThreshold: 20 terminationMessagePath: /dev/termination-log terminationMessagePolicy: File dnsPolicy: ClusterFirst @@ -207,8 +215,16 @@ objects: name: ${NAME}:${VERSION} type: ImageChange - type: ConfigChange - - apiVersion: v1 - kind: Service + status: + availableReplicas: 0 + latestVersion: 0 + observedGeneration: 0 + replicas: 0 + unavailableReplicas: 0 + updatedReplicas: 0 + + - kind: Service + apiVersion: v1 metadata: annotations: null creationTimestamp: null @@ -226,8 +242,9 @@ objects: type: ClusterIP status: loadBalancer: {} - - apiVersion: route.openshift.io/v1 - kind: Route + + - kind: Route + apiVersion: route.openshift.io/v1 metadata: annotations: {} creationTimestamp: null @@ -247,16 +264,17 @@ objects: wildcardPolicy: None status: ingress: null - - apiVersion: autoscaling/v2beta2 - kind: HorizontalPodAutoscaler + + - kind: HorizontalPodAutoscaler + apiVersion: autoscaling/v2beta2 metadata: annotations: {} creationTimestamp: null labels: {} name: ${NAME}${SUFFIX} spec: - maxReplicas: '${{REPLICA_MAX}}' - minReplicas: '${{REPLICAS}}' + minReplicas: ${{REPLICAS}} + maxReplicas: ${{REPLICAS_MAX}} scaleTargetRef: apiVersion: apps.openshift.io/v1 kind: DeploymentConfig @@ -267,4 +285,4 @@ objects: name: cpu target: type: Utilization - averageUtilization: 95 + averageUtilization: 80 diff --git a/app/openshift/app.bc.json b/app/openshift/app.bc.json deleted file mode 100644 index d4f50de73b..0000000000 --- a/app/openshift/app.bc.json +++ /dev/null @@ -1,172 +0,0 @@ -{ - "apiVersion": "v1", - "kind": "Template", - "metadata": { - "creationTimestamp": null, - "labels": { - "build": "biohubbc-app" - }, - "name": "biohubbc-app" - }, - "parameters": [ - { - "name": "NAME", - "displayName": "Name", - "description": "A suffix appended to all objects", - "required": true, - "value": "biohubbc-app" - }, - { - "name": "SUFFIX", - "displayName": "Name Suffix", - "description": "A suffix appended to all objects", - "required": true, - "value": "dev" - }, - { - "name": "VERSION", - "required": true, - "value": "1.0" - }, - { - "name": "SOURCE_CONTEXT_DIR", - "required": true, - "value": "app" - }, - { - "name": "SOURCE_REPOSITORY_URL", - "required": true, - "value": "https://github.com/bcgov/biohubbc" - }, - { - "name": "SOURCE_REPOSITORY_REF", - "required": false, - "value": "dev" - }, - { - "name": "SOURCE_IMAGE_NAME", - "required": true, - "value": "rhscl-nodejs-8-rhel7" - }, - { - "name": "SOURCE_IMAGE_TAG", - "required": true, - "value": "1-50" - } - ], - "objects":[ - { - "kind": "ImageStream", - "apiVersion": "v1", - "metadata": { - "name": "rhscl-nodejs-8-rhel7", - "creationTimestamp": null, - "annotations": { - "description": "Nodejs Base Image" - }, - "labels": { - "shared": "true" - } - }, - "spec": { - "lookupPolicy": { - "local": false - }, - "tags": [ - { - "name": "1-50", - "annotations": null, - "from": { - "kind": "DockerImage", - "name": "registry.access.redhat.com/rhscl/nodejs-8-rhel7:1-50" - }, - "importPolicy": {}, - "referencePolicy": { - "type": "Local" - } - } - ] - } - }, - { - "kind": "ImageStream", - "apiVersion": "v1", - "metadata": { - "name": "${NAME}", - "creationTimestamp": null, - "annotations": { - "description": "Nodejs Runtime Image" - }, - "labels": { - "shared": "true" - } - }, - "spec": { - "lookupPolicy": { - "local": false - } - } - }, - { - "apiVersion": "v1", - "kind": "BuildConfig", - "metadata": { - "name": "${NAME}${SUFFIX}", - "creationTimestamp": null, - "labels": {}, - "annotations": {} - }, - "spec": { - "failedBuildsHistoryLimit": 5, - "nodeSelector": null, - "output": { - "to": { - "kind": "ImageStreamTag", - "name": "${NAME}:${VERSION}" - } - }, - "postCommit": {}, - "resources": { - "limits": { - "cpu": "1250m", - "memory": "2Gi" - }, - "requests": { - "cpu": "500m", - "memory": "1Gi" - } - }, - "runPolicy": "Serial", - "source": { - "contextDir": "${SOURCE_CONTEXT_DIR}", - "git": { - "uri": "${SOURCE_REPOSITORY_URL}", - "ref": "${SOURCE_REPOSITORY_REF}" - }, - "type": "Git" - }, - "strategy": { - "dockerStrategy": { - "from": { - "kind": "ImageStreamTag", - "name": "rhscl-nodejs-8-rhel7:1-50" - } - }, - "type": "Docker" - }, - "successfulBuildsHistoryLimit": 5, - "triggers": [ - { - "type": "ConfigChange" - }, - { - "type": "ImageChange" - } - ] - }, - "status": { - "lastVersion": 7 - } - } - ] -} diff --git a/app/package-lock.json b/app/package-lock.json index 0da2e56ce1..9299633e6a 100644 --- a/app/package-lock.json +++ b/app/package-lock.json @@ -1765,15 +1765,27 @@ } }, "@material-ui/lab": { - "version": "4.0.0-alpha.60", - "resolved": "https://registry.npmjs.org/@material-ui/lab/-/lab-4.0.0-alpha.60.tgz", - "integrity": "sha512-fadlYsPJF+0fx2lRuyqAuJj7hAS1tLDdIEEdov5jlrpb5pp4b+mRDUqQTUxi4inRZHS1bEXpU8QWUhO6xX88aA==", + "version": "4.0.0-alpha.61", + "resolved": "https://registry.npmjs.org/@material-ui/lab/-/lab-4.0.0-alpha.61.tgz", + "integrity": "sha512-rSzm+XKiNUjKegj8bzt5+pygZeckNLOr+IjykH8sYdVk7dE9y2ZuUSofiMV2bJk3qU+JHwexmw+q0RyNZB9ugg==", "requires": { "@babel/runtime": "^7.4.4", - "@material-ui/utils": "^4.11.2", + "@material-ui/utils": "^4.11.3", "clsx": "^1.0.4", "prop-types": "^15.7.2", "react-is": "^16.8.0 || ^17.0.0" + }, + "dependencies": { + "@material-ui/utils": { + "version": "4.11.3", + "resolved": "https://registry.npmjs.org/@material-ui/utils/-/utils-4.11.3.tgz", + "integrity": "sha512-ZuQPV4rBK/V1j2dIkSSEcH5uT6AaHuKWFfotADHsC0wVL1NLd2WkFCm4ZZbX33iO4ydl6V0GPngKm8HZQ2oujg==", + "requires": { + "@babel/runtime": "^7.4.4", + "prop-types": "^15.7.2", + "react-is": "^16.8.0 || ^17.0.0" + } + } } }, "@material-ui/pickers": { @@ -1839,9 +1851,9 @@ } }, "@mdi/js": { - "version": "6.4.95", - "resolved": "https://registry.npmjs.org/@mdi/js/-/js-6.4.95.tgz", - "integrity": "sha512-b1/P//1D2KOzta8YRGyoSLGsAlWyUHfxzVBhV4e/ppnjM4DfBgay/vWz7Eg5Ee80JZ4zsQz8h54X+KOahtBk5Q==" + "version": "6.9.96", + "resolved": "https://registry.npmjs.org/@mdi/js/-/js-6.9.96.tgz", + "integrity": "sha512-rK0/vLFaiItYS2W7uVmaKPKnhNQE4XVkylpk5njtVwENnp8elwY5uRL6qvdj2esuvUHG7DwygE4Qu3eKxxuJiQ==" }, "@mdi/react": { "version": "1.4.0", @@ -2165,6 +2177,16 @@ "@testing-library/dom": "^7.28.1" } }, + "@testing-library/react-hooks": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/@testing-library/react-hooks/-/react-hooks-8.0.1.tgz", + "integrity": "sha512-Aqhl2IVmLt8IovEVarNDFuJDVWVvhnr9/GCU6UUnrYXwgDFF9h2L2o2P9KBni1AST5sT6riAyoukFLyjQUgD/g==", + "dev": true, + "requires": { + "@babel/runtime": "^7.12.5", + "react-error-boundary": "^3.1.0" + } + }, "@testing-library/user-event": { "version": "12.6.0", "resolved": "https://registry.npmjs.org/@testing-library/user-event/-/user-event-12.6.0.tgz", @@ -2438,14 +2460,23 @@ "dev": true }, "@types/leaflet": { - "version": "1.5.23", - "resolved": "https://registry.npmjs.org/@types/leaflet/-/leaflet-1.5.23.tgz", - "integrity": "sha512-S/xpuwjZuwYMP+4ZzQ10PX0Jy+0XmwPeojtjqhbca9UXaINdoru91Qm/DUUXyh4qYm3CP6Vher06l/UcA9tUKQ==", + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/@types/leaflet/-/leaflet-1.8.0.tgz", + "integrity": "sha512-+sXFmiJTFdhaXXIGFlV5re9AdqtAODoXbGAvxx02e5SHXL3ir7ClP5J7pahO8VmzKY3dth4RUS1nf2BTT+DW1A==", "dev": true, "requires": { "@types/geojson": "*" } }, + "@types/leaflet-draw": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/leaflet-draw/-/leaflet-draw-1.0.5.tgz", + "integrity": "sha512-m/vmhI1XjiBZphwKwmmcOoxbMd0ybN78uZb1c9//zOG0aWVYJd3XTnIW1pnPhxhIB4EB1tgjmWjq0YQM/IyuXg==", + "dev": true, + "requires": { + "@types/leaflet": "*" + } + }, "@types/leaflet-fullscreen": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/@types/leaflet-fullscreen/-/leaflet-fullscreen-1.0.6.tgz", @@ -4010,7 +4041,8 @@ "base64-js": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.1.tgz", - "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==" + "integrity": "sha512-mLQ4i2QO1ytvGWFWmcngKO//JXAQueZvwEKtjgQFM4jIK0kU+ytMfplL8j+n5mspOfjHwoAg+9yhb7BwAHm36g==", + "dev": true }, "batch": { "version": "0.6.1", @@ -10679,12 +10711,19 @@ } }, "keycloak-js": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/keycloak-js/-/keycloak-js-9.0.3.tgz", - "integrity": "sha512-c8FFPa8YiJmPbJEMZ/mIrHflBR6FIFUm5xTWtIDzlrnoeF4u0wDmTBfo1u71rWIL1HanLvg3T+9AgR1NqfmGbA==", + "version": "20.0.2", + "resolved": "https://registry.npmjs.org/keycloak-js/-/keycloak-js-20.0.2.tgz", + "integrity": "sha512-RPLeBdrsB4ybc2tWL5R+tUqdUd62ytZLVT7AWcCCqMWKuQ0AbmrOtGaPnnWMzS/3XJH447nDq1ulUOGzpj41LQ==", "requires": { - "base64-js": "1.3.1", - "js-sha256": "0.9.0" + "base64-js": "^1.5.1", + "js-sha256": "^0.9.0" + }, + "dependencies": { + "base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" + } } }, "killable": { @@ -14084,6 +14123,15 @@ } } }, + "react-error-boundary": { + "version": "3.1.4", + "resolved": "https://registry.npmjs.org/react-error-boundary/-/react-error-boundary-3.1.4.tgz", + "integrity": "sha512-uM9uPzZJTF6wRQORmSrvOIgt4lJ9MC1sNgEOj2XGsDTRE4kmpWxg7ENK9EWNKJRMAOY9z0MuF4yIfl6gp4sotA==", + "dev": true, + "requires": { + "@babel/runtime": "^7.12.5" + } + }, "react-error-overlay": { "version": "6.0.8", "resolved": "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-6.0.8.tgz", diff --git a/app/package.json b/app/package.json index 62060a2c0b..2708a1d456 100644 --- a/app/package.json +++ b/app/package.json @@ -32,7 +32,7 @@ "@material-ui/pickers": "~3.2.10", "@material-ui/styles": "~4.10.0", "@material-ui/system": "4.11.3", - "@mdi/js": "~6.4.95", + "@mdi/js": "~6.9.96", "@mdi/react": "~1.4.0", "@react-keycloak/web": "~2.1.0", "@react-leaflet/core": "~1.0.2", @@ -45,7 +45,7 @@ "clsx": "~1.1.1", "express": "~4.17.1", "formik": "~2.2.6", - "keycloak-js": "~9.0.2", + "keycloak-js": "^20.0.2", "leaflet": "~1.7.1", "leaflet-draw": "~1.0.4", "leaflet-fullscreen": "~1.0.2", @@ -73,10 +73,12 @@ "@babel/preset-typescript": "~7.12.7", "@testing-library/jest-dom": "~5.11.8", "@testing-library/react": "~11.2.3", + "@testing-library/react-hooks": "^8.0.1", "@testing-library/user-event": "~12.6.0", "@types/geojson": "~7946.0.7", "@types/jest": "~26.0.20", - "@types/leaflet": "~1.5.23", + "@types/leaflet": "^1.8.0", + "@types/leaflet-draw": "^1.0.5", "@types/leaflet-fullscreen": "~1.0.6", "@types/lodash-es": "~4.17.4", "@types/node": "~14.14.31", diff --git a/app/server/index.js b/app/server/index.js index 9d68cfc3cb..f73b5366d5 100644 --- a/app/server/index.js +++ b/app/server/index.js @@ -49,9 +49,9 @@ const request = require('request'); REACT_APP_NODE_ENV: process.env.REACT_APP_NODE_ENV || 'dev', VERSION: `${process.env.VERSION || 'NA'}(build #${process.env.CHANGE_VERSION || 'NA'})`, KEYCLOAK_CONFIG: { - url: process.env.SSO_URL || 'https://dev.oidc.gov.bc.ca/auth', - realm: process.env.SSO_REALM || '35r1iman', - clientId: process.env.SSO_CLIENT_ID || 'biohubbc' + url: process.env.REACT_APP_KEYCLOAK_HOST || 'https://dev.oidc.gov.bc.ca/auth', + realm: process.env.REACT_APP_KEYCLOAK_REALM || '35r1iman', + clientId: process.env.REACT_APP_KEYCLOAK_CLIENT_ID || 'biohubbc' }, SITEMINDER_LOGOUT_URL: process.env.REACT_APP_SITEMINDER_LOGOUT_URL || 'https://logontest7.gov.bc.ca/clp-cgi/logoff.cgi', diff --git a/app/src/App.tsx b/app/src/App.tsx index f85c1abf97..e55f9a6cca 100644 --- a/app/src/App.tsx +++ b/app/src/App.tsx @@ -6,7 +6,7 @@ import { KeycloakProvider } from '@react-keycloak/web'; import AppRouter from 'AppRouter'; import { AuthStateContextProvider } from 'contexts/authStateContext'; import { ConfigContext, ConfigContextProvider } from 'contexts/configContext'; -import Keycloak, { KeycloakInstance } from 'keycloak-js'; +import Keycloak from 'keycloak-js'; import React from 'react'; import { BrowserRouter } from 'react-router-dom'; import appTheme from 'themes/appTheme'; @@ -21,11 +21,11 @@ const App: React.FC = () => { return ; } - //@ts-ignore - const keycloak: KeycloakInstance = new Keycloak(config.KEYCLOAK_CONFIG); + const keycloak = Keycloak(config.KEYCLOAK_CONFIG); return ( }> diff --git a/app/src/AppRouter.tsx b/app/src/AppRouter.tsx index bc339354ef..d605b72b8b 100644 --- a/app/src/AppRouter.tsx +++ b/app/src/AppRouter.tsx @@ -5,16 +5,15 @@ import { } from 'components/security/RouteGuards'; import { SYSTEM_ROLE } from 'constants/roles'; import AdminUsersRouter from 'features/admin/AdminUsersRouter'; -import PermitsRouter from 'features/permits/PermitsRouter'; import ProjectsRouter from 'features/projects/ProjectsRouter'; -import PublicProjectsRouter from 'features/projects/PublicProjectsRouter'; import ResourcesPage from 'features/resources/ResourcesPage'; import SearchPage from 'features/search/SearchPage'; -import PublicLayout from 'layouts/PublicLayout'; +import BaseLayout from 'layouts/BaseLayout'; import RequestSubmitted from 'pages/200/RequestSubmitted'; import AccessDenied from 'pages/403/AccessDenied'; import NotFoundPage from 'pages/404/NotFoundPage'; import AccessRequestPage from 'pages/access/AccessRequestPage'; +import { LandingPage } from 'pages/landing/LandingPage'; import LogOutPage from 'pages/logout/LogOutPage'; import React from 'react'; import { Redirect, Switch, useLocation } from 'react-router-dom'; @@ -31,39 +30,19 @@ const AppRouter: React.FC = () => { - - - - - - - - - - - - - - - - - - - + - + - - - - + + - + @@ -71,13 +50,13 @@ const AppRouter: React.FC = () => { - + - + @@ -85,24 +64,30 @@ const AppRouter: React.FC = () => { - + - + - + - + - + + + + + + + diff --git a/app/src/components/attachments/AttachmentsList.test.tsx b/app/src/components/attachments/AttachmentsList.test.tsx index 1b3929622e..88326c24a8 100644 --- a/app/src/components/attachments/AttachmentsList.test.tsx +++ b/app/src/components/attachments/AttachmentsList.test.tsx @@ -18,7 +18,7 @@ const mockBiohubApi = ((useBiohubApi as unknown) as jest.Mock { +describe.skip('AttachmentsList', () => { beforeEach(() => { // clear mocks before each test mockBiohubApi().project.getAttachmentSignedURL.mockClear(); @@ -36,7 +36,6 @@ describe('AttachmentsList', () => { fileType: AttachmentType.OTHER, lastModified: '2021-04-09 11:53:53', size: 3028, - securityToken: null, revisionCount: 1 }, { @@ -45,7 +44,6 @@ describe('AttachmentsList', () => { fileType: AttachmentType.REPORT, lastModified: '2021-04-09 11:53:53', size: 30280000, - securityToken: null, revisionCount: 1 }, { @@ -54,18 +52,17 @@ describe('AttachmentsList', () => { fileType: AttachmentType.OTHER, lastModified: '2021-04-09 11:53:53', size: 30280000000, - securityToken: null, revisionCount: 1 } ]; - it('renders correctly with no attachments', () => { + it('renders correctly with no Documents', () => { const { getByText } = render(); - expect(getByText('No Attachments')).toBeInTheDocument(); + expect(getByText('No Documents')).toBeInTheDocument(); }); - it('renders correctly with attachments (of various sizes)', async () => { + it.skip('renders correctly with attachments (of various sizes)', async () => { const { getByText } = render( ); @@ -114,81 +111,4 @@ describe('AttachmentsList', () => { expect(window.open).toHaveBeenCalledWith(signedUrl); }); }); - - it('changing pages displays the correct rows as expected', () => { - const largeAttachmentsList = [ - { ...attachmentsList[0] }, - { - ...attachmentsList[0], - id: 2, - fileName: 'filename2.test' - }, - { - ...attachmentsList[0], - id: 3, - fileName: 'filename3.test' - }, - { - ...attachmentsList[0], - id: 4, - fileName: 'filename4.test' - }, - { - ...attachmentsList[0], - id: 5, - fileName: 'filename5.test' - }, - { - ...attachmentsList[0], - id: 6, - fileName: 'filename6.test' - }, - { - ...attachmentsList[0], - id: 7, - fileName: 'filename7.test' - }, - { - ...attachmentsList[0], - id: 8, - fileName: 'filename8.test' - }, - { - ...attachmentsList[0], - id: 9, - fileName: 'filename9.test' - }, - { - ...attachmentsList[0], - id: 10, - fileName: 'filename10.test' - }, - { - ...attachmentsList[0], - id: 11, - fileName: 'filename11.test' - } - ]; - - const { getByText, queryByText, getByLabelText } = render( - - ); - - expect(getByText('filename.test')).toBeInTheDocument(); - expect(getByText('filename2.test')).toBeInTheDocument(); - expect(getByText('filename3.test')).toBeInTheDocument(); - expect(getByText('filename4.test')).toBeInTheDocument(); - expect(getByText('filename5.test')).toBeInTheDocument(); - expect(getByText('filename6.test')).toBeInTheDocument(); - expect(getByText('filename7.test')).toBeInTheDocument(); - expect(getByText('filename8.test')).toBeInTheDocument(); - expect(getByText('filename9.test')).toBeInTheDocument(); - expect(getByText('filename10.test')).toBeInTheDocument(); - expect(queryByText('filename11.test')).toBeNull(); - - fireEvent.click(getByLabelText('Next page')); - - expect(getByText('filename11.test')).toBeInTheDocument(); - expect(queryByText('filename10.test')).toBeNull(); - }); }); diff --git a/app/src/components/attachments/AttachmentsList.tsx b/app/src/components/attachments/AttachmentsList.tsx index f87112059c..3a3c62c329 100644 --- a/app/src/components/attachments/AttachmentsList.tsx +++ b/app/src/components/attachments/AttachmentsList.tsx @@ -1,5 +1,5 @@ import Box from '@material-ui/core/Box'; -import Button from '@material-ui/core/Button'; +import { grey } from '@material-ui/core/colors'; import IconButton from '@material-ui/core/IconButton'; import Link from '@material-ui/core/Link'; import ListItemIcon from '@material-ui/core/ListItemIcon'; @@ -12,41 +12,33 @@ import TableBody from '@material-ui/core/TableBody'; import TableCell from '@material-ui/core/TableCell'; import TableContainer from '@material-ui/core/TableContainer'; import TableHead from '@material-ui/core/TableHead'; -import TablePagination from '@material-ui/core/TablePagination'; import TableRow from '@material-ui/core/TableRow'; -import { - mdiDotsVertical, - mdiDownload, - mdiInformationOutline, - mdiLockOpenVariantOutline, - mdiLockOutline, - mdiTrashCanOutline -} from '@mdi/js'; +import Typography from '@material-ui/core/Typography'; +import { mdiDotsVertical, mdiInformationOutline, mdiTrashCanOutline, mdiTrayArrowDown } from '@mdi/js'; import Icon from '@mdi/react'; +import AttachmentTypeSelector from 'components/dialog/attachments/AttachmentTypeSelector'; import { IErrorDialogProps } from 'components/dialog/ErrorDialog'; -import { DATE_FORMAT } from 'constants/dateTimeFormats'; +import { AttachmentType } from 'constants/attachments'; import { AttachmentsI18N, EditReportMetaDataI18N } from 'constants/i18n'; import { DialogContext } from 'contexts/dialogContext'; +import { IAttachmentType } from 'features/projects/view/ProjectAttachments'; import { APIError } from 'hooks/api/useAxios'; import { useBiohubApi } from 'hooks/useBioHubApi'; -import { IGetProjectAttachment, IGetReportMetaData } from 'interfaces/useProjectApi.interface'; +import { IGetProjectAttachment } from 'interfaces/useProjectApi.interface'; import { IGetSurveyAttachment } from 'interfaces/useSurveyApi.interface'; -import React, { useContext, useEffect, useState } from 'react'; -import { handleChangePage, handleChangeRowsPerPage } from 'utils/tablePaginationUtils'; -import { getFormattedDate, getFormattedFileSize } from 'utils/Utils'; -import { AttachmentType } from '../../constants/attachments'; -import { IEditReportMetaForm } from '../attachments/EditReportMetaForm'; -import EditFileWithMetaDialog from '../dialog/EditFileWithMetaDialog'; -import ViewFileWithMetaDialog from '../dialog/ViewFileWithMetaDialog'; +import React, { useContext, useState } from 'react'; const useStyles = makeStyles((theme: Theme) => ({ attachmentsTable: { - '& .MuiTableCell-root': { - verticalAlign: 'middle' - } + tableLayout: 'fixed' + }, + attachmentsTableLockIcon: { + marginTop: '3px', + color: grey[600] }, - uploadMenu: { - marginTop: theme.spacing(1) + attachmentNameCol: { + overflow: 'hidden', + textOverflow: 'ellipsis' } })); @@ -54,19 +46,20 @@ export interface IAttachmentsListProps { projectId: number; surveyId?: number; attachmentsList: (IGetProjectAttachment | IGetSurveyAttachment)[]; - getAttachments: (forceFetch: boolean) => void; + selectedAttachments: IAttachmentType[]; + onCheckboxChange?: (attachmentType: IAttachmentType, add: boolean) => void; + onCheckAllChange?: (types: IAttachmentType[]) => void; + getAttachments: (forceFetch: boolean) => Promise<(IGetProjectAttachment | IGetSurveyAttachment)[] | undefined>; } const AttachmentsList: React.FC = (props) => { const classes = useStyles(); const biohubApi = useBiohubApi(); - const [rowsPerPage, setRowsPerPage] = useState(10); - const [page, setPage] = useState(0); + const [rowsPerPage] = useState(10); + const [page] = useState(0); - const [reportMetaData, setReportMetaData] = useState(null); - const [showViewFileWithMetaDialog, setShowViewFileWithMetaDialog] = useState(false); - const [showEditFileWithMetaDialog, setShowEditFileWithMetaDialog] = useState(false); + const [showViewFileWithDetailsDialog, setShowViewFileWithDetailsDialog] = useState(false); const [currentAttachment, setCurrentAttachment] = useState(null); @@ -80,7 +73,24 @@ const AttachmentsList: React.FC = (props) => { const handleViewDetailsClick = (attachment: IGetProjectAttachment | IGetSurveyAttachment) => { setCurrentAttachment(attachment); - getReportMeta(attachment); + setShowViewFileWithDetailsDialog(true); + }; + + const refreshCurrentAttachment = async (id: number, type: string) => { + const updatedAttachments = await props.getAttachments(true); + + if (updatedAttachments) { + const cur = updatedAttachments.find((attachment) => { + if (attachment.id === id && attachment.fileType === type) { + return attachment; + } + return null; + }); + + if (cur) { + setCurrentAttachment(cur); + } + } }; const dialogContext = useContext(DialogContext); @@ -110,12 +120,6 @@ const AttachmentsList: React.FC = (props) => { onYes: () => dialogContext.setYesNoDialog({ open: false }) }; - useEffect(() => { - if (reportMetaData && currentAttachment) { - setShowViewFileWithMetaDialog(true); - } - }, [reportMetaData, currentAttachment]); - const showDeleteAttachmentDialog = (attachment: IGetProjectAttachment | IGetSurveyAttachment) => { dialogContext.setYesNoDialog({ ...defaultYesNoDialogProps, @@ -128,25 +132,6 @@ const AttachmentsList: React.FC = (props) => { }); }; - const showToggleSecurityStatusAttachmentDialog = (attachment: IGetProjectAttachment | IGetSurveyAttachment) => { - dialogContext.setYesNoDialog({ - ...defaultYesNoDialogProps, - dialogTitle: 'Change Security Status', - dialogText: attachment.securityToken - ? `Changing this attachment's security status to unsecured will make it accessible by all users. Are you sure you want to continue?` - : `Changing this attachment's security status to secured will restrict it to yourself and other authorized users. Are you sure you want to continue?`, - open: true, - onYes: () => { - if (attachment.securityToken) { - makeAttachmentUnsecure(attachment); - } else { - makeAttachmentSecure(attachment); - } - dialogContext.setYesNoDialog({ open: false }); - } - }); - }; - const deleteAttachment = async (attachment: IGetProjectAttachment | IGetSurveyAttachment) => { if (!attachment?.id) { return; @@ -154,19 +139,13 @@ const AttachmentsList: React.FC = (props) => { try { if (!props.surveyId) { - await biohubApi.project.deleteProjectAttachment( - props.projectId, - attachment.id, - attachment.fileType, - attachment.securityToken - ); + await biohubApi.project.deleteProjectAttachment(props.projectId, attachment.id, attachment.fileType); } else if (props.surveyId) { await biohubApi.survey.deleteSurveyAttachment( props.projectId, props.surveyId, attachment.id, - attachment.fileType, - attachment.securityToken + attachment.fileType ); } @@ -183,26 +162,6 @@ const AttachmentsList: React.FC = (props) => { } }; - const getReportMeta = async (attachment: IGetProjectAttachment | IGetSurveyAttachment) => { - try { - let response; - - if (props.surveyId) { - response = await biohubApi.survey.getSurveyReportMetadata(props.projectId, props.surveyId, attachment.id); - } else { - response = await biohubApi.project.getProjectReportMetadata(props.projectId, attachment.id); - } - - if (!response) { - return; - } - - setReportMetaData(response); - } catch (error) { - return error; - } - }; - const openAttachment = async (attachment: IGetProjectAttachment | IGetSurveyAttachment) => { try { let response; @@ -235,135 +194,18 @@ const AttachmentsList: React.FC = (props) => { } }; - const openAttachmentFromReportMetaDialog = async () => { - if (currentAttachment) { - openAttachment(currentAttachment); - } - }; - - const openEditReportMetaDialog = async () => { - setShowViewFileWithMetaDialog(false); - setShowEditFileWithMetaDialog(true); - }; - - const makeAttachmentSecure = async (attachment: IGetProjectAttachment | IGetSurveyAttachment) => { - if (!attachment || !attachment.id) { - return; - } - - try { - let response; - - if (props.surveyId) { - response = await biohubApi.survey.makeAttachmentSecure( - props.projectId, - props.surveyId, - attachment.id, - attachment.fileType - ); - } else { - response = await biohubApi.project.makeAttachmentSecure(props.projectId, attachment.id, attachment.fileType); - } - - if (!response) { - return; - } - - props.getAttachments(true); - } catch (error) { - return error; - } - }; - - const makeAttachmentUnsecure = async (attachment: IGetProjectAttachment | IGetSurveyAttachment) => { - if (!attachment || !attachment.id) { - return; - } - - try { - let response; - - if (props.surveyId) { - response = await biohubApi.survey.makeAttachmentUnsecure( - props.projectId, - props.surveyId, - attachment.id, - attachment.securityToken, - attachment.fileType - ); - } else { - response = await biohubApi.project.makeAttachmentUnsecure( - props.projectId, - attachment.id, - attachment.securityToken, - attachment.fileType - ); - } - - if (!response) { - return; - } - - props.getAttachments(true); - } catch (error) { - return error; - } - }; - - const handleDialogEditSave = async (values: IEditReportMetaForm) => { - if (!reportMetaData) { - return; - } - - const fileMeta = values; - - try { - if (props.surveyId) { - await biohubApi.survey.updateSurveyReportMetadata( - props.projectId, - props.surveyId, - reportMetaData.attachment_id, - AttachmentType.REPORT, - fileMeta, - reportMetaData.revision_count - ); - } else { - await biohubApi.project.updateProjectReportMetadata( - props.projectId, - reportMetaData.attachment_id, - AttachmentType.REPORT, - fileMeta, - reportMetaData.revision_count - ); - } - } catch (error) { - const apiError = error as APIError; - showErrorDialog({ dialogText: apiError.message, dialogErrorDetails: apiError.errors, open: true }); - } finally { - setShowEditFileWithMetaDialog(false); - } - }; - return ( <> - { - setShowViewFileWithMetaDialog(false); - }} - onDownload={openAttachmentFromReportMetaDialog} - reportMetaData={reportMetaData} - attachmentSize={(currentAttachment && getFormattedFileSize(currentAttachment.size)) || '0 KB'} - /> - { - setShowEditFileWithMetaDialog(false); + { + setShowViewFileWithDetailsDialog(false); + props.getAttachments(true); }} - onSave={handleDialogEditSave} + refresh={refreshCurrentAttachment} /> @@ -372,10 +214,7 @@ const AttachmentsList: React.FC = (props) => { Name Type - File Size - Last Modified - Security - + @@ -383,31 +222,12 @@ const AttachmentsList: React.FC = (props) => { props.attachmentsList.slice(page * rowsPerPage, page * rowsPerPage + rowsPerPage).map((row, index) => { return ( - - openAttachment(row)}> + + openAttachment(row)}> {row.fileName} {row.fileType} - {getFormattedFileSize(row.size)} - {getFormattedDate(DATE_FORMAT.ShortMediumDateFormat, row.lastModified)} - - - - - - = (props) => { })} {!props.attachmentsList.length && ( - - No Attachments + + + No Documents + )} - {props.attachmentsList.length > 0 && ( - handleChangePage(event, newPage, setPage)} - onChangeRowsPerPage={(event: React.ChangeEvent) => - handleChangeRowsPerPage(event, setPage, setRowsPerPage) - } - /> - )} ); @@ -457,8 +266,6 @@ interface IAttachmentItemMenuButtonProps { } const AttachmentItemMenuButton: React.FC = (props) => { - const classes = useStyles(); - const [anchorEl, setAnchorEl] = useState(null); const open = Boolean(anchorEl); @@ -473,15 +280,10 @@ const AttachmentItemMenuButton: React.FC = (prop <> - + = (prop }} data-testid="attachment-action-menu-download"> - + - Download File + Download Document {props.attachment.fileType === AttachmentType.REPORT && ( = (prop }} data-testid="attachment-action-menu-details"> - + - View Details + View Document Details )} = (prop }} data-testid="attachment-action-menu-delete"> - + - Delete File + Delete Document diff --git a/app/src/components/attachments/EditReportMetaForm.tsx b/app/src/components/attachments/EditReportMetaForm.tsx index 3958f675db..dc0ce495da 100644 --- a/app/src/components/attachments/EditReportMetaForm.tsx +++ b/app/src/components/attachments/EditReportMetaForm.tsx @@ -26,6 +26,7 @@ export interface IEditReportMetaForm { description: string; year_published: number; revision_count: number; + onSave?: () => void; } export const EditReportMetaFormInitialValues: IEditReportMetaForm = { diff --git a/app/src/components/attachments/FileUploadWithMeta.tsx b/app/src/components/attachments/FileUploadWithMeta.tsx index db8aa0e2da..d2322e3628 100644 --- a/app/src/components/attachments/FileUploadWithMeta.tsx +++ b/app/src/components/attachments/FileUploadWithMeta.tsx @@ -1,12 +1,16 @@ import Box from '@material-ui/core/Box'; import Typography from '@material-ui/core/Typography'; -import { ProjectSurveyAttachmentValidExtensions } from 'constants/attachments'; +import ReportMetaForm, { IReportMetaForm } from 'components/attachments/ReportMetaForm'; +import FileUpload, { IReplaceHandler } from 'components/file-upload/FileUpload'; +import { + IFileHandler, + IOnUploadSuccess, + IUploadHandler, + UploadFileStatus +} from 'components/file-upload/FileUploadItem'; +import { AttachmentType, ProjectSurveyAttachmentValidExtensions } from 'constants/attachments'; import { useFormikContext } from 'formik'; import React from 'react'; -import { AttachmentType } from '../../constants/attachments'; -import ReportMetaForm, { IReportMetaForm } from '../attachments/ReportMetaForm'; -import FileUpload, { IReplaceHandler } from './FileUpload'; -import { IFileHandler, IOnUploadSuccess, IUploadHandler, UploadFileStatus } from './FileUploadItem'; export interface IFileUploadWithMetaProps { attachmentType: AttachmentType.REPORT | AttachmentType.OTHER; diff --git a/app/src/components/attachments/ReportMeta.tsx b/app/src/components/attachments/ReportMeta.tsx new file mode 100644 index 0000000000..9df8d3022c --- /dev/null +++ b/app/src/components/attachments/ReportMeta.tsx @@ -0,0 +1,103 @@ +import { Typography } from '@material-ui/core'; +import Box from '@material-ui/core/Box'; +import Divider from '@material-ui/core/Divider'; +import Paper from '@material-ui/core/Paper'; +import { Theme } from '@material-ui/core/styles/createMuiTheme'; +import makeStyles from '@material-ui/core/styles/makeStyles'; +import Toolbar from '@material-ui/core/Toolbar'; +import { DATE_FORMAT } from 'constants/dateTimeFormats'; +import { IGetReportDetails } from 'interfaces/useProjectApi.interface'; +import React from 'react'; +import { getFormattedDateRangeString } from 'utils/Utils'; + +const useStyles = makeStyles((theme: Theme) => ({ + docTitle: { + display: '-webkit-box', + '-webkit-line-clamp': 2, + '-webkit-box-orient': 'vertical', + overflow: 'hidden' + }, + docDL: { + margin: 0, + '& dt': { + flex: '0 0 200px', + margin: '0', + color: theme.palette.text.secondary + }, + '& dd': { + flex: '1 1 auto' + } + }, + docMetaRow: { + display: 'flex' + } +})); + +export interface IViewReportDetailsProps { + onEdit?: () => void; + onSave?: () => void; + + reportDetails: IGetReportDetails | null; +} + +const ReportMeta: React.FC = (props) => { + const classes = useStyles(); + + const reportDetails = props.reportDetails; + + return ( + <> + + + + General Information + + + + + + + + Report Title + + {reportDetails?.metadata?.title} + + + + Description + + {reportDetails?.metadata?.description} + + + + Year Published + + {reportDetails?.metadata?.year_published} + + + + Last Modified + + + {getFormattedDateRangeString( + DATE_FORMAT.ShortMediumDateFormat, + reportDetails?.metadata?.last_modified || '' + )} + + + + + Authors + + + {reportDetails?.authors?.map((author) => [author.first_name, author.last_name].join(' ')).join(', ')} + + + + + + + ); +}; + +export default ReportMeta; diff --git a/app/src/components/attachments/__snapshots__/DropZone.test.tsx.snap b/app/src/components/attachments/__snapshots__/DropZone.test.tsx.snap deleted file mode 100644 index c3c8e165e9..0000000000 --- a/app/src/components/attachments/__snapshots__/DropZone.test.tsx.snap +++ /dev/null @@ -1,80 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`DropZone matches the snapshot 1`] = ` - -
-
- -
- - - -
- Drag your files here, or - - Browse Files - -
-
-
- - Accepted files: .txt - -
-
- - Maximum file size: 50 MB - -
-
- - Maximum files: 10 - -
-
-
-
-
-
-`; diff --git a/app/src/components/boundary/InferredLocationDetails.tsx b/app/src/components/boundary/InferredLocationDetails.tsx index 6be8c15280..4e2216a383 100644 --- a/app/src/components/boundary/InferredLocationDetails.tsx +++ b/app/src/components/boundary/InferredLocationDetails.tsx @@ -1,7 +1,42 @@ import Box from '@material-ui/core/Box'; +import { grey } from '@material-ui/core/colors'; +import Divider from '@material-ui/core/Divider'; +import { Theme } from '@material-ui/core/styles/createMuiTheme'; +import makeStyles from '@material-ui/core/styles/makeStyles'; import Typography from '@material-ui/core/Typography'; import React from 'react'; +const useStyles = makeStyles((theme: Theme) => ({ + boundaryGroup: { + clear: 'both', + overflow: 'hidden', + '&:first-child': { + marginTop: 0 + } + }, + boundaryList: { + margin: 0, + padding: 0, + listStyleType: 'none', + '& li': { + display: 'inline-block', + float: 'left' + }, + '& li + li': { + marginLeft: theme.spacing(1) + } + }, + metaSectionHeader: { + color: grey[600], + fontWeight: 700, + textTransform: 'uppercase', + '& + hr': { + marginTop: theme.spacing(0.75), + marginBottom: theme.spacing(0.75) + } + } +})); + export interface IInferredLayers { parks: string[]; nrm: string[]; @@ -14,32 +49,37 @@ export interface IInferredLocationDetailsProps { } const InferredLocationDetails: React.FC = (props) => { + const classes = useStyles(); const displayInferredLayersInfo = (data: any[], type: string) => { if (!data.length) { return; } return ( - - - {type} ({data.length}) - - - {data.map((item: string, index: number) => ( - - {item} - {index < data.length - 1 && ', '} + <> + + + {type} ({data.length}) - ))} - + + + {data.map((item: string, index: number) => ( + + {item} + {index < data.length - 1 && ', '} + + ))} + + + ); }; return ( <> {displayInferredLayersInfo(props.layers.nrm, 'Natural Resource Ministries Regions')} - {displayInferredLayersInfo(props.layers.env, 'Ministry of Environment Regions')} - {displayInferredLayersInfo(props.layers.parks, 'Parks and EcoReserves')} + {displayInferredLayersInfo(props.layers.env, 'Ministry of Environment Regions')} + {displayInferredLayersInfo(props.layers.parks, 'Parks and EcoReserves')} ); }; diff --git a/app/src/components/boundary/MapBoundary.tsx b/app/src/components/boundary/MapBoundary.tsx index f80337023d..a9e289999d 100644 --- a/app/src/components/boundary/MapBoundary.tsx +++ b/app/src/components/boundary/MapBoundary.tsx @@ -5,20 +5,22 @@ import Grid from '@material-ui/core/Grid'; import IconButton from '@material-ui/core/IconButton'; import InputLabel from '@material-ui/core/InputLabel'; import MenuItem from '@material-ui/core/MenuItem'; +import Paper from '@material-ui/core/Paper'; import Select from '@material-ui/core/Select'; import { createStyles, makeStyles } from '@material-ui/core/styles'; import Typography from '@material-ui/core/Typography'; import Alert from '@material-ui/lab/Alert'; import { mdiRefresh, mdiTrayArrowUp } from '@mdi/js'; import Icon from '@mdi/react'; -import FileUpload from 'components/attachments/FileUpload'; -import { IUploadHandler } from 'components/attachments/FileUploadItem'; import InferredLocationDetails, { IInferredLayers } from 'components/boundary/InferredLocationDetails'; import ComponentDialog from 'components/dialog/ComponentDialog'; +import FileUpload from 'components/file-upload/FileUpload'; +import { IUploadHandler } from 'components/file-upload/FileUploadItem'; import MapContainer from 'components/map/MapContainer'; import { ProjectSurveyAttachmentValidExtensions } from 'constants/attachments'; import { FormikContextType } from 'formik'; import { Feature } from 'geojson'; +import { LatLngBoundsExpression } from 'leaflet'; import get from 'lodash-es/get'; import React, { useEffect, useState } from 'react'; import { @@ -41,17 +43,10 @@ const useStyles = makeStyles(() => backgroundColor: '#eeeeee' } }, - bold: { - fontWeight: 'bold' - }, - uploadButton: { - border: '2px solid', - textTransform: 'capitalize', - fontWeight: 'bold' - }, - mapLocations: { - '& dd': { - display: 'inline-block' + mapLayerControl: { + width: '300px', + '& .MuiInputBase-root': { + height: '44px' } } }) @@ -61,7 +56,7 @@ export interface IMapBoundaryProps { name: string; title: string; mapId: string; - bounds: any[]; + bounds: LatLngBoundsExpression | undefined; formikProps: FormikContextType; } @@ -80,7 +75,7 @@ const MapBoundary: React.FC = (props) => { const [openUploadBoundary, setOpenUploadBoundary] = useState(false); const [shouldUpdateBounds, setShouldUpdateBounds] = useState(false); - const [updatedBounds, setUpdatedBounds] = useState(undefined); + const [updatedBounds, setUpdatedBounds] = useState(undefined); const [selectedLayer, setSelectedLayer] = useState(''); const [inferredLayersInfo, setInferredLayersInfo] = useState({ parks: [], @@ -111,11 +106,11 @@ const MapBoundary: React.FC = (props) => { <> setOpenUploadBoundary(false)}> - If uploading a shapefile, it must be configured with a valid projection. + If importing a shapefile, it must be configured with a valid projection. = (props) => { - {title} - - - Define your boundary by selecting a boundary from an existing layer or by uploading KML file or shapefile. - - - - To select a boundary from an existing layer, select a layer from the dropdown, click a boundary on the map - and click 'Add Boundary'. - - - - - - - - Select Layer - - - - {selectedLayer && ( + + {title} + + + Import or select a boundary from existing map layers. To select an existing boundary, choose a map layer below + and click a boundary on the map. + + + - )} - - - {get(errors, name) && {get(errors, name)}} - - - setFieldValue(name, newGeo) - }} - bounds={(shouldUpdateBounds && updatedBounds) || bounds} - selectedLayer={selectedLayer} - setInferredLayersInfo={setInferredLayersInfo} - /> - {get(values, name) && get(values, name).length > 0 && ( - - { - setUpdatedBounds(calculateUpdatedMapBounds(get(values, name))); - setShouldUpdateBounds(true); - }}> - - + + + Map Layers + + + + + {selectedLayer && ( + + )} + + + {get(errors, name) && ( + + {get(errors, name)} )} - {get(errors, name) && ( - - {get(errors, name)} + + + setFieldValue(name, newGeo)} + bounds={(shouldUpdateBounds && updatedBounds) || bounds} + selectedLayer={selectedLayer} + setInferredLayersInfo={setInferredLayersInfo} + /> + {get(values, name) && get(values, name).length > 0 && ( + + { + setUpdatedBounds(calculateUpdatedMapBounds(get(values, name))); + setShouldUpdateBounds(true); + }}> + + + + )} - )} - {!Object.values(inferredLayersInfo).every((item: any) => !item.length) && ( - <> - - Boundary Information - -
+ {!Object.values(inferredLayersInfo).every((item: any) => !item.length) && ( + -
- - )} +
+ )} +
); diff --git a/app/src/components/chips/RequestChips.tsx b/app/src/components/chips/RequestChips.tsx index ab3e2349f0..ff3a66daae 100644 --- a/app/src/components/chips/RequestChips.tsx +++ b/app/src/components/chips/RequestChips.tsx @@ -9,7 +9,7 @@ const useStyles = makeStyles((theme: Theme) => ({ color: 'white' }, chipPending: { - backgroundColor: theme.palette.primary.main + backgroundColor: theme.palette.error.main }, chipActioned: { backgroundColor: theme.palette.success.main @@ -32,7 +32,7 @@ export const AccessStatusChip: React.FC<{ status: string; chipProps?: Partial ({ - dialogIconContainer: { - marginTop: theme.spacing(5), - marginBottom: theme.spacing(2), - marginLeft: 'auto', - marginRight: 'auto', - width: '128px', - height: '128px', - borderRadius: '64px', - background: 'rgba(9,14,211,0.05)' - }, - dialogIcon: { - color: '#0972D3' - } -})); - -export const SplashDialog = () => { - const classes = useStyles(); - const [open, setOpen] = useState(window.sessionStorage.getItem('sims_splash_screen') !== 'dontshow'); - - useEffect(() => { - function showSplashScreen() { - if (window.sessionStorage.getItem('sims_splash_screen') === 'dontshow') { - setOpen(false); - return; - } - - setOpen(true); - } - - function handleStorageEvent(this: Window) { - showSplashScreen(); - } - - showSplashScreen(); - - window.addEventListener('storage', handleStorageEvent); - - return () => window.removeEventListener('storage', handleStorageEvent); - }, []); - - return ( - window.sessionStorage.setItem('sims_splash_screen', 'dontshow')} - data-testid="splash-dialog" - aria-labelledby="alert-dialog-title" - aria-describedby="alert-dialog-description"> - - - - Important Notice for Data Submissions - - - - This application will be unavailable from February 6th - 13th, 2023 to support an upcoming release. - -

- Moose data formatted using the Moose Aerial Stratified Random Block Composition Survey 2.5 template,{' '} - must be submitted before February 6th, 2023. This template will not be supported after this - date. -

-

- Future moose data submissions must use one of the new 2.0 templates provided in the resources section of - this application. The new templates will be available on February 14th, 2023. -

-

- Questions or comments? Contact us at biohub@gov.bc.ca. -

-
-
- - - -
- ); -}; - -export const CloseSplashDialog = () => { - window.sessionStorage.setItem('sims_splash_screen', 'dontshow'); - window.dispatchEvent(new Event('storage')); -}; - -export const OpenSplashDialog = () => { - window.sessionStorage.setItem('sims_splash_screen', ''); - window.dispatchEvent(new Event('storage')); -}; diff --git a/app/src/components/dialog/ViewFileWithMetaDialog.tsx b/app/src/components/dialog/ViewFileWithMetaDialog.tsx deleted file mode 100644 index e558e4ea89..0000000000 --- a/app/src/components/dialog/ViewFileWithMetaDialog.tsx +++ /dev/null @@ -1,108 +0,0 @@ -import { DialogTitle } from '@material-ui/core'; -import Box from '@material-ui/core/Box'; -import Button from '@material-ui/core/Button'; -import Dialog, { DialogProps } from '@material-ui/core/Dialog'; -import DialogActions from '@material-ui/core/DialogActions'; -import DialogContent from '@material-ui/core/DialogContent'; -import Grid from '@material-ui/core/Grid'; -import Typography from '@material-ui/core/Typography'; -import { mdiPencilOutline, mdiTrayArrowDown } from '@mdi/js'; -import Icon from '@mdi/react'; -import { DATE_FORMAT } from 'constants/dateTimeFormats'; -import { IGetReportMetaData } from 'interfaces/useProjectApi.interface'; -import React, { useState } from 'react'; -import { getFormattedDateRangeString } from 'utils/Utils'; - -export interface IViewFileWithMetaDialogProps { - open: boolean; - onEdit?: () => void; - onClose: () => void; - onDownload: () => void; - reportMetaData: IGetReportMetaData | null; - attachmentSize: string; - dialogProps?: DialogProps; -} - -/** - * General information content for a project. - * - * @return {*} - */ -const ViewFileWithMetaDialog: React.FC = (props) => { - const { reportMetaData } = props; - - const [showEditButton] = useState(!!props.onEdit); - - if (!props.open) { - return <>; - } - - return ( - <> - - {reportMetaData?.title} - - - - - - Summary - - - {reportMetaData?.description} - - - - - Year Published - - - {reportMetaData?.year_published} - - - - - Last Modified - - - {getFormattedDateRangeString(DATE_FORMAT.ShortMediumDateFormat, reportMetaData?.last_modified || '')} - - - - - Authors - - - {reportMetaData?.authors?.map((author) => [author.first_name, author.last_name].join(' ')).join(', ')} - - - - - - - - {showEditButton && ( - - )} - - - - - ); -}; - -export default ViewFileWithMetaDialog; diff --git a/app/src/components/dialog/YesNoDialog.tsx b/app/src/components/dialog/YesNoDialog.tsx index b899d87d84..6f150eddf8 100644 --- a/app/src/components/dialog/YesNoDialog.tsx +++ b/app/src/components/dialog/YesNoDialog.tsx @@ -101,6 +101,7 @@ const YesNoDialog: React.FC = (props) => { return ( @@ -981,7 +1034,7 @@ exports[`MapContainer sets the bounds of the geo being passed in successfully 1` >
@@ -1108,6 +1161,17 @@ exports[`MapContainer sets the bounds of the geo being passed in successfully 1` Draw a rectangle + + + Draw a marker + +
    @@ -1185,8 +1248,9 @@ exports[`MapContainer sets the bounds of the geo being passed in successfully 1`
    diff --git a/app/src/components/map/components/AdditionalLayers.tsx b/app/src/components/map/components/AdditionalLayers.tsx new file mode 100644 index 0000000000..f13536c274 --- /dev/null +++ b/app/src/components/map/components/AdditionalLayers.tsx @@ -0,0 +1,22 @@ +import React, { Fragment, ReactElement } from 'react'; + +export type IAdditionalLayers = ReactElement[]; + +interface IAdditionalLayersProps { + layers: IAdditionalLayers; +} + +/** + * Renders any additional layer feature groups + */ +const AdditionalLayers: React.FC = (props) => { + return ( + <> + {props.layers.map((additionalLayer: ReactElement, index: number) => ( + {additionalLayer} + ))} + + ); +}; + +export default AdditionalLayers; diff --git a/app/src/components/map/components/BaseLayerControls.tsx b/app/src/components/map/components/BaseLayerControls.tsx new file mode 100644 index 0000000000..3b5260d6e5 --- /dev/null +++ b/app/src/components/map/components/BaseLayerControls.tsx @@ -0,0 +1,20 @@ +import React from 'react'; +import { LayersControl, TileLayer } from 'react-leaflet'; + +const BaseLayerControls: React.FC = () => { + return ( + <> + + + + + + + + ); +}; + +export default BaseLayerControls; diff --git a/app/src/components/map/components/Bounds.tsx b/app/src/components/map/components/Bounds.tsx new file mode 100644 index 0000000000..f204e6d5d0 --- /dev/null +++ b/app/src/components/map/components/Bounds.tsx @@ -0,0 +1,53 @@ +import { Feature, Polygon } from 'geojson'; +import { LatLngBoundsExpression } from 'leaflet'; +import { useMap, useMapEvents } from 'react-leaflet'; +import { getFeatureObjectFromLatLngBounds } from 'utils/Utils'; + +export interface ISetMapBoundsProps { + bounds?: LatLngBoundsExpression; + zoom?: number; +} + +export const SetMapBounds: React.FC> = (props) => { + const map = useMap(); + // Set bounds if provided, ignore zoom + if (props.bounds) { + map.fitBounds(props.bounds); + } else if (props.zoom) { + // Set zoom if provided + map.setZoom(props.zoom); + } + + return null; +}; + +export type IMapBoundsOnChange = (bounds: Feature, zoom: number) => void; + +export interface IGetMapBoundsProps { + onChange: IMapBoundsOnChange; +} + +export const GetMapBounds: React.FC> = (props) => { + const { onChange } = props; + + const map = useMapEvents({ + zoomend() { + const latLngBounds = map.getBounds(); + map.closePopup(); + + const featureBounds = getFeatureObjectFromLatLngBounds(latLngBounds); + + onChange(featureBounds, map.getZoom()); + }, + moveend() { + const latLngBounds = map.getBounds(); + map.closePopup(); + + const featureBounds = getFeatureObjectFromLatLngBounds(latLngBounds); + + onChange(featureBounds, map.getZoom()); + } + }); + + return null; +}; diff --git a/app/src/components/map/components/DrawControls.tsx b/app/src/components/map/components/DrawControls.tsx new file mode 100644 index 0000000000..598aee1d10 --- /dev/null +++ b/app/src/components/map/components/DrawControls.tsx @@ -0,0 +1,261 @@ +import { useLeafletContext } from '@react-leaflet/core'; +import YesNoDialog from 'components/dialog/YesNoDialog'; +import { Feature } from 'geojson'; +import { useDeepCompareEffect } from 'hooks/useDeepCompareEffect'; +import * as L from 'leaflet'; +import 'leaflet-draw'; +import 'leaflet/dist/leaflet.css'; +import React, { useEffect, useRef, useState } from 'react'; + +/* + * Supported draw events. + */ +const eventHandlers = { + onCreated: 'draw:created', + onEdited: 'draw:edited', + onDrawStart: 'draw:drawstart', + onDrawStop: 'draw:drawstop', + onDrawVertex: 'draw:drawvertex', + onEditStart: 'draw:editstart', + onEditMove: 'draw:editmove', + onEditResize: 'draw:editresize', + onEditVertex: 'draw:editvertex', + onEditStop: 'draw:editstop', + onDeleted: 'draw:deleted', + onDeleteStart: 'draw:deletestart', + onDeleteStop: 'draw:deletestop', + onMounted: 'draw:mounted' +}; + +/** + * Custom subset of `L.Control.DrawConstructorOptions` that omits `edit.Feature` as this will be added automatically + * by `DrawControls`. + * + * @export + * @interface IDrawControlsOptions + */ +export interface IDrawControlsOptions { + position?: L.Control.DrawConstructorOptions['position']; + draw?: L.Control.DrawConstructorOptions['draw']; + edit?: Omit; +} + +export type IDrawControlsOnChange = (features: Feature[]) => void; + +export interface IDrawControlsProps { + /** + * Initial features to add to the map. These features will be editable. + * + * @type {Feature[]} + * @memberof IDrawControlsProps + */ + initialFeatures?: Feature[]; + /** + * Options to control the draw/edit UI controls. + * + * @type {IDrawControlsOptions} + * @memberof IDrawControlsProps + */ + options?: IDrawControlsOptions; + /** + * Callback triggered anytime a feature is added or updated or removed. + * + * @type {IDrawControlsOnChange} + * @memberof IDrawControlsProps + */ + onChange?: IDrawControlsOnChange; + /** + * Clear any previously drawn features (layers) before drawing the next one. + * The result is that only 1 feature will be shown at a time. + * + * @type {boolean} + * @memberof IDrawControlsProps + */ + clearOnDraw?: boolean; + /** + * If true, a modal will appear to confirm deletions. + */ + confirmDeletion?: boolean; +} + +const DrawControls: React.FC> = (props) => { + const context = useLeafletContext(); + const [deleteEvent, setDeleteEvent] = useState(null); + const showDeleteModal = Boolean(deleteEvent); + + /** + * Fetch the layer used by the draw controls. + * + * @return {*} {L.FeatureGroup} + */ + const getFeatureGroup = () => { + const container = context.layerContainer; + + if (!container || !(container instanceof L.FeatureGroup)) { + throw new Error('Failed to get map layer'); + } + + return container; + }; + + /** + * Collects all current feature layers, and calls `props.onChange`. + * Adds `radius` to the properties if the source feature is a circle type. + */ + const handleFeatureUpdate = () => { + const container = getFeatureGroup(); + + const features: Feature[] = []; + + container.getLayers().forEach((layer: any) => { + const geoJSON = layer.toGeoJSON(); + + if (layer._mRadius) { + geoJSON.properties.radius = layer.getRadius(); + } + + features.push(geoJSON); + }); + + props.onChange?.([...features]); + }; + + /** + * Build and return a drawing map control. + * + * @return {*} {L.Control.Draw} + */ + const getDrawControls = (): L.Control.Draw => { + const options: L.Control.DrawConstructorOptions = { + edit: { + ...props.options?.edit, + // Add FeatureGroup automatically + featureGroup: getFeatureGroup() + } + }; + + options.draw = { ...props.options?.draw }; + + options.position = props.options?.position || 'topright'; + + return new L.Control.Draw(options); + }; + + /** + * Handle create events. + * + * @param {L.DrawEvents.Created} event + */ + const onDrawCreate = (event: L.DrawEvents.Created) => { + const container = getFeatureGroup(); + + if (props.clearOnDraw) { + // Clear previous layers + container.clearLayers(); + } + + container.addLayer(event.layer); + handleFeatureUpdate(); + }; + + /** + * Handle edit/delete events. + */ + const onDrawEditDelete = (_event?: L.LeafletEvent) => { + handleFeatureUpdate(); + }; + + /** + * Registers/draws features. + * + * @param {Feature[]} [features] + * @return {*} + */ + const drawInitialFeatures = () => { + const features: Feature[] = props.initialFeatures || []; + if (features.length === 0) { + return; + } + + const container = getFeatureGroup(); + + container.clearLayers(); + + features?.forEach((item: Feature) => { + L.geoJSON(item, { + pointToLayer: (feature, latlng) => { + if (feature.properties?.radius) { + return new L.Circle([latlng.lat, latlng.lng], feature.properties.radius); + } + + return new L.Marker([latlng.lat, latlng.lng]); + }, + onEachFeature: function (_feature, layer) { + container.addLayer(layer); + } + }); + }); + }; + + const cancelRemoveFeatures = () => { + setDeleteEvent(null); + drawInitialFeatures(); + }; + + useEffect(() => { + const { map } = context; + + // Remove any existing event handlers + map.removeEventListener(eventHandlers.onCreated); + map.removeEventListener(eventHandlers.onEdited); + map.removeEventListener(eventHandlers.onDeleted); + + // Register draw event handlers + map.on(eventHandlers.onCreated, onDrawCreate as L.LeafletEventHandlerFn); + map.on(eventHandlers.onEdited, onDrawEditDelete); + map.on(eventHandlers.onDeleted, (event) => { + if (props.confirmDeletion) { + setDeleteEvent(event); + return; + } + onDrawEditDelete(event); + }); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [props.options, props.onChange, props.clearOnDraw]); + + useDeepCompareEffect(() => { + drawInitialFeatures(); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [props.initialFeatures]); + + const drawControlsRef = useRef(getDrawControls()); + + useDeepCompareEffect(() => { + const { map } = context; + // Update draw control + drawControlsRef.current.remove(); + drawControlsRef.current = getDrawControls(); + drawControlsRef.current.addTo(map); + // eslint-disable-next-line react-hooks/exhaustive-deps + }, [props.options]); + + if (!props.confirmDeletion) { + return null; + } + + return ( + cancelRemoveFeatures()} + onNo={() => cancelRemoveFeatures()} + onYes={() => { + onDrawEditDelete(deleteEvent || undefined); + setDeleteEvent(null); + }} + /> + ); +}; + +export default DrawControls; diff --git a/app/src/components/map/components/EventHandler.tsx b/app/src/components/map/components/EventHandler.tsx new file mode 100644 index 0000000000..97639797f4 --- /dev/null +++ b/app/src/components/map/components/EventHandler.tsx @@ -0,0 +1,16 @@ +import { LeafletEventHandlerFnMap } from 'leaflet'; +import { useMapEvents } from 'react-leaflet'; + +export interface IEventHandlerProps { + eventHandlers?: LeafletEventHandlerFnMap; +} + +const EventHandler = (props: IEventHandlerProps) => { + useMapEvents({ + ...props.eventHandlers + }); + + return null; +}; + +export default EventHandler; diff --git a/app/src/components/map/components/FullScreenScrollingEventHandler.tsx b/app/src/components/map/components/FullScreenScrollingEventHandler.tsx new file mode 100644 index 0000000000..146e8e9d3d --- /dev/null +++ b/app/src/components/map/components/FullScreenScrollingEventHandler.tsx @@ -0,0 +1,64 @@ +import { LatLngBoundsExpression } from 'leaflet'; +import { useMap } from 'react-leaflet'; + +export interface IFullScreenScrollingEventHandlerProps { + /** + * Whether or not scroll wheel zooming is enabled. + * + * Usage: + * - If `true`, scroll wheel zooming will always be enabled regardless of mode. + * - If `false`, scroll wheel zooming will only be enabled in full screen mode. + * + * @type {boolean} + * @memberof IFullScreenScrollingEventHandlerProps + */ + scrollWheelZoom?: boolean; + /** + * Bounds to set when exiting full screen mode. + * + * @type {LatLngBoundsExpression} + * @memberof IFullScreenScrollingEventHandlerProps + */ + bounds?: LatLngBoundsExpression; +} + +/** + * Special event handler that triggers when the map enters and exists full screen mode. + * + * @param {*} props + * @return {*} + */ +const FullScreenScrollingEventHandler: React.FC> = ( + props +) => { + const map = useMap(); + + if (props.scrollWheelZoom) { + map.scrollWheelZoom.enable(); + } else { + map.scrollWheelZoom.disable(); + } + + map.on('fullscreenchange', function () { + if (map.isFullscreen()) { + if (!props.scrollWheelZoom) { + // don't change scroll wheel zoom settings if it was enabled by default via props + map.scrollWheelZoom.enable(); + } + } else { + if (!props.scrollWheelZoom) { + // don't change scroll wheel zoom settings if it was enabled by default via props + map.scrollWheelZoom.disable(); + } + + if (props.bounds) { + // reset bounds, if provided, on exit fullscreen + map.fitBounds(props.bounds); + } + } + }); + + return null; +}; + +export default FullScreenScrollingEventHandler; diff --git a/app/src/components/map/components/MarkerCluster.tsx b/app/src/components/map/components/MarkerCluster.tsx new file mode 100644 index 0000000000..338dd80c67 --- /dev/null +++ b/app/src/components/map/components/MarkerCluster.tsx @@ -0,0 +1,145 @@ +import { createLayerComponent } from '@react-leaflet/core'; +import L, { LatLngExpression } from 'leaflet'; +import 'leaflet.markercluster'; +import React, { ReactElement } from 'react'; +import { FeatureGroup, LayersControl, MarkerProps, Popup, PopupProps, Tooltip, TooltipProps } from 'react-leaflet'; + +export interface IMarker { + position: LatLngExpression; + count: number; + key: string | number; + MarkerProps?: Partial; + popup?: ReactElement; + PopupProps?: Partial; + tooltip?: ReactElement; + TooltipProps?: Partial; +} + +export interface IMarkerLayer { + layerName: string; + markers: IMarker[]; +} + +export interface IMarkerLayersProps { + layers?: IMarkerLayer[]; +} + +const makeCountIcon = (count: number) => { + return L.divIcon({ + html: `
    ${count}
    `, + className: 'marker-cluster marker-cluster-small', + iconSize: new L.Point(24, 24) + }); +}; + +const CountMarker: any = L.Marker.extend({ + options: { + count: 1 + }, + + setCount(s: number) { + this.options.count = s; + }, + + initialize(latlng: number[], { count, ...options }: { count: number }) { + L.Util.setOptions(this, { + count, + ...options + }); + + (L.CircleMarker.prototype as any).initialize.call(this, latlng, { + count, + ...options, + icon: makeCountIcon(count) + }); + } +}); + +const Marker = createLayerComponent void }, MarkerProps & { count: number }>( + ({ position, ...options }: MarkerProps & { count: number }, ctx) => { + const instance = new CountMarker(position, options); + return { + instance, + context: { ...ctx, overlayContainer: instance } + }; + }, + (marker, props, prevProps) => { + if (props.count !== prevProps.count) { + marker.setCount(props.count); + } + if (props.position !== prevProps.position) { + marker.setLatLng(props.position); + } + + if (props.icon != null && props.icon !== prevProps.icon) { + marker.setIcon(props.icon); + } + + if (props.zIndexOffset != null && props.zIndexOffset !== prevProps.zIndexOffset) { + marker.setZIndexOffset(props.zIndexOffset); + } + + if (props.opacity != null && props.opacity !== prevProps.opacity) { + marker.setOpacity(props.opacity); + } + + if (marker.dragging != null && props.draggable !== prevProps.draggable) { + if (props.draggable === true) { + marker.dragging.enable(); + } else { + marker.dragging.disable(); + } + } + } +); + +const MarkerCluster: React.FC> = (props) => { + if (!props.layers?.length) { + return null; + } + + const layerControls: ReactElement[] = []; + + props.layers.forEach((layer, index) => { + if (!layer.markers?.length) { + return; + } + + layerControls.push( + + + {layer.markers.map((item) => { + const id = item.key; + return ( + + {item.tooltip && ( + + {item.tooltip} + + )} + {item.popup && ( + + {item.popup} + + )} + + ); + })} + + + ); + }); + + return <>{layerControls}; +}; + +export default MarkerCluster; diff --git a/app/src/components/map/components/StaticLayers.tsx b/app/src/components/map/components/StaticLayers.tsx new file mode 100644 index 0000000000..3f8421afb0 --- /dev/null +++ b/app/src/components/map/components/StaticLayers.tsx @@ -0,0 +1,90 @@ +import { Feature } from 'geojson'; +import * as L from 'leaflet'; +import React, { ReactElement } from 'react'; +import { + FeatureGroup, + GeoJSON, + GeoJSONProps, + LayersControl, + Popup, + PopupProps, + Tooltip, + TooltipProps +} from 'react-leaflet'; + +export interface IStaticLayerFeature { + geoJSON: Feature; + key?: string | number; + GeoJSONProps?: Partial; + popup?: ReactElement; + PopupProps?: Partial; + tooltip?: ReactElement; + TooltipProps?: Partial; +} + +export interface IStaticLayer { + layerName: string; + features: IStaticLayerFeature[]; +} + +export interface IStaticLayersProps { + layers?: IStaticLayer[]; +} + +const StaticLayers: React.FC> = (props) => { + if (!props.layers?.length) { + return null; + } + + const layerControls: ReactElement[] = []; + + props.layers.forEach((layer, index) => { + if (!layer.features?.length) { + return; + } + + layerControls.push( + + + {layer.features.map((item, index) => { + const id = item.key || item.geoJSON.id || index; + + return ( + { + if (feature.properties?.radius) { + return new L.Circle([latlng.lat, latlng.lng], feature.properties.radius); + } + + return new L.Marker([latlng.lat, latlng.lng]); + }} + data={item.geoJSON} + {...item.GeoJSONProps}> + {item.tooltip && ( + + {item.tooltip} + + )} + {item.popup && ( + + {item.popup} + + )} + + ); + })} + + + ); + }); + + return <>{layerControls}; +}; + +export default StaticLayers; diff --git a/app/src/components/map/wfs-utils.tsx b/app/src/components/map/wfs-utils.tsx new file mode 100644 index 0000000000..7a062fcfd3 --- /dev/null +++ b/app/src/components/map/wfs-utils.tsx @@ -0,0 +1,302 @@ +import { Feature, Geometry, MultiPolygon, Point, Polygon, Position } from 'geojson'; +import React from 'react'; +import { ReProjector } from 'reproj-helper'; +import { + getInferredLayersInfoByProjectedGeometry, + getInferredLayersInfoByWFSFeature, + getLayerTypesToSkipByProjectedGeometry +} from 'utils/mapLayersHelpers'; +import { defaultWFSParams, IWFSParams } from './WFSFeatureGroup'; + +/** + * Alter the projection of an array of features, from EPSG:4326 to EPSG:3005 (BC Albers). + * + * @param {Feature[]} geos + * @return {*} {Promise[]} + */ +export const changeProjections = (geos: Feature[]): Promise[] => { + const reprojector = new ReProjector(); + + return geos.map( + (geo: Feature) => reprojector.feature(geo).from('EPSG:4326').to('EPSG:3005').project() as Promise + ); +}; + +/** + * Asserts whether a Geometry object is a MultiPolygon or not. + * @param geometry A geo-json Geometry object. + * @returns `true` if `geometry.type === 'MultiPolygon'`, false otherwise. + */ +const isMultiPolygon = (geometry: Geometry): geometry is MultiPolygon => { + return geometry.type === 'MultiPolygon'; +}; + +/** + * Asserts whether a Geometry object is a Polygon or not. + * @param geometry A geo-json Geometry object. + * @returns `true` if `geometry.type === 'Polygon'`, false otherwise. + */ +const isPolygon = (geometry: Geometry): geometry is Polygon => { + return geometry.type === 'Polygon'; +}; + +/** + * Asserts whether a Geometry object is a Point or not. + * @param geometry A geo-json Geometry object. + * @returns `true` if `geometry.type === 'Point'`, false otherwise. + */ +const isPoint = (geometry: Geometry): geometry is Point => { + return geometry.type === 'Point'; +}; + +/** + * Generate the coordinates string for the reprojected geometries based on geometry type + * + * This is needed because the query for filtering results by geometry and layer(s) intersection + * is done using CQL_FILTER (https://docs.geoserver.org/master/en/user/services/wfs/vendor.html) + * and this function takes our projected geometry and converts it into a valid CQL-compatible coordinates string + * + * @param {Feature} projectedGeometry + * @returns {string} formatted coordinates string + * + */ +export const generateCoordinatesString = (projectedGeometry: Geometry) => { + let coordinatesString = ''; + + if (isMultiPolygon(projectedGeometry)) { + const coordinatesArray: Position[][][] = projectedGeometry.coordinates; + coordinatesString += '((('; + + coordinatesArray.forEach((coordinateArray: Position[][], arrayIndex: number) => { + coordinateArray[0].forEach((coordinatePoint: Position, index: number) => { + coordinatesString += `${coordinatePoint[0]} ${coordinatePoint[1]}`; + + if (index !== coordinateArray[0].length - 1) { + coordinatesString += ','; + } else if (arrayIndex !== coordinatesArray.length - 1) { + coordinatesString += ')),'; + } + }); + + if (arrayIndex !== coordinatesArray.length - 1) { + coordinatesString += '(('; + } + }); + + coordinatesString += ')))'; + } else if (isPolygon(projectedGeometry)) { + coordinatesString += '(('; + const coordinatesArray: Position[][] = projectedGeometry.coordinates; + + coordinatesArray[0].forEach((coordinatePoint: Position, index: number) => { + coordinatesString += `${coordinatePoint[0]} ${coordinatePoint[1]}`; + + if (index !== coordinatesArray[0].length - 1) { + coordinatesString += ','; + } else { + coordinatesString += '))'; + } + }); + } else if (isPoint(projectedGeometry)) { + const coordinatesArray: Position = projectedGeometry.coordinates; + coordinatesString += `(${coordinatesArray[0]} ${coordinatesArray[1]})`; + } + + return coordinatesString; +}; + +/** + * Construct a WFS url to fetch layer information. + * + * @param {string} typeName layer name + * @param {IWFSParams} [wfsParams=defaultWFSParams] wfs url parameters. Will use defaults specified in + * `defaultWFSParams` for any properties not provided. + * @return {*} + */ +const buildWFSURL = (typeName: string, wfsParams: IWFSParams = defaultWFSParams) => { + const params = { ...defaultWFSParams, ...wfsParams }; + + return `${params.url}?service=WFS&&version=${params.version}&request=${params.request}&typeName=${typeName}&outputFormat=${params.outputFormat}&srsName=${params.srsName}`; +}; + +export interface IWFSFeatureDetails { + parks?: string[]; + nrm?: string[]; + env?: string[]; + wmu?: string[]; +} + +/* + Function to get WFS feature details based on the existing map geometries + and layer types/filter criteria +*/ +export const createGetFeatureDetails = ( + externalApiPost: (url: string, body: any) => Promise<{ features?: Feature[] }> +) => async (typeNames: string[], mapGeometries: Feature[], wfsParams?: IWFSParams): Promise => { + const parksInfo: Set = new Set(); // Parks and Eco-Reserves + const nrmInfo: Set = new Set(); // NRM Regions + const envInfo: Set = new Set(); // ENV Regions + const wmuInfo: Set = new Set(); // Wildlife Management Units + let inferredLayersInfo = { + parksInfo, + nrmInfo, + envInfo, + wmuInfo + }; + + // Convert all geometries to BC Albers projection + const reprojectedGeometries = await Promise.all(changeProjections(mapGeometries)); + + const wfsPromises: Promise<{ features?: Feature[] }>[] = []; + reprojectedGeometries.forEach((projectedGeo) => { + let filterCriteria = ''; + const coordinatesString = generateCoordinatesString(projectedGeo.geometry); + + filterCriteria = `${projectedGeo.geometry.type}${coordinatesString}`; + inferredLayersInfo = getInferredLayersInfoByProjectedGeometry(projectedGeo, inferredLayersInfo); + const layerTypesToSkip = getLayerTypesToSkipByProjectedGeometry(projectedGeo); + + // Make Open Maps API call to retrieve intersecting features based on geometry and filter criteria + typeNames.forEach((typeName: string) => { + if (!layerTypesToSkip.includes(typeName)) { + const url = buildWFSURL(typeName, wfsParams); + const geoFilterType = layerGeoFilterTypeMappings[typeName]; + const filterData = `INTERSECTS(${geoFilterType}, ${filterCriteria})`; + + const requestBody = new URLSearchParams(); + requestBody.append('CQL_FILTER', filterData); + + wfsPromises.push( + /* catch and ignore errors */ + externalApiPost(url, requestBody).catch(() => {}) as Promise<{ features?: Feature[] }> + ); + } + }); + }); + const wfsResult = await Promise.all(wfsPromises); + + wfsResult.forEach((item: { features?: Feature[] }) => { + item?.features?.forEach((feature: Feature) => { + inferredLayersInfo = getInferredLayersInfoByWFSFeature(feature, inferredLayersInfo); + }); + }); + + if (!inferredLayersInfo) { + return {}; + } + + return { + parks: Array.from(inferredLayersInfo.parksInfo), + nrm: Array.from(inferredLayersInfo.nrmInfo), + env: Array.from(inferredLayersInfo.envInfo), + wmu: Array.from(inferredLayersInfo.wmuInfo) + }; +}; + +/* + Because different OpenMaps layers are identified using different keys + - Parks and NRM regions use the key SHAPE + - ENV regions and WMU use the key GEOMETRY +*/ +export const layerGeoFilterTypeMappings = { + 'pub:WHSE_TANTALIS.TA_PARK_ECORES_PA_SVW': 'SHAPE', + 'pub:WHSE_ADMIN_BOUNDARIES.ADM_NR_REGIONS_SPG': 'SHAPE', + 'pub:WHSE_ADMIN_BOUNDARIES.EADM_WLAP_REGION_BND_AREA_SVW': 'GEOMETRY', + 'pub:WHSE_WILDLIFE_MANAGEMENT.WAA_WILDLIFE_MGMT_UNITS_SVW': 'GEOMETRY' +}; + +export const wfsInferredLayers = [ + 'pub:WHSE_TANTALIS.TA_PARK_ECORES_PA_SVW', + 'pub:WHSE_ADMIN_BOUNDARIES.ADM_NR_REGIONS_SPG', + 'pub:WHSE_ADMIN_BOUNDARIES.EADM_WLAP_REGION_BND_AREA_SVW', + 'pub:WHSE_WILDLIFE_MANAGEMENT.WAA_WILDLIFE_MGMT_UNITS_SVW' +]; + +/* + Because there is not a 1:1 mapping between the ENV and NRM regions + As can be seen, there are 2 ENV regions that map to the same NRM region +*/ +export const envToNrmRegionsMapping = { + '1- Vancouver Island': 'West Coast Natural Resource Region', + '2- Lower Mainland': 'South Coast Natural Resource Region', + '3- Thompson': 'Thompson-Okanagan Natural Resource Region', + '8- Okanagan': 'Thompson-Okanagan Natural Resource Region', + '4- Kootenay': 'Kootenay-Boundary Natural Resource Region', + '5- Cariboo': 'Cariboo Natural Resource Region', + '6- Skeena': 'Skeena Natural Resource Region', + '7- Omineca': 'Omineca Natural Resource Region', + '9- Peace': 'Northeast Natural Resource Region' +}; + +export const layerContentHandlers = { + 'pub:WHSE_WILDLIFE_MANAGEMENT.WAA_WILDLIFE_MGMT_UNITS_SVW': { + featureKeyHandler: (feature: Feature) => feature?.properties?.OBJECTID, + popupContentHandler: (feature: Feature) => { + if (!feature || !feature.properties) { + return { tooltip: 'Unparsable Feature', content: [] }; + } + + const tooltip = `${feature.properties.WILDLIFE_MGMT_UNIT_ID} - ${feature.properties.GAME_MANAGEMENT_ZONE_ID} - ${feature.properties.GAME_MANAGEMENT_ZONE_NAME}`; + + const content = ( + <> +
    {`Wildlife Management Unit: ${feature.properties.WILDLIFE_MGMT_UNIT_ID}`}
    +
    {`Game Management Zone: ${feature.properties.GAME_MANAGEMENT_ZONE_ID}`}
    +
    {`Game Management Zone Name: ${feature.properties.GAME_MANAGEMENT_ZONE_NAME}`}
    +
    {`Region Area: ${(feature.properties.FEATURE_AREA_SQM / 10000).toFixed( + 0 + )} ha`}
    + + ); + + return { tooltip, content }; + } + }, + 'pub:WHSE_TANTALIS.TA_PARK_ECORES_PA_SVW': { + featureKeyHandler: (feature: Feature) => feature?.properties?.OBJECTID, + popupContentHandler: (feature: Feature) => { + if (!feature || !feature.properties) { + return { tooltip: 'Unparsable Feature', content: [] }; + } + + const tooltip = `${feature.properties.PROTECTED_LANDS_NAME} - ${feature.properties.PROTECTED_LANDS_DESIGNATION}`; + + const content = ( + <> +
    {`Lands Name: ${feature.properties.PROTECTED_LANDS_NAME}`}
    +
    {`Lands Designation: ${feature.properties.PROTECTED_LANDS_DESIGNATION}`}
    +
    {`Region Area: ${(feature.properties.FEATURE_AREA_SQM / 10000).toFixed( + 0 + )} ha`}
    + + ); + + return { tooltip, content }; + } + }, + 'pub:WHSE_ADMIN_BOUNDARIES.ADM_NR_REGIONS_SPG': { + featureKeyHandler: (feature: Feature) => feature?.properties?.OBJECTID, + popupContentHandler: (feature: Feature) => { + if (!feature || !feature.properties) { + return { tooltip: 'Unparsable Feature', content: [] }; + } + + const tooltip = feature.properties.REGION_NAME; + + const content = ( + <> +
    {`Region Name: ${feature.properties.REGION_NAME}`}
    +
    {`Region Area: ${(feature.properties.FEATURE_AREA_SQM / 10000).toFixed( + 0 + )} ha`}
    + + ); + + return { tooltip, content }; + } + } +}; diff --git a/app/src/components/security/RouteGuards.tsx b/app/src/components/security/RouteGuards.tsx index 4dd46c3703..220c99a9a8 100644 --- a/app/src/components/security/RouteGuards.tsx +++ b/app/src/components/security/RouteGuards.tsx @@ -171,11 +171,7 @@ const CheckIfAuthenticatedUser: React.FC = ({ children }) => { } } else { // The user does not have a pending access request, restrict them to the access-request, request-submitted or logout pages - if ( - location.pathname !== '/access-request' && - location.pathname !== '/request-submitted' && - location.pathname !== '/logout' - ) { + if (!['/access-request', '/request-submitted', '/logout'].includes(location.pathname)) { // User attempted to go to restricted page return ; } diff --git a/app/src/components/surveys/SurveysList.tsx b/app/src/components/surveys/SurveysList.tsx index bad1fb0e89..4d1908f95a 100644 --- a/app/src/components/surveys/SurveysList.tsx +++ b/app/src/components/surveys/SurveysList.tsx @@ -1,4 +1,3 @@ -import Chip from '@material-ui/core/Chip'; import Link from '@material-ui/core/Link'; import { Theme } from '@material-ui/core/styles/createMuiTheme'; import makeStyles from '@material-ui/core/styles/makeStyles'; @@ -7,30 +6,14 @@ import TableBody from '@material-ui/core/TableBody'; import TableCell from '@material-ui/core/TableCell'; import TableContainer from '@material-ui/core/TableContainer'; import TableHead from '@material-ui/core/TableHead'; -import TablePagination from '@material-ui/core/TablePagination'; import TableRow from '@material-ui/core/TableRow'; -import clsx from 'clsx'; -import { DATE_FORMAT } from 'constants/dateTimeFormats'; -import { SurveyStatusType } from 'constants/misc'; +import Typography from '@material-ui/core/Typography'; import { SurveyViewObject } from 'interfaces/useSurveyApi.interface'; -import moment from 'moment'; import React, { useState } from 'react'; -import { useHistory } from 'react-router'; -import { handleChangePage, handleChangeRowsPerPage } from 'utils/tablePaginationUtils'; -import { getFormattedDateRangeString } from 'utils/Utils'; const useStyles = makeStyles((theme: Theme) => ({ - chip: { - color: '#ffffff' - }, - chipUnpublished: { - backgroundColor: theme.palette.text.disabled - }, - chipActive: { - backgroundColor: theme.palette.success.main - }, - chipPublishedCompleted: { - backgroundColor: theme.palette.success.main + surveyTable: { + tableLayout: 'fixed' } })); @@ -41,116 +24,51 @@ export interface ISurveysListProps { const SurveysList: React.FC = (props) => { const classes = useStyles(); - const history = useHistory(); - const [rowsPerPage, setRowsPerPage] = useState(5); - const [page, setPage] = useState(0); - - const getSurveyCompletionStatusType = (surveyObject: SurveyViewObject): SurveyStatusType => { - if ( - surveyObject.survey_details.end_date && - moment(surveyObject.survey_details.end_date).endOf('day').isBefore(moment()) - ) { - return SurveyStatusType.COMPLETED; - } - - return SurveyStatusType.ACTIVE; - }; - - const getSurveyPublishStatusType = (surveyObject: SurveyViewObject): SurveyStatusType => { - if (surveyObject.survey_details.publish_date) { - return SurveyStatusType.PUBLISHED; - } - - return SurveyStatusType.UNPUBLISHED; - }; - - const getChipIcon = (status_name: string) => { - let chipLabel; - let chipStatusClass; - - if (SurveyStatusType.UNPUBLISHED === status_name) { - chipLabel = 'Unpublished'; - chipStatusClass = classes.chipUnpublished; - } else if (SurveyStatusType.PUBLISHED === status_name) { - chipLabel = 'Published'; - chipStatusClass = classes.chipPublishedCompleted; - } else if (SurveyStatusType.ACTIVE === status_name) { - chipLabel = 'Active'; - chipStatusClass = classes.chipActive; - } else if (SurveyStatusType.COMPLETED === status_name) { - chipLabel = 'Completed'; - chipStatusClass = classes.chipPublishedCompleted; - } - - return ; - }; + const [rowsPerPage] = useState(5); + const [page] = useState(0); return ( <> - +
    Name Species - Timeline - Status - Published + Purpose {props.surveysList.length > 0 && props.surveysList.slice(page * rowsPerPage, page * rowsPerPage + rowsPerPage).map((row, index) => ( - + - history.push(`/admin/projects/${props.projectId}/surveys/${row.survey_details.id}/details`) - }> + href={`/admin/projects/${props.projectId}/surveys/${row.survey_details.id}/details`}> {row.survey_details.survey_name} {[...row.species?.focal_species_names, ...row.species?.ancillary_species_names].join(', ')} - - {getFormattedDateRangeString( - DATE_FORMAT.ShortMediumDateFormat, - row.survey_details.start_date, - row.survey_details.end_date - )} - - {getChipIcon(getSurveyCompletionStatusType(row))} - {getChipIcon(getSurveyPublishStatusType(row))} + Community Composition ))} {!props.surveysList.length && ( - - No Surveys + + + No Surveys + )}
    - {props.surveysList.length > 0 && ( - handleChangePage(event, newPage, setPage)} - onChangeRowsPerPage={(event: React.ChangeEvent) => - handleChangeRowsPerPage(event, setPage, setRowsPerPage) - } - /> - )} ); }; diff --git a/app/src/components/toolbar/ActionToolbars.tsx b/app/src/components/toolbar/ActionToolbars.tsx index 910f62b375..411ac36a50 100644 --- a/app/src/components/toolbar/ActionToolbars.tsx +++ b/app/src/components/toolbar/ActionToolbars.tsx @@ -36,7 +36,7 @@ export const H3ButtonToolbar: React.FC = (props) => { endIcon={props.buttonEndIcon} onClick={() => props.buttonOnClick()} {...props.buttonProps}> - {props.buttonLabel} + {props.buttonLabel} ); @@ -50,7 +50,6 @@ export const H2ButtonToolbar: React.FC = (props) => { ); @@ -85,6 +84,7 @@ export interface ICustomMenuButtonProps { buttonTitle: string; buttonStartIcon?: ReactNode; buttonEndIcon?: ReactNode; + buttonVariant?: string; buttonProps?: Partial & { 'data-testid'?: string }; menuItems: IMenuToolbarItem[]; } @@ -127,6 +127,7 @@ export const CustomMenuButton: React.FC = (props) => { {props.buttonLabel} = (props) => { return ( - + {props.label} {props.children} diff --git a/app/src/constants/attachments.ts b/app/src/constants/attachments.ts index 06dbef7a12..a2446973a1 100644 --- a/app/src/constants/attachments.ts +++ b/app/src/constants/attachments.ts @@ -3,6 +3,13 @@ export enum AttachmentType { OTHER = 'Other' } +export enum AttachmentStatus { + PENDING_REVIEW = 'PENDING_REVIEW', + SECURED = 'SECURED', + UNSECURED = 'UNSECURED', + SUBMITTED = 'SUBMITTED' +} + export enum ProjectSurveyAttachmentValidExtensions { AUDIO = '.wav, .mp3, .mp4, .wma', DATA = '.txt, .xls, .xlsx, .xlsm, .xlsb, .accdb, .mdb, .ods, .csv', diff --git a/app/src/constants/i18n.ts b/app/src/constants/i18n.ts index 0f510e8e8d..4b02c552db 100644 --- a/app/src/constants/i18n.ts +++ b/app/src/constants/i18n.ts @@ -1,19 +1,35 @@ export const CreateProjectI18N = { - cancelTitle: 'Cancel Create Project', - cancelText: 'Are you sure you want to cancel?', + cancelTitle: 'Cancel Project Creation', + cancelText: 'Are you sure you want to cancel? Changes you have made will not be saved.', createErrorTitle: 'Error Creating Project', createErrorText: 'An error has occurred while attempting to create your project, please try again. If the error persists, please contact your system administrator.' }; +export const EditProjectI18N = { + cancelTitle: 'Cancel Edit Project', + cancelText: 'Are you sure you want to cancel? Changes you have made will not be saved.', + createErrorTitle: 'Error Editing Project', + createErrorText: + 'An error has occurred while attempting to edit your project, please try again. If the error persists, please contact your system administrator.' +}; + export const CreateSurveyI18N = { - cancelTitle: 'Cancel Create Survey', - cancelText: 'Are you sure you want to cancel?', + cancelTitle: 'Cancel Survey Creation', + cancelText: 'Are you sure you want to cancel? Changes you have made will not be saved.', createErrorTitle: 'Error Creating Survey', createErrorText: 'An error has occurred while attempting to create your survey, please try again. If the error persists, please contact your system administrator.' }; +export const EditSurveyI18N = { + cancelTitle: 'Cancel Survey Edit', + cancelText: 'Are you sure you want to cancel? Changes you have made will not be saved.', + createErrorTitle: 'Error Editing Survey', + createErrorText: + 'An error has occurred while attempting to create your survey, please try again. If the error persists, please contact your system administrator.' +}; + export const CreatePermitsI18N = { cancelTitle: 'Cancel Create Permits', cancelText: 'Are you sure you want to cancel?', @@ -42,6 +58,12 @@ export const CreateProjectDraftI18N = { 'An error has occurred while attempting to save your draft, please try again. If the error persists, please contact your system administrator.' }; +export const DeleteProjectDraftI18N = { + draftErrorTitle: 'Error Deleting Draft', + draftErrorText: + 'An error has occurred while attempting to delete your draft, please try again. If the error persists, please contact your system administrator.' +}; + export const EditPartnershipsI18N = { editTitle: 'Edit Partnerships', editErrorTitle: 'Error Editing Partnerships', @@ -163,6 +185,18 @@ export const AccessRequestI18N = { 'An error has occurred while attempting to make an access request, please try again. If the error persists, please contact your system administrator.' }; +export const AccessApprovalDispatchI18N = { + reviewErrorTitle: 'Error dispatching granted access notification', + reviewErrorText: + 'The access request was approved successfully, but an error occurred while attempting to notify the user. Please contact your system administrator.' +}; + +export const AccessDenialDispatchI18N = { + reviewErrorTitle: 'Error dispatching denied access notification', + reviewErrorText: + 'The access request was denied successfully, but an error occurred while attempting to notify the user. Please contact your system administrator.' +}; + export const ReviewAccessRequestI18N = { reviewErrorTitle: 'Error reviewing access request', reviewErrorText: @@ -177,14 +211,6 @@ export const DeleteProjectI18N = { 'An error has occurred while attempting to delete this project, its attachments and associated surveys/observations, please try again. If the error persists, please contact your system administrator.' }; -export const PublishProjectI18N = { - publishTitle: 'Publish Project', - publishText: 'Are you sure you want to publish this project?', - publishErrorTitle: 'Error Publishing Project', - publishErrorText: - 'An error has occurred while attempting to publish this project, please try again. If the error persists, please contact your system administrator.' -}; - export const DeleteSurveyI18N = { deleteTitle: 'Delete Survey', deleteText: 'Are you sure you want to delete this survey, its attachments and associated observations?', @@ -193,14 +219,6 @@ export const DeleteSurveyI18N = { 'An error has occurred while attempting to delete this project, its attachments and associated surveys/observations, please try again. If the error persists, please contact your system administrator.' }; -export const PublishSurveyI18N = { - publishTitle: 'Publish Survey', - publishText: 'Are you sure you want to publish this survey?', - publishErrorTitle: 'Error Publishing Survey', - publishErrorText: - 'An error has occurred while attempting to publish this survey, please try again. If the error persists, please contact your system administrator.' -}; - export const EditReportMetaDataI18N = { editTitle: 'Edit Report Meta Data', editErrorTitle: 'Error Editing Report Meta Data', @@ -208,10 +226,22 @@ export const EditReportMetaDataI18N = { 'An error has occurred while attempting to edit your report meta data, please try again. If the error persists, please contact your system administrator.' }; +export const AddSystemUserI18N = { + addUserErrorTitle: 'Error Adding System User', + addUserErrorText: + 'An error has occurred while attempting to add the system user. This user has already been granted this role. If the error persists, please contact your system administrator.' +}; + +export const UpdateSystemUserI18N = { + updateUserErrorTitle: 'Error Updating System User', + updateUserErrorText: + 'An error has occurred while attempting to update the system user. If the error persists, please contact your system administrator.' +}; + export const DeleteSystemUserI18N = { - deleteErrorTitle: 'Error Deleting System User', - deleteErrorText: - 'An error has occurred while attempting to delete the system user, please try again. If the error persists, please contact your system administrator.' + deleteUserErrorTitle: 'Error Deleting System User', + deleteUserErrorText: + 'An error has occurred while attempting to delete the system user. If the error persists, please contact your system administrator.' }; export const ProjectParticipantsI18N = { diff --git a/app/src/constants/misc.ts b/app/src/constants/misc.ts index ea2ffe37ec..99554b5733 100644 --- a/app/src/constants/misc.ts +++ b/app/src/constants/misc.ts @@ -9,16 +9,16 @@ export enum AdministrativeActivityStatusType { } export enum ProjectStatusType { - UNPUBLISHED = 'Unpublished', - PUBLISHED = 'Published', COMPLETED = 'Completed', ACTIVE = 'Active', DRAFT = 'Draft' } export enum SurveyStatusType { - UNPUBLISHED = 'Unpublished', - PUBLISHED = 'Published', COMPLETED = 'Completed', ACTIVE = 'Active' } + +export enum DocumentReviewStatus { + PENDING = 'Pending Review' +} diff --git a/app/src/constants/spatial.ts b/app/src/constants/spatial.ts new file mode 100644 index 0000000000..69c82d8f32 --- /dev/null +++ b/app/src/constants/spatial.ts @@ -0,0 +1,34 @@ +import { Feature } from 'geojson'; + +export const MAP_DEFAULT_ZOOM = 6; +export const MAP_MIN_ZOOM = 6; +export const MAP_MAX_ZOOM = 15; + +export const ALL_OF_BC_BOUNDARY: Feature = { + type: 'Feature', + properties: {}, + geometry: { + type: 'Polygon', + coordinates: [ + [ + [-146.95401365536304, 44.62175409623327], + [-146.95401365536304, 63.528970541102794], + [-105.07413084286304, 63.528970541102794], + [-105.07413084286304, 44.62175409623327], + [-146.95401365536304, 44.62175409623327] + ] + ] + } +}; + +export enum SPATIAL_COMPONENT_TYPE { + OCCURRENCE = 'Occurrence', + BOUNDARY = 'Boundary', + BOUNDARY_CENTROID = 'Boundary Centroid' +} + +export enum LAYER_NAME { + OCCURRENCES = 'Occurrences', + BOUNDARIES = 'Boundaries', + DATASETS = 'Datasets' +} diff --git a/app/src/contexts/configContext.tsx b/app/src/contexts/configContext.tsx index 70f7a54b21..b66cbdb402 100644 --- a/app/src/contexts/configContext.tsx +++ b/app/src/contexts/configContext.tsx @@ -51,7 +51,6 @@ const getLocalConfig = (): IConfig => { const OBJECT_STORE_URL = process.env.OBJECT_STORE_URL || 'nrs.objectstore.gov.bc.ca'; const OBJECT_STORE_BUCKET_NAME = process.env.OBJECT_STORE_BUCKET_NAME || 'gblhvt'; - return { API_HOST: ensureProtocol(API_URL, 'http://'), N8N_HOST: ensureProtocol(N8N_URL, 'http://'), @@ -60,12 +59,11 @@ const getLocalConfig = (): IConfig => { REACT_APP_NODE_ENV: process.env.REACT_APP_NODE_ENV || 'dev', VERSION: `${process.env.VERSION || 'NA'}(build #${process.env.CHANGE_VERSION || 'NA'})`, KEYCLOAK_CONFIG: { - url: process.env.SSO_URL || 'https://dev.oidc.gov.bc.ca/auth', - realm: process.env.SSO_REALM || '35r1iman', - clientId: process.env.SSO_CLIENT_ID || 'biohubbc' + url: process.env.REACT_APP_KEYCLOAK_HOST || '', + realm: process.env.REACT_APP_KEYCLOAK_REALM || '', + clientId: process.env.REACT_APP_KEYCLOAK_CLIENT_ID || '' }, - SITEMINDER_LOGOUT_URL: - process.env.REACT_APP_SITEMINDER_LOGOUT_URL || 'https://logontest7.gov.bc.ca/clp-cgi/logoff.cgi', + SITEMINDER_LOGOUT_URL: process.env.REACT_APP_SITEMINDER_LOGOUT_URL || '', MAX_UPLOAD_NUM_FILES: Number(process.env.REACT_APP_MAX_UPLOAD_NUM_FILES) || 10, MAX_UPLOAD_FILE_SIZE: Number(process.env.REACT_APP_MAX_UPLOAD_FILE_SIZE) || 52428800, S3_PUBLIC_HOST_URL: ensureProtocol(`${OBJECT_STORE_URL}/${OBJECT_STORE_BUCKET_NAME}`, 'https://') diff --git a/app/src/features/admin/AdminUsersLayout.test.tsx b/app/src/features/admin/AdminUsersLayout.test.tsx deleted file mode 100644 index 64b2f66160..0000000000 --- a/app/src/features/admin/AdminUsersLayout.test.tsx +++ /dev/null @@ -1,15 +0,0 @@ -import { render } from '@testing-library/react'; -import React from 'react'; -import AdminUsersLayout from './AdminUsersLayout'; - -describe('AdminUsersLayout', () => { - it('renders correctly', () => { - const { getByText } = render( - -

    This is the admin users layout test child component

    -
    - ); - - expect(getByText('This is the admin users layout test child component')).toBeVisible(); - }); -}); diff --git a/app/src/features/admin/users/AccessRequestList.test.tsx b/app/src/features/admin/users/AccessRequestList.test.tsx index b389bc9208..9ecae1b75b 100644 --- a/app/src/features/admin/users/AccessRequestList.test.tsx +++ b/app/src/features/admin/users/AccessRequestList.test.tsx @@ -60,6 +60,7 @@ describe('AccessRequestList', () => { data: { name: 'test user', username: 'testusername', + userGuid: 'aaaa', email: 'email@email.com', role: 2, identitySource: SYSTEM_IDENTITY_SOURCE.IDIR, @@ -76,7 +77,7 @@ describe('AccessRequestList', () => { await waitFor(() => { expect(getByText('testusername')).toBeVisible(); expect(getByText('Apr 20, 2020')).toBeVisible(); - expect(getByText('Pending')).toBeVisible(); + expect(getByText('Review')).toBeVisible(); expect(getByRole('button')).toHaveTextContent('Review'); }); }); @@ -95,6 +96,7 @@ describe('AccessRequestList', () => { data: { name: 'test user', username: 'testusername', + userGuid: 'aaaa', email: 'email@email.com', role: 2, identitySource: SYSTEM_IDENTITY_SOURCE.IDIR, @@ -130,6 +132,7 @@ describe('AccessRequestList', () => { data: { name: 'test user', username: 'testusername', + userGuid: 'aaaa', email: 'email@email.com', role: 2, identitySource: SYSTEM_IDENTITY_SOURCE.IDIR, @@ -172,7 +175,7 @@ describe('AccessRequestList', () => { await waitFor(() => { expect(getByText('Apr 20, 2020')).toBeVisible(); - expect(getByText('Pending')).toBeVisible(); + expect(getByText('Review')).toBeVisible(); }); }); @@ -192,6 +195,7 @@ describe('AccessRequestList', () => { data: { name: 'test user', username: 'testusername', + userGuid: 'aaaa', email: 'email@email.com', role: 2, identitySource: SYSTEM_IDENTITY_SOURCE.IDIR, @@ -220,6 +224,7 @@ describe('AccessRequestList', () => { expect(mockBiohubApi().admin.approveAccessRequest).toHaveBeenCalledTimes(1); expect(mockBiohubApi().admin.approveAccessRequest).toHaveBeenCalledWith( 1, + 'aaaa', 'testusername', SYSTEM_IDENTITY_SOURCE.IDIR, [2] @@ -243,6 +248,7 @@ describe('AccessRequestList', () => { data: { name: 'test user', username: 'testusername', + userGuid: 'aaaa', email: 'email@email.com', role: 1, identitySource: SYSTEM_IDENTITY_SOURCE.IDIR, diff --git a/app/src/features/admin/users/AccessRequestList.tsx b/app/src/features/admin/users/AccessRequestList.tsx index d99f91cdd9..c0558f492e 100644 --- a/app/src/features/admin/users/AccessRequestList.tsx +++ b/app/src/features/admin/users/AccessRequestList.tsx @@ -1,5 +1,6 @@ import Box from '@material-ui/core/Box'; import Button from '@material-ui/core/Button'; +import Divider from '@material-ui/core/Divider'; import Paper from '@material-ui/core/Paper'; import makeStyles from '@material-ui/core/styles/makeStyles'; import Table from '@material-ui/core/Table'; @@ -13,7 +14,7 @@ import Typography from '@material-ui/core/Typography'; import { AccessStatusChip } from 'components/chips/RequestChips'; import RequestDialog from 'components/dialog/RequestDialog'; import { DATE_FORMAT } from 'constants/dateTimeFormats'; -import { ReviewAccessRequestI18N } from 'constants/i18n'; +import { AccessApprovalDispatchI18N, AccessDenialDispatchI18N, ReviewAccessRequestI18N } from 'constants/i18n'; import { AdministrativeActivityStatusType } from 'constants/misc'; import { DialogContext } from 'contexts/dialogContext'; import { APIError } from 'hooks/api/useAxios'; @@ -34,6 +35,9 @@ const useStyles = makeStyles(() => ({ '& td': { verticalAlign: 'middle' } + }, + toolbarCount: { + fontWeight: 400 } })); @@ -53,7 +57,6 @@ const AccessRequestList: React.FC = (props) => { const { accessRequests, codes, refresh } = props; const classes = useStyles(); - const biohubApi = useBiohubApi(); const [activeReviewDialog, setActiveReviewDialog] = useState<{ @@ -78,6 +81,30 @@ const AccessRequestList: React.FC = (props) => { } }; + const dispatchApprovalErrorDialogProps = { + dialogTitle: AccessApprovalDispatchI18N.reviewErrorTitle, + dialogText: AccessApprovalDispatchI18N.reviewErrorText, + open: false, + onClose: () => { + dialogContext.setErrorDialog({ open: false }); + }, + onOk: () => { + dialogContext.setErrorDialog({ open: false }); + } + }; + + const dispatchDenialErrorDialogProps = { + dialogTitle: AccessDenialDispatchI18N.reviewErrorTitle, + dialogText: AccessDenialDispatchI18N.reviewErrorText, + open: false, + onClose: () => { + dialogContext.setErrorDialog({ open: false }); + }, + onOk: () => { + dialogContext.setErrorDialog({ open: false }); + } + }; + const handleReviewDialogApprove = async (values: IReviewAccessRequestForm) => { const updatedRequest = activeReviewDialog.request as IGetAccessRequestsListResponse; @@ -86,12 +113,36 @@ const AccessRequestList: React.FC = (props) => { try { await biohubApi.admin.approveAccessRequest( updatedRequest.id, + updatedRequest.data.userGuid, updatedRequest.data.username, updatedRequest.data.identitySource, (values.system_role && [values.system_role]) || [] ); - refresh(); + try { + await biohubApi.admin.sendGCNotification( + { + emailAddress: updatedRequest.data.email, + phoneNumber: '', + userId: updatedRequest.id + }, + { + subject: 'SIMS: Your request for access has been approved.', + header: 'Your request for access to the Species Inventory Management System has been approved.', + body1: 'This is an automated message from the BioHub Species Inventory Management System', + body2: '', + footer: '' + } + ); + } catch (error) { + dialogContext.setErrorDialog({ + ...dispatchApprovalErrorDialogProps, + open: true, + dialogErrorDetails: (error as APIError).errors + }); + } finally { + refresh(); + } } catch (error) { dialogContext.setErrorDialog({ ...defaultErrorDialogProps, @@ -109,7 +160,30 @@ const AccessRequestList: React.FC = (props) => { try { await biohubApi.admin.denyAccessRequest(updatedRequest.id); - refresh(); + try { + await biohubApi.admin.sendGCNotification( + { + emailAddress: updatedRequest.data.email, + phoneNumber: '', + userId: updatedRequest.id + }, + { + subject: 'SIMS: Your request for access has been denied.', + header: 'Your request for access to the Species Inventory Management System has been denied.', + body1: 'This is an automated message from the BioHub Species Inventory Management System', + body2: '', + footer: '' + } + ); + } catch (error) { + dialogContext.setErrorDialog({ + ...dispatchDenialErrorDialogProps, + open: true, + dialogErrorDetails: (error as APIError).errors + }); + } finally { + refresh(); + } } catch (error) { dialogContext.setErrorDialog({ ...defaultErrorDialogProps, @@ -145,57 +219,63 @@ const AccessRequestList: React.FC = (props) => { ) }} /> - - - - Access Requests ({accessRequests?.length || 0}) - + + + + Access Requests{' '} + + ({accessRequests?.length || 0}) + + - - - - - Username - Date of Request - Access Status - - Actions - - - - - {!accessRequests?.length && ( - - - No Access Requests + + + +
    + + + Username + Date of Request + Status + + Actions - )} - {accessRequests?.map((row, index) => { - return ( - - {row.data?.username || ''} - {getFormattedDate(DATE_FORMAT.ShortMediumDateFormat, row.create_date)} - - - - - - {row.status_name === AdministrativeActivityStatusType.PENDING && ( - - )} + + + {!accessRequests?.length && ( + + + No Access Requests - ); - })} - -
    -
    + )} + {accessRequests?.map((row, index) => { + return ( + + {row.data?.username || ''} + {getFormattedDate(DATE_FORMAT.ShortMediumDateFormat, row.create_date)} + + + + + + {row.status_name === AdministrativeActivityStatusType.PENDING && ( + + )} + + + ); + })} + + + +
    ); diff --git a/app/src/features/admin/users/ActiveUsersList.test.tsx b/app/src/features/admin/users/ActiveUsersList.test.tsx index 102f5b69c0..10d9f49165 100644 --- a/app/src/features/admin/users/ActiveUsersList.test.tsx +++ b/app/src/features/admin/users/ActiveUsersList.test.tsx @@ -34,6 +34,7 @@ describe('ActiveUsersList', () => { { id: 1, user_identifier: 'username', + user_guid: 'user-guid', user_record_end_date: '2020-10-10', role_names: ['role 1', 'role 2'] } @@ -54,6 +55,7 @@ describe('ActiveUsersList', () => { { id: 1, user_identifier: 'username', + user_guid: 'user-guid', user_record_end_date: '2020-10-10', role_names: [] } diff --git a/app/src/features/admin/users/ActiveUsersList.tsx b/app/src/features/admin/users/ActiveUsersList.tsx index 90f7037156..0cc90da0b7 100644 --- a/app/src/features/admin/users/ActiveUsersList.tsx +++ b/app/src/features/admin/users/ActiveUsersList.tsx @@ -1,8 +1,8 @@ import Box from '@material-ui/core/Box'; import Button from '@material-ui/core/Button'; -import Grid from '@material-ui/core/Grid'; +import Divider from '@material-ui/core/Divider'; +import Link from '@material-ui/core/Link'; import Paper from '@material-ui/core/Paper'; -import { Theme } from '@material-ui/core/styles/createMuiTheme'; import makeStyles from '@material-ui/core/styles/makeStyles'; import Table from '@material-ui/core/Table'; import TableBody from '@material-ui/core/TableBody'; @@ -13,12 +13,11 @@ import TablePagination from '@material-ui/core/TablePagination'; import TableRow from '@material-ui/core/TableRow'; import Toolbar from '@material-ui/core/Toolbar'; import Typography from '@material-ui/core/Typography'; -import { mdiDotsVertical, mdiInformationOutline, mdiMenuDown, mdiPlus, mdiTrashCanOutline } from '@mdi/js'; +import { mdiChevronDown, mdiDotsVertical, mdiInformationOutline, mdiPlus, mdiTrashCanOutline } from '@mdi/js'; import Icon from '@mdi/react'; import EditDialog from 'components/dialog/EditDialog'; -import { IErrorDialogProps } from 'components/dialog/ErrorDialog'; import { CustomMenuButton, CustomMenuIconButton } from 'components/toolbar/ActionToolbars'; -import { DeleteSystemUserI18N } from 'constants/i18n'; +import { AddSystemUserI18N, DeleteSystemUserI18N, UpdateSystemUserI18N } from 'constants/i18n'; import { DialogContext, ISnackbarProps } from 'contexts/dialogContext'; import { APIError } from 'hooks/api/useAxios'; import { useBiohubApi } from 'hooks/useBioHubApi'; @@ -33,12 +32,19 @@ import AddSystemUsersForm, { IAddSystemUsersForm } from './AddSystemUsersForm'; -const useStyles = makeStyles((theme: Theme) => ({ +const useStyles = makeStyles(() => ({ table: { tableLayout: 'fixed', '& td': { verticalAlign: 'middle' } + }, + toolbarCount: { + fontWeight: 400 + }, + linkButton: { + textAlign: 'left', + fontWeight: 700 } })); @@ -66,22 +72,6 @@ const ActiveUsersList: React.FC = (props) => { const [openAddUserDialog, setOpenAddUserDialog] = useState(false); - const defaultErrorDialogProps = { - dialogTitle: DeleteSystemUserI18N.deleteErrorTitle, - dialogText: DeleteSystemUserI18N.deleteErrorText, - open: false, - onClose: () => { - dialogContext.setErrorDialog({ open: false }); - }, - onOk: () => { - dialogContext.setErrorDialog({ open: false }); - } - }; - - const showErrorDialog = (textDialogProps?: Partial) => { - dialogContext.setErrorDialog({ ...defaultErrorDialogProps, ...textDialogProps, open: true }); - }; - const showSnackBar = (textDialogProps?: Partial) => { dialogContext.setSnackbar({ ...textDialogProps, open: true }); }; @@ -95,7 +85,7 @@ const ActiveUsersList: React.FC = (props) => { related projects. Are you sure you want to proceed?
    ), - yesButtonLabel: 'Remove User', + yesButtonLabel: 'Remove', noButtonLabel: 'Cancel', yesButtonProps: { color: 'secondary' }, onClose: () => { @@ -133,7 +123,20 @@ const ActiveUsersList: React.FC = (props) => { props.refresh(); } catch (error) { const apiError = error as APIError; - showErrorDialog({ dialogText: apiError.message, dialogErrorDetails: apiError.errors, open: true }); + + dialogContext.setErrorDialog({ + open: true, + dialogTitle: DeleteSystemUserI18N.deleteUserErrorTitle, + dialogText: DeleteSystemUserI18N.deleteUserErrorText, + dialogError: apiError.message, + dialogErrorDetails: apiError.errors, + onClose: () => { + dialogContext.setErrorDialog({ open: false }); + }, + onOk: () => { + dialogContext.setErrorDialog({ open: false }); + } + }); } }; @@ -185,7 +188,19 @@ const ActiveUsersList: React.FC = (props) => { props.refresh(); } catch (error) { const apiError = error as APIError; - showErrorDialog({ dialogText: apiError.message, dialogErrorDetails: apiError.errors, open: true }); + dialogContext.setErrorDialog({ + open: true, + dialogTitle: UpdateSystemUserI18N.updateUserErrorTitle, + dialogText: UpdateSystemUserI18N.updateUserErrorText, + dialogError: apiError.message, + dialogErrorDetails: apiError.errors, + onClose: () => { + dialogContext.setErrorDialog({ open: false }); + }, + onOk: () => { + dialogContext.setErrorDialog({ open: false }); + } + }); } }; @@ -197,7 +212,7 @@ const ActiveUsersList: React.FC = (props) => { await biohubApi.admin.addSystemUser( systemUser.userIdentifier, systemUser.identitySource, - systemUser.system_role + systemUser.systemRole ); } @@ -212,132 +227,135 @@ const ActiveUsersList: React.FC = (props) => { ) }); } catch (error) { + const apiError = error as APIError; dialogContext.setErrorDialog({ - ...defaultErrorDialogProps, open: true, - dialogError: (error as APIError).message, - dialogErrorDetails: (error as APIError).errors + dialogTitle: AddSystemUserI18N.addUserErrorTitle, + dialogText: AddSystemUserI18N.addUserErrorText, + dialogError: apiError.message, + dialogErrorDetails: apiError.errors, + onClose: () => { + dialogContext.setErrorDialog({ open: false }); + }, + onOk: () => { + dialogContext.setErrorDialog({ open: false }); + } }); } }; return ( <> - - - - - - Active Users ({activeUsers?.length || 0}) - - - - - - - - - + + + + Active Users{' '} + + ({activeUsers?.length || 0}) + + + - - - - - Username - Role - - Actions - - - - - {!activeUsers?.length && ( - - - No Active Users + + + +
    + + + Username + Role + + Actions - )} - {activeUsers.length > 0 && - activeUsers.slice(page * rowsPerPage, page * rowsPerPage + rowsPerPage).map((row, index) => ( - - - {row.user_identifier || 'Not Applicable'} - - - - { - return item1.name.localeCompare(item2.name); - }) - .map((item) => { - return { - menuLabel: item.name, - menuOnClick: () => handleChangeUserPermissionsClick(row, item.name, item.id) - }; - })} - buttonEndIcon={} - /> - - - - - } - menuItems={[ - { - menuIcon: , - menuLabel: 'View Users Details', - menuOnClick: () => - history.push({ - pathname: `/admin/users/${row.id}`, - state: row - }) - }, - { - menuIcon: , - menuLabel: 'Remove User', - menuOnClick: () => handleRemoveUserClick(row) - } - ]} - /> - + + + {!activeUsers?.length && ( + + + No Active Users - ))} - -
    -
    - {activeUsers?.length > 0 && ( - handleChangePage(event, newPage, setPage)} - onChangeRowsPerPage={(event: React.ChangeEvent) => - handleChangeRowsPerPage(event, setPage, setRowsPerPage) - } - /> - )} + )} + {activeUsers.length > 0 && + activeUsers.slice(page * rowsPerPage, page * rowsPerPage + rowsPerPage).map((row, index) => ( + + + + {row.user_identifier || 'No identifier'} + + + + + { + return item1.name.localeCompare(item2.name); + }) + .map((item) => { + return { + menuLabel: item.name, + menuOnClick: () => handleChangeUserPermissionsClick(row, item.name, item.id) + }; + })} + buttonEndIcon={} + /> + + + + + } + menuItems={[ + { + menuIcon: , + menuLabel: 'View Users Details', + menuOnClick: () => + history.push({ + pathname: `/admin/users/${row.id}`, + state: row + }) + }, + { + menuIcon: , + menuLabel: 'Remove User', + menuOnClick: () => handleRemoveUserClick(row) + } + ]} + /> + + + + ))} + + + + {activeUsers?.length > 0 && ( + handleChangePage(event, newPage, setPage)} + onChangeRowsPerPage={(event: React.ChangeEvent) => + handleChangeRowsPerPage(event, setPage, setRowsPerPage) + } + /> + )} +
    = (props) => { component={{ element: ( { return { value: item.id, label: item.name }; }) || [] diff --git a/app/src/features/admin/users/AddSystemUsersForm.tsx b/app/src/features/admin/users/AddSystemUsersForm.tsx index ca7869beda..59a8ab5267 100644 --- a/app/src/features/admin/users/AddSystemUsersForm.tsx +++ b/app/src/features/admin/users/AddSystemUsersForm.tsx @@ -18,7 +18,7 @@ import yup from 'utils/YupSchema'; export interface IAddSystemUsersFormArrayItem { userIdentifier: string; identitySource: string; - system_role: number; + systemRole: number; } export interface IAddSystemUsersForm { @@ -28,7 +28,7 @@ export interface IAddSystemUsersForm { export const AddSystemUsersFormArrayItemInitialValues: IAddSystemUsersFormArrayItem = { userIdentifier: '', identitySource: '', - system_role: ('' as unknown) as number + systemRole: ('' as unknown) as number }; export const AddSystemUsersFormInitialValues: IAddSystemUsersForm = { @@ -40,13 +40,13 @@ export const AddSystemUsersFormYupSchema = yup.object().shape({ yup.object().shape({ userIdentifier: yup.string().required('Username is required'), identitySource: yup.string().required('Login Method is required'), - system_role: yup.number().required('Role is required') + systemRole: yup.number().required('Role is required') }) ) }); export interface AddSystemUsersFormProps { - system_roles: any[]; + systemRoles: any[]; } const AddSystemUsersForm: React.FC = (props) => { @@ -59,7 +59,7 @@ const AddSystemUsersForm: React.FC = (props) => { render={(arrayHelpers) => ( - {values.systemUsers?.map((systemUser, index) => { + {values.systemUsers?.map((systemUser: IAddSystemUsersFormArrayItem, index: number) => { const userIdentifierMeta = getFieldMeta(`systemUsers.[${index}].userIdentifier`); const identitySourceMeta = getFieldMeta(`systemUsers.[${index}].identitySource`); const systemRoleMeta = getFieldMeta(`systemUsers.[${index}].roleId`); @@ -98,8 +98,15 @@ const AddSystemUsersForm: React.FC = (props) => { IDIR - - BCEID + + BCeID Basic + + + BCeID Business {identitySourceMeta.touched && identitySourceMeta.error} @@ -111,17 +118,17 @@ const AddSystemUsersForm: React.FC = (props) => { System Role - - - - -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    - -
    - -
    -
    - - Share Contact Details - -

    - Do you want the project contact’s name and email address visible to the public? -

    -
    -
    - - -

    -

    -
    -
    -
    - - - -
    -
    -
    -

    - Non-Sampling Permits -

    -
    -

    - Enter any scientific collection, wildlife act and/or park use permits. Provide the last 6 digits of the permit number. The last 6 digits are those after the hyphen (e.g. for KA12-845782 enter 845782). -

    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    -
    - - ​ - -
    - - - -
    -

    -

    -
    -
    - -
    -
    -
    -
    -
    - -
    -
    -
    -
    -
    -
    -
    - - -
    - - - - -`; - -exports[`CreatePermitPage shows circular spinner when codes not yet loaded 1`] = ` - -
    - - - -
    -
    -`; diff --git a/app/src/features/permits/__snapshots__/PermitsPage.test.tsx.snap b/app/src/features/permits/__snapshots__/PermitsPage.test.tsx.snap deleted file mode 100644 index ebb1dbfa0b..0000000000 --- a/app/src/features/permits/__snapshots__/PermitsPage.test.tsx.snap +++ /dev/null @@ -1,164 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`PermitsPage renders with a proper list of permits 1`] = ` - -
    -
    -
    -

    - Permits -

    - -
    -
    -
    -

    - 2 Permits found -

    -
    -
    -
    - - - - - - - - - - - - - - - - - - - - - - - -
    - Number - - Type - - Contact Agency - - Associated Project -
    - 123 - - Wildlife - - contact agency - - Project 1 -
    - 1234 - - Wildlife - - contact agency 2 - - No Associated Project -
    -
    -
    -
    -
    -
    -`; diff --git a/app/src/features/projects/ProjectsRouter.tsx b/app/src/features/projects/ProjectsRouter.tsx index c74617b822..0cbbcd69c0 100644 --- a/app/src/features/projects/ProjectsRouter.tsx +++ b/app/src/features/projects/ProjectsRouter.tsx @@ -1,11 +1,13 @@ import ProjectsLayout from 'features/projects/ProjectsLayout'; import ProjectPage from 'features/projects/view/ProjectPage'; import CreateSurveyPage from 'features/surveys/CreateSurveyPage'; +import EditSurveyPage from 'features/surveys/edit/EditSurveyPage'; import SurveyPage from 'features/surveys/view/SurveyPage'; import React from 'react'; import { Redirect, Switch } from 'react-router'; import AppRoute from 'utils/AppRoute'; import CreateProjectPage from './create/CreateProjectPage'; +import EditProjectPage from './edit/EditProjectPage'; import ProjectsListPage from './list/ProjectsListPage'; import ProjectParticipantsPage from './participants/ProjectParticipantsPage'; @@ -29,6 +31,12 @@ const ProjectsRouter: React.FC = () => { + + + + + + @@ -77,6 +85,10 @@ const ProjectsRouter: React.FC = () => { + + + + diff --git a/app/src/features/projects/PublicProjectsRouter.tsx b/app/src/features/projects/PublicProjectsRouter.tsx deleted file mode 100644 index 924a5fea80..0000000000 --- a/app/src/features/projects/PublicProjectsRouter.tsx +++ /dev/null @@ -1,38 +0,0 @@ -import PublicProjectPage from 'pages/public/PublicProjectPage'; -import PublicProjectsListPage from 'pages/public/PublicProjectsListPage'; -import React from 'react'; -import { Redirect, Switch } from 'react-router'; -import AppRoute from 'utils/AppRoute'; - -/** - * Router for all `/projects/*` pages. - * - * @param {*} props - * @return {*} - */ -const PublicProjectsRouter: React.FC = () => { - return ( - - - - - - - - - - - - - - - - {/* Catch any unknown routes, and re-direct to the not found page */} - - - - - ); -}; - -export default PublicProjectsRouter; diff --git a/app/src/features/projects/components/ProjectCoordinatorForm.test.tsx b/app/src/features/projects/components/ProjectCoordinatorForm.test.tsx index ef390e28c3..0df89f229b 100644 --- a/app/src/features/projects/components/ProjectCoordinatorForm.test.tsx +++ b/app/src/features/projects/components/ProjectCoordinatorForm.test.tsx @@ -1,4 +1,4 @@ -import { render } from '@testing-library/react'; +import { render, waitFor } from '@testing-library/react'; import ProjectCoordinatorForm, { ProjectCoordinatorInitialValues, ProjectCoordinatorYupSchema @@ -11,16 +11,18 @@ const handleSaveAndNext = jest.fn(); const agencies = ['Agency 1', 'Agency 2', 'Agency 3']; const projectCoordinatorFilledValues = { - first_name: 'Nerea', - last_name: 'Oneal', - email_address: 'quxu@mailinator.com', - coordinator_agency: 'Agency 3', - share_contact_details: 'true' + coordinator: { + first_name: 'Nerea', + last_name: 'Oneal', + email_address: 'quxu@mailinator.com', + coordinator_agency: 'Agency 3', + share_contact_details: 'true' + } }; describe('Project Contact Form', () => { - it('renders correctly the empty component correctly', () => { - const { asFragment } = render( + it('renders correctly the empty component correctly', async () => { + const { getByLabelText } = render( { ); - expect(asFragment()).toMatchSnapshot(); + await waitFor(() => { + expect(getByLabelText('First Name', { exact: false })).toBeVisible(); + }); }); - it('renders correctly the filled component correctly', () => { - const { asFragment } = render( + it('renders correctly the filled component correctly', async () => { + const { getByLabelText, getByDisplayValue } = render( { ); - expect(asFragment()).toMatchSnapshot(); + await waitFor(() => { + expect(getByLabelText('First Name', { exact: false })).toBeVisible(); + expect(getByDisplayValue('Nerea', { exact: false })).toBeVisible(); + }); }); }); diff --git a/app/src/features/projects/components/ProjectCoordinatorForm.tsx b/app/src/features/projects/components/ProjectCoordinatorForm.tsx index f3738b4904..21ff995135 100644 --- a/app/src/features/projects/components/ProjectCoordinatorForm.tsx +++ b/app/src/features/projects/components/ProjectCoordinatorForm.tsx @@ -5,71 +5,70 @@ import FormHelperText from '@material-ui/core/FormHelperText'; import Grid from '@material-ui/core/Grid'; import Radio from '@material-ui/core/Radio'; import RadioGroup from '@material-ui/core/RadioGroup'; -import { Theme } from '@material-ui/core/styles/createMuiTheme'; -import makeStyles from '@material-ui/core/styles/makeStyles'; import Typography from '@material-ui/core/Typography'; import AutocompleteFreeSoloField from 'components/fields/AutocompleteFreeSoloField'; import CustomTextField from 'components/fields/CustomTextField'; import { useFormikContext } from 'formik'; +import { ICreateProjectRequest } from 'interfaces/useProjectApi.interface'; import React from 'react'; import yup from 'utils/YupSchema'; export interface IProjectCoordinatorForm { - first_name: string; - last_name: string; - email_address: string; - coordinator_agency: string; - share_contact_details: string; + coordinator: { + first_name: string; + last_name: string; + email_address: string; + coordinator_agency: string; + share_contact_details: string; + }; } export const ProjectCoordinatorInitialValues: IProjectCoordinatorForm = { - first_name: '', - last_name: '', - email_address: '', - coordinator_agency: '', - share_contact_details: 'false' + coordinator: { + first_name: '', + last_name: '', + email_address: '', + coordinator_agency: '', + share_contact_details: 'false' + } }; export const ProjectCoordinatorYupSchema = yup.object().shape({ - first_name: yup.string().max(50, 'Cannot exceed 50 characters').required('Required'), - last_name: yup.string().max(50, 'Cannot exceed 50 characters').required('Required'), - email_address: yup - .string() - .max(500, 'Cannot exceed 500 characters') - .email('Must be a valid email address') - .required('Required'), - coordinator_agency: yup.string().max(300, 'Cannot exceed 300 characters').required('Required').nullable(), - share_contact_details: yup.string().required('Required') + coordinator: yup.object().shape({ + first_name: yup.string().max(50, 'Cannot exceed 50 characters').required('First Name is Required'), + last_name: yup.string().max(50, 'Cannot exceed 50 characters').required('Last Name is Required'), + email_address: yup + .string() + .max(500, 'Cannot exceed 500 characters') + .email('Must be a valid email address') + .required('Business Email Address is Required'), + coordinator_agency: yup + .string() + .max(300, 'Cannot exceed 300 characters') + .required('Coordinator Agency is Required') + .nullable(), + share_contact_details: yup.string().required('Please select an option') + }) }); export interface IProjectCoordinatorFormProps { coordinator_agency: string[]; } -const useStyles = makeStyles((theme: Theme) => ({ - legend: { - marginTop: '1rem', - float: 'left', - marginBottom: '0.75rem', - letterSpacing: '-0.01rem' - } -})); - /** * Create project - coordinator fields * * @return {*} */ const ProjectCoordinatorForm: React.FC = (props) => { - const classes = useStyles(); - const { values, touched, errors, handleChange, handleSubmit } = useFormikContext(); + const { values, touched, handleSubmit, errors, handleChange } = useFormikContext(); return (
    = (props) = = (props) = = (props) = - + - + error={touched.coordinator?.share_contact_details && Boolean(errors.coordinator?.share_contact_details)}> + Share Contact Details - + - Do you want the project contact’s name and email address visible to the public? + Do you want the project contact's name and email address visible to the public? = (props) = control={} label="Yes" /> - {errors.share_contact_details} + {errors.coordinator?.share_contact_details} diff --git a/app/src/features/projects/components/ProjectDetailsForm.test.tsx b/app/src/features/projects/components/ProjectDetailsForm.test.tsx index 80ee1e1453..2c62221da0 100644 --- a/app/src/features/projects/components/ProjectDetailsForm.test.tsx +++ b/app/src/features/projects/components/ProjectDetailsForm.test.tsx @@ -1,4 +1,4 @@ -import { render } from '@testing-library/react'; +import { render, waitFor } from '@testing-library/react'; import { IMultiAutocompleteFieldOption } from 'components/fields/MultiAutocompleteFieldVariableSize'; import { Formik } from 'formik'; import React from 'react'; @@ -39,8 +39,8 @@ const activity: IMultiAutocompleteFieldOption[] = [ ]; describe('ProjectDetailsForm', () => { - it('renders correctly with default empty values', () => { - const { asFragment } = render( + it('renders correctly with default empty values', async () => { + const { getByLabelText } = render( { ); - expect(asFragment()).toMatchSnapshot(); + await waitFor(() => { + expect(getByLabelText('Project Name', { exact: false })).toBeVisible(); + }); }); - it('renders correctly with existing details values', () => { + it('renders correctly with existing details values', async () => { const existingFormValues: IProjectDetailsForm = { - project_name: 'name 1', - project_type: 2, - project_activities: [2, 3], - start_date: '2021-03-14', - end_date: '2021-04-14' + project: { + project_name: 'name 1', + project_type: 2, + project_activities: [2, 3], + start_date: '2021-03-14', + end_date: '2021-04-14' + } }; - const { asFragment } = render( + const { getByLabelText, getByText } = render( { ); - expect(asFragment()).toMatchSnapshot(); + await waitFor(() => { + expect(getByLabelText('Project Name', { exact: false })).toBeVisible(); + expect(getByText('type 2', { exact: false })).toBeVisible(); + }); }); }); diff --git a/app/src/features/projects/components/ProjectDetailsForm.tsx b/app/src/features/projects/components/ProjectDetailsForm.tsx index ce1fa187f2..a93ef3c1eb 100644 --- a/app/src/features/projects/components/ProjectDetailsForm.tsx +++ b/app/src/features/projects/components/ProjectDetailsForm.tsx @@ -10,30 +10,37 @@ import MultiAutocompleteFieldVariableSize, { } from 'components/fields/MultiAutocompleteFieldVariableSize'; import StartEndDateFields from 'components/fields/StartEndDateFields'; import { useFormikContext } from 'formik'; +import { ICreateProjectRequest } from 'interfaces/useProjectApi.interface'; import React from 'react'; import yup from 'utils/YupSchema'; export interface IProjectDetailsForm { - project_name: string; - project_type: number; - project_activities: number[]; - start_date: string; - end_date: string; + project: { + project_name: string; + project_type: number; + project_activities: number[]; + start_date: string; + end_date: string; + }; } export const ProjectDetailsFormInitialValues: IProjectDetailsForm = { - project_name: '', - project_type: ('' as unknown) as number, - project_activities: [], - start_date: '', - end_date: '' + project: { + project_name: '', + project_type: ('' as unknown) as number, + project_activities: [], + start_date: '', + end_date: '' + } }; export const ProjectDetailsFormYupSchema = yup.object().shape({ - project_name: yup.string().max(300, 'Cannot exceed 300 characters').required('Required'), - project_type: yup.number().required('Required'), - start_date: yup.string().isValidDateString().required('Required'), - end_date: yup.string().isValidDateString().isEndDateSameOrAfterStartDate('start_date') + project: yup.object().shape({ + project_name: yup.string().max(300, 'Cannot exceed 300 characters').required('Project Name is Required'), + project_type: yup.number().required('Project Type is Required'), + start_date: yup.string().isValidDateString().required('Start Date is Required'), + end_date: yup.string().isValidDateString().isEndDateSameOrAfterStartDate('start_date') + }) }); export interface IProjectDetailsFormProps { @@ -47,7 +54,7 @@ export interface IProjectDetailsFormProps { * @return {*} */ const ProjectDetailsForm: React.FC = (props) => { - const formikProps = useFormikContext(); + const formikProps = useFormikContext(); const { values, touched, errors, handleChange, handleSubmit } = formikProps; @@ -56,7 +63,7 @@ const ProjectDetailsForm: React.FC = (props) => { = (props) => { /> - + Project Type - {touched.project_type && errors.project_type} + {errors.project?.project_type && ( + {touched.project?.project_type && errors.project?.project_type} + )} - + + + ); diff --git a/app/src/features/projects/components/ProjectDraftForm.test.tsx b/app/src/features/projects/components/ProjectDraftForm.test.tsx index 1b88b4fe9c..ca1841e96c 100644 --- a/app/src/features/projects/components/ProjectDraftForm.test.tsx +++ b/app/src/features/projects/components/ProjectDraftForm.test.tsx @@ -1,5 +1,6 @@ -import { render } from '@testing-library/react'; +import { render, waitFor } from '@testing-library/react'; import ProjectDraftForm, { + IProjectDraftForm, ProjectDraftFormInitialValues, ProjectDraftFormYupSchema } from 'features/projects/components/ProjectDraftForm'; @@ -8,13 +9,9 @@ import React from 'react'; const handleSaveAndNext = jest.fn(); -const projectDraftFilledValues = { - draft_name: 'draft test name' -}; - describe('Project Draft Form', () => { - it('renders correctly with empty initial values', () => { - const { asFragment } = render( + it('renders correctly with empty initial values', async () => { + const { getByLabelText } = render( { ); - expect(asFragment()).toMatchSnapshot(); + await waitFor(() => { + expect(getByLabelText('Draft Name', { exact: false })).toBeVisible(); + }); }); - it('renders correctly with populated initial values', () => { - const { asFragment } = render( + it('renders correctly with populated initial values', async () => { + const projectDraftFilledValues: IProjectDraftForm = { + draft_name: 'draft test name' + }; + + const { getByLabelText, getByDisplayValue } = render( { - handleSaveAndNext(values); - }}> + onSubmit={async () => {}}> {() => } ); - expect(asFragment()).toMatchSnapshot(); + await waitFor(() => { + expect(getByLabelText('Draft Name', { exact: false })).toBeVisible(); + expect(getByDisplayValue('draft test name', { exact: false })).toBeVisible(); + }); }); - it('renders correctly with errors', () => { - const { asFragment } = render( + it('renders correctly with errors', async () => { + const projectDraftFilledValues: IProjectDraftForm = { + draft_name: 'draft test name' + }; + + const { getByLabelText, getByText } = render( { ); - expect(asFragment()).toMatchSnapshot(); + await waitFor(() => { + expect(getByLabelText('Draft Name', { exact: false })).toBeVisible(); + expect(getByText('Error this is a required field', { exact: false })).toBeVisible(); + }); }); }); diff --git a/app/src/features/projects/components/ProjectFundingForm.test.tsx b/app/src/features/projects/components/ProjectFundingForm.test.tsx index 456dc4d124..87d373212c 100644 --- a/app/src/features/projects/components/ProjectFundingForm.test.tsx +++ b/app/src/features/projects/components/ProjectFundingForm.test.tsx @@ -45,8 +45,8 @@ const investment_action_category: IInvestmentActionCategoryOption[] = [ ]; describe('ProjectFundingForm', () => { - it('renders correctly with default empty values', () => { - const { baseElement } = render( + it('renders correctly with default empty values', async () => { + const { queryByText } = render( { ); - expect(baseElement).toMatchSnapshot(); + await waitFor(() => { + expect(queryByText('Add Funding Source')).toBeInTheDocument(); + }); }); - it('renders correctly with existing funding values', () => { + it('renders correctly with existing funding values', async () => { const existingFormValues: IProjectFundingForm = { - funding_sources: [ - { - id: 11, - agency_id: 1, - investment_action_category: 1, - investment_action_category_name: 'Action 23', - agency_project_id: '111', - funding_amount: 222, - start_date: '2021-03-14', - end_date: '2021-04-14', - revision_count: 23 - } - ] + funding: { + fundingSources: [ + { + id: 11, + agency_id: 1, + investment_action_category: 1, + investment_action_category_name: 'Action 23', + agency_project_id: '111', + funding_amount: 222, + start_date: '2021-03-14', + end_date: '2021-04-14', + revision_count: 23 + } + ] + } }; - const { baseElement } = render( + const { queryByText } = render( { ); - expect(baseElement).toMatchSnapshot(); + await waitFor(() => { + expect(queryByText('Add Funding Source')).toBeInTheDocument(); + expect(queryByText('111')).toBeInTheDocument(); + }); }); it('shows add funding source dialog on add click', async () => { const existingFormValues: IProjectFundingForm = { - funding_sources: [ - { - id: 11, - agency_id: 1, - investment_action_category: 1, - investment_action_category_name: 'action 1', - agency_project_id: '111', - funding_amount: 222, - start_date: '2021-03-14', - end_date: '2021-04-14', - revision_count: 23 - }, - { - id: 12, - agency_id: 2, - investment_action_category: 2, - investment_action_category_name: 'category 1', - agency_project_id: '112', - funding_amount: 223, - start_date: '2021-03-15', - end_date: '2021-04-15', - revision_count: 24 - } - ] + funding: { + fundingSources: [ + { + id: 11, + agency_id: 1, + investment_action_category: 1, + investment_action_category_name: 'action 1', + agency_project_id: '111', + funding_amount: 222, + start_date: '2021-03-14', + end_date: '2021-04-14', + revision_count: 23 + }, + { + id: 12, + agency_id: 2, + investment_action_category: 2, + investment_action_category_name: 'category 1', + agency_project_id: '112', + funding_amount: 223, + start_date: '2021-03-15', + end_date: '2021-04-15', + revision_count: 24 + } + ] + } }; const { getByTestId, queryByText } = render( @@ -149,19 +158,21 @@ describe('ProjectFundingForm', () => { it('shows edit funding source dialog on edit click', async () => { await act(async () => { const existingFormValues: IProjectFundingForm = { - funding_sources: [ - { - id: 11, - agency_id: 1, - investment_action_category: 1, - investment_action_category_name: 'action 1', - agency_project_id: '111', - funding_amount: 222, - start_date: '2021-03-14', - end_date: '2021-04-14', - revision_count: 23 - } - ] + funding: { + fundingSources: [ + { + id: 11, + agency_id: 1, + investment_action_category: 1, + investment_action_category_name: 'action 1', + agency_project_id: '111', + funding_amount: 222, + start_date: '2021-03-14', + end_date: '2021-04-14', + revision_count: 23 + } + ] + } }; const { getByTestId, getByText, queryByText } = render( @@ -197,19 +208,21 @@ describe('ProjectFundingForm', () => { it('deletes funding source dialog on delete click', async () => { await act(async () => { const existingFormValues: IProjectFundingForm = { - funding_sources: [ - { - id: 11, - agency_id: 1, - investment_action_category: 1, - investment_action_category_name: 'action 1', - agency_project_id: '111', - funding_amount: 222, - start_date: '2021-03-14', - end_date: '2021-04-14', - revision_count: 23 - } - ] + funding: { + fundingSources: [ + { + id: 11, + agency_id: 1, + investment_action_category: 1, + investment_action_category_name: 'action 1', + agency_project_id: '111', + funding_amount: 222, + start_date: '2021-03-14', + end_date: '2021-04-14', + revision_count: 23 + } + ] + } }; const { getByTestId, queryByTestId } = render( diff --git a/app/src/features/projects/components/ProjectFundingForm.tsx b/app/src/features/projects/components/ProjectFundingForm.tsx index a0b5336368..6c213d4971 100644 --- a/app/src/features/projects/components/ProjectFundingForm.tsx +++ b/app/src/features/projects/components/ProjectFundingForm.tsx @@ -17,6 +17,7 @@ import { IMultiAutocompleteFieldOption } from 'components/fields/MultiAutocomple import { DATE_FORMAT } from 'constants/dateTimeFormats'; import { AddFundingI18N } from 'constants/i18n'; import { FieldArray, useFormikContext } from 'formik'; +import { ICreateProjectRequest } from 'interfaces/useProjectApi.interface'; import React, { useState } from 'react'; import { getFormattedAmount, getFormattedDateRangeString } from 'utils/Utils'; import yup from 'utils/YupSchema'; @@ -27,11 +28,15 @@ import ProjectFundingItemForm, { } from './ProjectFundingItemForm'; export interface IProjectFundingForm { - funding_sources: IProjectFundingFormArrayItem[]; + funding: { + fundingSources: IProjectFundingFormArrayItem[]; + }; } export const ProjectFundingFormInitialValues: IProjectFundingForm = { - funding_sources: [] + funding: { + fundingSources: [] + } }; export const ProjectFundingFormYupSchema = yup.object().shape({}); @@ -48,6 +53,8 @@ export interface IProjectFundingFormProps { const useStyles = makeStyles((theme: Theme) => ({ title: { flexGrow: 1, + paddingTop: 0, + paddingBottom: 0, marginRight: '1rem', whiteSpace: 'nowrap', overflow: 'hidden', @@ -58,19 +65,10 @@ const useStyles = makeStyles((theme: Theme) => ({ marginLeft: theme.spacing(1), fontWeight: 400 }, - fundingListIem: { - padding: 0, - '& + li': { - marginTop: theme.spacing(2) + fundingListItem: { + '& .MuiPaper-root': { + width: '100%' } - }, - fundingListItemInner: { - flexGrow: 1, - flexShrink: 1, - overflow: 'hidden' - }, - fundingListItemToolbar: { - paddingRight: theme.spacing(2) } })); @@ -82,8 +80,8 @@ const useStyles = makeStyles((theme: Theme) => ({ const ProjectFundingForm: React.FC = (props) => { const classes = useStyles(); - const formikProps = useFormikContext(); - const { values } = formikProps; + const formikProps = useFormikContext(); + const { values, handleSubmit } = formikProps; //Tracks information about the current funding source item that is being added/edited const [currentProjectFundingFormArrayItem, setCurrentProjectFundingFormArrayItem] = useState({ @@ -94,32 +92,29 @@ const ProjectFundingForm: React.FC = (props) => { const [isModalOpen, setIsModalOpen] = useState(false); return ( -
    + - - Funding Sources ({values.funding_sources.length}) - - + ( - + = (props) => { }} onCancel={() => setIsModalOpen(false)} onSave={(projectFundingItemValues) => { - if (currentProjectFundingFormArrayItem.index < values.funding_sources.length) { + if (currentProjectFundingFormArrayItem.index < values.funding.fundingSources.length) { // Update an existing item arrayHelpers.replace(currentProjectFundingFormArrayItem.index, projectFundingItemValues); } else { @@ -147,15 +142,8 @@ const ProjectFundingForm: React.FC = (props) => { setIsModalOpen(false); }} /> - - {!values.funding_sources.length && ( - - - No Funding Sources - - - )} - {values.funding_sources.map((fundingSource, index) => { + + {values.funding.fundingSources.map((fundingSource, index) => { const investment_action_category_label = (fundingSource.agency_id === 1 && 'Investment Action') || (fundingSource.agency_id === 2 && 'Investment Category') || @@ -166,32 +154,29 @@ const ProjectFundingForm: React.FC = (props) => { )?.[0]?.label; return ( - - - + + + {getCodeValueNameByID(props.funding_sources, fundingSource.agency_id)} {investment_action_category_label && ( ({investment_action_category_value}) )} - { setCurrentProjectFundingFormArrayItem({ index: index, - values: values.funding_sources[index] + values: values.funding.fundingSources[index] }); setIsModalOpen(true); }}> = (props) => { - - - + + + Agency Project ID diff --git a/app/src/features/projects/components/ProjectIUCNForm.test.tsx b/app/src/features/projects/components/ProjectIUCNForm.test.tsx index d2f000cbe1..e96d75d4b5 100644 --- a/app/src/features/projects/components/ProjectIUCNForm.test.tsx +++ b/app/src/features/projects/components/ProjectIUCNForm.test.tsx @@ -49,7 +49,7 @@ const subClassifications2: IIUCNSubClassification2Option[] = [ describe('ProjectIUCNForm', () => { it('renders correctly with default empty values', () => { - const { asFragment } = render( + const { queryByLabelText } = render( { )} ); - - expect(asFragment()).toMatchSnapshot(); + expect(queryByLabelText('Classification')).toBe(null); + expect(queryByLabelText('Sub-classification')).toBe(null); }); it('renders correctly with existing details values', () => { const existingFormValues: IProjectIUCNForm = { - classificationDetails: [ - { - classification: 1, - subClassification1: 3, - subClassification2: 5 - } - ] + iucn: { + classificationDetails: [ + { + classification: 1, + subClassification1: 3, + subClassification2: 5 + } + ] + } }; - const { asFragment } = render( + const { getByLabelText, getByText, getAllByLabelText } = render( { ); - expect(asFragment()).toMatchSnapshot(); + expect(getByLabelText('Classification')).toBeVisible(); + expect(getAllByLabelText('Sub-classification').length).toEqual(2); + expect(getByText('Class 1')).toBeVisible(); + expect(getByText('A Sub-class 1', { exact: false })).toBeVisible(); + expect(getByText('A Sub-class 2', { exact: false })).toBeVisible(); }); it('changes fields on the IUCN menu items as expected', async () => { - const { asFragment, getAllByRole, getByRole, queryByTestId, getByText } = render( + const { getByLabelText, getAllByRole, getByRole, queryByTestId, getByText, getAllByLabelText } = render( { fireEvent.click(subClassification2Listbox.getByText(/A Sub-class 2/i)); await waitFor(() => { - expect(asFragment()).toMatchSnapshot(); + expect(getByLabelText('Classification')).toBeVisible(); + expect(getAllByLabelText('Sub-classification').length).toEqual(2); + expect(getByText('Class 1')).toBeVisible(); + expect(getByText('A Sub-class 1', { exact: false })).toBeVisible(); + expect(getByText('A Sub-class 2', { exact: false })).toBeVisible(); }); }); @@ -172,18 +182,20 @@ describe('ProjectIUCNForm', () => { it('renders correctly with error on the iucn classifications due to duplicates', () => { const existingFormValues: IProjectIUCNForm = { - classificationDetails: [ - { - classification: 1, - subClassification1: 3, - subClassification2: 5 - }, - { - classification: 1, - subClassification1: 3, - subClassification2: 5 - } - ] + iucn: { + classificationDetails: [ + { + classification: 1, + subClassification1: 3, + subClassification2: 5 + }, + { + classification: 1, + subClassification1: 3, + subClassification2: 5 + } + ] + } }; const { getAllByText, getByText } = render( @@ -192,7 +204,7 @@ describe('ProjectIUCNForm', () => { validationSchema={ProjectIUCNFormYupSchema} validateOnBlur={true} validateOnChange={false} - initialErrors={{ classificationDetails: 'Error is here' }} + initialErrors={{ iucn: { classificationDetails: 'Error is here' } }} onSubmit={async () => {}}> {() => ( { expect(getByText('Error is here')).toBeVisible(); }); - it('renders correctly with error on the iucn classification individual fields', () => { - const existingFormValues: IProjectIUCNForm = { - classificationDetails: [ - { - classification: 1, - subClassification1: 3, - subClassification2: 5 - } - ] - }; - - const { getByText } = render( - {}}> - {() => ( - - )} - - ); - - expect(getByText('Class 1')).toBeVisible(); - expect(getByText('A Sub-class 1')).toBeVisible(); - expect(getByText('A Sub-class 2')).toBeVisible(); - expect(getByText('Error here')).toBeVisible(); - expect(getByText('Error here too')).toBeVisible(); - expect(getByText('Error again here too')).toBeVisible(); - }); - it('deletes existing iucn classifications when delete icon is clicked', async () => { const existingFormValues: IProjectIUCNForm = { - classificationDetails: [ - { - classification: 1, - subClassification1: 3, - subClassification2: 5 - } - ] + iucn: { + classificationDetails: [ + { + classification: 1, + subClassification1: 3, + subClassification2: 5 + } + ] + } }; const { getByTestId, queryByTestId } = render( diff --git a/app/src/features/projects/components/ProjectIUCNForm.tsx b/app/src/features/projects/components/ProjectIUCNForm.tsx index f8d193115a..552a096ea2 100644 --- a/app/src/features/projects/components/ProjectIUCNForm.tsx +++ b/app/src/features/projects/components/ProjectIUCNForm.tsx @@ -6,8 +6,6 @@ import IconButton from '@material-ui/core/IconButton'; import InputLabel from '@material-ui/core/InputLabel'; import MenuItem from '@material-ui/core/MenuItem'; import Select from '@material-ui/core/Select'; -import { Theme } from '@material-ui/core/styles/createMuiTheme'; -import makeStyles from '@material-ui/core/styles/makeStyles'; import Typography from '@material-ui/core/Typography'; import { mdiArrowRight, mdiPlus, mdiTrashCanOutline } from '@mdi/js'; import Icon from '@mdi/react'; @@ -16,15 +14,6 @@ import { FieldArray, useFormikContext } from 'formik'; import React from 'react'; import yup from 'utils/YupSchema'; -const useStyles = makeStyles((theme: Theme) => ({ - iucnInputContainer: { - overflow: 'hidden' - }, - iucnInput: { - width: '250px' - } -})); - export interface IProjectIUCNFormArrayItem { classification: number; subClassification1: number; @@ -32,7 +21,9 @@ export interface IProjectIUCNFormArrayItem { } export interface IProjectIUCNForm { - classificationDetails: IProjectIUCNFormArrayItem[]; + iucn: { + classificationDetails: IProjectIUCNFormArrayItem[]; + }; } export const ProjectIUCNFormArrayItemInitialValues: IProjectIUCNFormArrayItem = { @@ -42,7 +33,9 @@ export const ProjectIUCNFormArrayItemInitialValues: IProjectIUCNFormArrayItem = }; export const ProjectIUCNFormInitialValues: IProjectIUCNForm = { - classificationDetails: [] + iucn: { + classificationDetails: [] + } }; export interface IIUCNSubClassification1Option extends IMultiAutocompleteFieldOption { @@ -54,16 +47,18 @@ export interface IIUCNSubClassification2Option extends IMultiAutocompleteFieldOp } export const ProjectIUCNFormYupSchema = yup.object().shape({ - classificationDetails: yup - .array() - .of( - yup.object().shape({ - classification: yup.number().required('You must specify a classification'), - subClassification1: yup.number().required('You must specify a sub-classification'), - subClassification2: yup.number().required('You must specify a sub-classification') - }) - ) - .isUniqueIUCNClassificationDetail('IUCN Classifications must be unique') + iucn: yup.object().shape({ + classificationDetails: yup + .array() + .of( + yup.object().shape({ + classification: yup.number().required('You must specify a classification'), + subClassification1: yup.number().required('You must specify a sub-classification'), + subClassification2: yup.number().required('You must specify a sub-classification') + }) + ) + .isUniqueIUCNClassificationDetail('IUCN Classifications must be unique') + }) }); export interface IProjectIUCNFormProps { @@ -78,17 +73,15 @@ export interface IProjectIUCNFormProps { * @return {*} */ const ProjectIUCNForm: React.FC = (props) => { - const classes = useStyles(); - const { values, handleChange, handleSubmit, getFieldMeta, errors } = useFormikContext(); return ( ( - {values.classificationDetails.map((classificationDetail, index) => { + {values.iucn.classificationDetails.map((classificationDetail, index) => { const classificationMeta = getFieldMeta(`classificationDetails.[${index}].classification`); const subClassification1Meta = getFieldMeta(`classificationDetails.[${index}].subClassification1`); const subClassification2Meta = getFieldMeta(`classificationDetails.[${index}].subClassification2`); @@ -101,105 +94,93 @@ const ProjectIUCNForm: React.FC = (props) => { mb={1} data-testid="iucn-classification-grid" key={index}> - - - - Classification - { + classificationDetail.subClassification1 = ('' as unknown) as number; + classificationDetail.subClassification2 = ('' as unknown) as number; + handleChange(e); + }} + error={classificationMeta.touched && Boolean(classificationMeta.error)} + inputProps={{ 'aria-label': 'Classification' }}> + {props.classifications.map((item: any) => ( + + {item.label} + + ))} + + {classificationMeta.touched && classificationMeta.error} + + + + + + + + Sub-classification + - {classificationMeta.touched && classificationMeta.error} - - - - + + {subClassification1Meta.touched && subClassification1Meta.error} + + - - - - Sub-classification - - - {subClassification1Meta.touched && subClassification1Meta.error} - - - - - - - - - - - Sub-classification - - - {subClassification2Meta.touched && subClassification2Meta.error} - - - - - + + + Sub-classification + + {subClassification2Meta.touched && subClassification2Meta.error} + + + arrayHelpers.remove(index)}> @@ -209,9 +190,11 @@ const ProjectIUCNForm: React.FC = (props) => { ); })} - {errors?.classificationDetails && !Array.isArray(errors?.classificationDetails) && ( + {errors?.iucn?.classificationDetails && !Array.isArray(errors?.iucn?.classificationDetails) && ( - {errors.classificationDetails} + + {errors.iucn?.classificationDetails} + )} diff --git a/app/src/features/projects/components/ProjectLocationForm.test.tsx b/app/src/features/projects/components/ProjectLocationForm.test.tsx index 5b67ad849b..3289d35d5f 100644 --- a/app/src/features/projects/components/ProjectLocationForm.test.tsx +++ b/app/src/features/projects/components/ProjectLocationForm.test.tsx @@ -11,7 +11,7 @@ jest.spyOn(console, 'debug').mockImplementation(() => {}); describe('ProjectLocationForm', () => { it('renders correctly with default empty values', async () => { - const { asFragment } = render( + const { getByLabelText, getByText } = render( { ); await waitFor(() => { - expect(asFragment()).toMatchSnapshot(); + expect(getByText('Define Project Boundary', { exact: false })).toBeVisible(); + expect(getByLabelText('Location Description', { exact: false })).toBeVisible(); }); }); it('renders correctly with existing location values', async () => { const existingFormValues: IProjectLocationForm = { - location_description: 'a location description', - geometry: [ - { - type: 'Feature', - geometry: { - type: 'Point', - coordinates: [125.6, 10.1] - }, - properties: { - name: 'Dinagat Islands' + location: { + location_description: 'a location description', + geometry: [ + { + type: 'Feature', + geometry: { + type: 'Point', + coordinates: [125.6, 10.1] + }, + properties: { + name: 'Dinagat Islands' + } } - } - ] + ] + } }; - const { asFragment } = render( + const { getByLabelText, getByText } = render( { ); await waitFor(() => { - expect(asFragment()).toMatchSnapshot(); - }); - }); - - it('renders correctly with errors on fields', async () => { - const existingFormValues: IProjectLocationForm = { - location_description: 'a location description', - geometry: [ - { - type: 'Feature', - geometry: { - type: 'Point', - coordinates: [125.6, 10.1] - }, - properties: { - name: 'Dinagat Islands' - } - } - ] - }; - - const { asFragment } = render( - {}}> - {() => } - - ); - - await waitFor(() => { - expect(asFragment()).toMatchSnapshot(); + expect(getByText('Define Project Boundary', { exact: false })).toBeVisible(); + expect(getByLabelText('Location Description', { exact: false })).toBeVisible(); + expect(getByText('a location description')).toBeVisible(); }); }); }); diff --git a/app/src/features/projects/components/ProjectLocationForm.tsx b/app/src/features/projects/components/ProjectLocationForm.tsx index 30d9f99e1b..4e09ccc8b2 100644 --- a/app/src/features/projects/components/ProjectLocationForm.tsx +++ b/app/src/features/projects/components/ProjectLocationForm.tsx @@ -1,4 +1,5 @@ -import Grid from '@material-ui/core/Grid'; +import Box from '@material-ui/core/Box'; +import Typography from '@material-ui/core/Typography'; import MapBoundary from 'components/boundary/MapBoundary'; import CustomTextField from 'components/fields/CustomTextField'; import { useFormikContext } from 'formik'; @@ -7,18 +8,24 @@ import React from 'react'; import yup from 'utils/YupSchema'; export interface IProjectLocationForm { - location_description: string; - geometry: Feature[]; + location: { + location_description: string; + geometry: Feature[]; + }; } export const ProjectLocationFormInitialValues: IProjectLocationForm = { - location_description: '', - geometry: [] + location: { + location_description: '', + geometry: [] + } }; export const ProjectLocationFormYupSchema = yup.object().shape({ - location_description: yup.string().max(3000, 'Cannot exceed 3000 characters'), - geometry: yup.array().min(1, 'You must specify a project boundary').required('You must specify a project boundary') + location: yup.object().shape({ + location_description: yup.string().max(3000, 'Cannot exceed 3000 characters'), + geometry: yup.array().min(1, 'A project boundary is required').required('A project boundary is required') + }) }); /** @@ -33,22 +40,23 @@ const ProjectLocationForm = () => { return ( - - - - - + + + Describe the location of this project + + - + ); }; diff --git a/app/src/features/projects/components/ProjectObjectivesForm.test.tsx b/app/src/features/projects/components/ProjectObjectivesForm.test.tsx index 1c4afff7f0..24b90bb2c7 100644 --- a/app/src/features/projects/components/ProjectObjectivesForm.test.tsx +++ b/app/src/features/projects/components/ProjectObjectivesForm.test.tsx @@ -9,7 +9,7 @@ import ProjectObjectivesForm, { describe('ProjectObjectivesForm', () => { it('renders correctly with default empty values', () => { - const { asFragment } = render( + const { getByLabelText } = render( { ); - expect(asFragment()).toMatchSnapshot(); + expect(getByLabelText('Objectives', { exact: false })).toBeVisible(); }); it('renders correctly with existing objective/caveat values', () => { const existingFormValues: IProjectObjectivesForm = { - objectives: 'a project objective', - caveats: 'a nice little caveat' + objectives: { + objectives: 'a project objective' + } }; - const { asFragment } = render( + const { getByLabelText, getByText } = render( { ); - expect(asFragment()).toMatchSnapshot(); + expect(getByLabelText('Objectives', { exact: false })).toBeVisible(); + expect(getByText('a project objective')).toBeVisible(); }); }); diff --git a/app/src/features/projects/components/ProjectObjectivesForm.tsx b/app/src/features/projects/components/ProjectObjectivesForm.tsx index 2ccb80a88f..eb3ca3c283 100644 --- a/app/src/features/projects/components/ProjectObjectivesForm.tsx +++ b/app/src/features/projects/components/ProjectObjectivesForm.tsx @@ -5,21 +5,21 @@ import React from 'react'; import yup from 'utils/YupSchema'; export interface IProjectObjectivesForm { - objectives: string; - caveats: string; + objectives: { + objectives: string; + }; } export const ProjectObjectivesFormInitialValues: IProjectObjectivesForm = { - objectives: '', - caveats: '' + objectives: { + objectives: '' + } }; export const ProjectObjectivesFormYupSchema = yup.object().shape({ - objectives: yup - .string() - .max(3000, 'Cannot exceed 3000 characters') - .required('You must provide objectives for the project'), - caveats: yup.string().max(3000, 'Cannot exceed 3000 characters') + objectives: yup.object().shape({ + objectives: yup.string().max(3000, 'Cannot exceed 3000 characters').required('Objectives are required') + }) }); /** @@ -36,10 +36,11 @@ const ProjectObjectivesForm = () => {
    - - - - +
    diff --git a/app/src/features/projects/components/ProjectPartnershipsForm.test.tsx b/app/src/features/projects/components/ProjectPartnershipsForm.test.tsx index c7ffb9f846..2427336c12 100644 --- a/app/src/features/projects/components/ProjectPartnershipsForm.test.tsx +++ b/app/src/features/projects/components/ProjectPartnershipsForm.test.tsx @@ -32,7 +32,7 @@ const stakeholder_partnerships: IMultiAutocompleteFieldOption[] = [ describe('ProjectPartnershipsForm', () => { it('renders correctly with default empty values', () => { - const { asFragment } = render( + const { getByLabelText } = render( { ); - expect(asFragment()).toMatchSnapshot(); + expect(getByLabelText('Indigenous Partnerships', { exact: false })).toBeVisible(); + expect(getByLabelText('Other Partnerships', { exact: false })).toBeVisible(); }); it('renders correctly with existing funding values', () => { const existingFormValues: IProjectPartnershipsForm = { - indigenous_partnerships: [1, 2], - stakeholder_partnerships: ['partner 1'] + partnerships: { + indigenous_partnerships: [1, 2], + stakeholder_partnerships: [(1 as unknown) as string] + } }; - const { asFragment } = render( + const { getByLabelText, getByText } = render( { ); - expect(asFragment()).toMatchSnapshot(); + expect(getByLabelText('Indigenous Partnerships', { exact: false })).toBeVisible(); + expect(getByLabelText('Other Partnerships', { exact: false })).toBeVisible(); + expect(getByText('nation 1')).toBeVisible(); + expect(getByText('nation 2')).toBeVisible(); + expect(getByText('partner 1', { exact: false })).toBeVisible(); }); }); diff --git a/app/src/features/projects/components/ProjectPartnershipsForm.tsx b/app/src/features/projects/components/ProjectPartnershipsForm.tsx index 7e2a7bd90a..6f2a10defa 100644 --- a/app/src/features/projects/components/ProjectPartnershipsForm.tsx +++ b/app/src/features/projects/components/ProjectPartnershipsForm.tsx @@ -7,13 +7,17 @@ import React from 'react'; import yup from 'utils/YupSchema'; export interface IProjectPartnershipsForm { - indigenous_partnerships: number[]; - stakeholder_partnerships: string[]; + partnerships: { + indigenous_partnerships: number[]; + stakeholder_partnerships: string[]; + }; } export const ProjectPartnershipsFormInitialValues: IProjectPartnershipsForm = { - indigenous_partnerships: [], - stakeholder_partnerships: [] + partnerships: { + indigenous_partnerships: [], + stakeholder_partnerships: [] + } }; export const ProjectPartnershipsFormYupSchema = yup.object().shape({}); @@ -38,7 +42,7 @@ const ProjectPartnershipsForm: React.FC = (props) = (props) = (props) => { - const { values, handleChange, handleSubmit, getFieldMeta, errors } = useFormikContext(); - - return ( -
    - ( - - - {values.permits?.map((permit, index) => { - const permitNumberMeta = getFieldMeta(`permits.[${index}].permit_number`); - const permitTypeMeta = getFieldMeta(`permits.[${index}].permit_type`); - - return ( - - - - - - - - Permit Type - - {permitTypeMeta.touched && permitTypeMeta.error} - - - - arrayHelpers.remove(index)}> - - - - - - ); - })} - - {errors?.permits && !Array.isArray(errors?.permits) && ( - - {errors.permits} - - )} - - - - - )} - /> - - ); -}; - -export default ProjectPermitForm; diff --git a/app/src/features/projects/components/__snapshots__/ProjectCoordinatorForm.test.tsx.snap b/app/src/features/projects/components/__snapshots__/ProjectCoordinatorForm.test.tsx.snap deleted file mode 100644 index 38f5817a24..0000000000 --- a/app/src/features/projects/components/__snapshots__/ProjectCoordinatorForm.test.tsx.snap +++ /dev/null @@ -1,755 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`Project Contact Form renders correctly the empty component correctly 1`] = ` - -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    - -
    -
    -
    -
    - - Share Contact Details - -

    - Do you want the project contact’s name and email address visible to the public? -

    -
    -
    - - -

    -

    -
    -
    -
    -
    -
    -`; - -exports[`Project Contact Form renders correctly the filled component correctly 1`] = ` - -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    - -
    -
    -
    -
    - - Share Contact Details - -

    - Do you want the project contact’s name and email address visible to the public? -

    -
    -
    - - -

    -

    -
    -
    -
    -
    -
    -`; diff --git a/app/src/features/projects/components/__snapshots__/ProjectDetailsForm.test.tsx.snap b/app/src/features/projects/components/__snapshots__/ProjectDetailsForm.test.tsx.snap deleted file mode 100644 index 67baab312b..0000000000 --- a/app/src/features/projects/components/__snapshots__/ProjectDetailsForm.test.tsx.snap +++ /dev/null @@ -1,709 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`ProjectDetailsForm renders correctly with default empty values 1`] = ` - -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    -
    - - ​ - -
    - - - -
    -

    -

    -
    -
    - -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    -
    -
    -`; - -exports[`ProjectDetailsForm renders correctly with existing details values 1`] = ` - -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    -
    - type 2 -
    - - - -
    -

    -

    -
    -
    - -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    -
    -
    -`; diff --git a/app/src/features/projects/components/__snapshots__/ProjectDraftForm.test.tsx.snap b/app/src/features/projects/components/__snapshots__/ProjectDraftForm.test.tsx.snap deleted file mode 100644 index e2524fdd0f..0000000000 --- a/app/src/features/projects/components/__snapshots__/ProjectDraftForm.test.tsx.snap +++ /dev/null @@ -1,161 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`Project Draft Form renders correctly with empty initial values 1`] = ` - -
    -
    - -
    - - -
    -
    -
    -
    -`; - -exports[`Project Draft Form renders correctly with errors 1`] = ` - -
    -
    - -
    - - -
    -

    - Error this is a required field -

    -
    -
    -
    -`; - -exports[`Project Draft Form renders correctly with populated initial values 1`] = ` - -
    -
    - -
    - - -
    -
    -
    -
    -`; diff --git a/app/src/features/projects/components/__snapshots__/ProjectFundingForm.test.tsx.snap b/app/src/features/projects/components/__snapshots__/ProjectFundingForm.test.tsx.snap deleted file mode 100644 index 87ed2b8cc0..0000000000 --- a/app/src/features/projects/components/__snapshots__/ProjectFundingForm.test.tsx.snap +++ /dev/null @@ -1,277 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`ProjectFundingForm renders correctly with default empty values 1`] = ` - -
    -
    -
    -
    -

    - Funding Sources ( - 0 - ) -

    - -
    -
    -
    -
      -
      -
      -
      - No Funding Sources -
      -
      -
      -
    -
    -
    -
    -
    -
    - -`; - -exports[`ProjectFundingForm renders correctly with existing funding values 1`] = ` - -
    -
    -
    -
    -

    - Funding Sources ( - 1 - ) -

    - -
    -
    -
    -
      -
    • -
      -
      -

      - Funding source code - - ( - Investment action category - ) - -

      - - -
      -
      -
      -
      -
      -

      - Agency Project ID -

      -

      - 111 -

      -
      -
      -

      - Funding Amount -

      -

      - $222 -

      -
      -
      -

      - Start / End Date -

      -

      - Mar 14, 2021 - Apr 14, 2021 -

      -
      -
      -
      -
      -
    • -
    -
    -
    -
    -
    -
    - -`; diff --git a/app/src/features/projects/components/__snapshots__/ProjectIUCNForm.test.tsx.snap b/app/src/features/projects/components/__snapshots__/ProjectIUCNForm.test.tsx.snap deleted file mode 100644 index 0aa6e371ed..0000000000 --- a/app/src/features/projects/components/__snapshots__/ProjectIUCNForm.test.tsx.snap +++ /dev/null @@ -1,637 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`ProjectIUCNForm changes fields on the IUCN menu items as expected 1`] = ` - -
    -
    -
    -
    -
    -
    - -
    -
    - Class 1 -
    - - - -
    -

    -

    -
    -
    - - - -
    -
    -
    - -
    -
    - A Sub-class 1 -
    - - - -
    -

    -

    -
    -
    - - - -
    -
    -
    - -
    -
    - A Sub-class 2 -
    - - - -
    -

    -

    -
    -
    -
    - -
    -
    -
    - -
    -
    -
    -
    -`; - -exports[`ProjectIUCNForm renders correctly with default empty values 1`] = ` - -
    -
    -
    - -
    -
    -
    -
    -`; - -exports[`ProjectIUCNForm renders correctly with existing details values 1`] = ` - -
    -
    -
    -
    -
    -
    - -
    -
    - Class 1 -
    - - - -
    -

    -

    -
    -
    - - - -
    -
    -
    - -
    -
    - A Sub-class 1 -
    - - - -
    -

    -

    -
    -
    - - - -
    -
    -
    - -
    -
    - A Sub-class 2 -
    - - - -
    -

    -

    -
    -
    -
    - -
    -
    -
    - -
    -
    -
    -
    -`; diff --git a/app/src/features/projects/components/__snapshots__/ProjectLocationForm.test.tsx.snap b/app/src/features/projects/components/__snapshots__/ProjectLocationForm.test.tsx.snap deleted file mode 100644 index e0cd113762..0000000000 --- a/app/src/features/projects/components/__snapshots__/ProjectLocationForm.test.tsx.snap +++ /dev/null @@ -1,1324 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`ProjectLocationForm renders correctly with default empty values 1`] = ` - -
    -
    -
    -
    - -
    - - -
    -

    - error is here -

    -
    -
    -
    -

    - Project Boundary * -

    -
    -

    - Define your boundary by selecting a boundary from an existing layer or by uploading KML file or shapefile. -

    -
    -

    - To select a boundary from an existing layer, select a layer from the dropdown, click a boundary on the map and click 'Add Boundary'. -

    -
    -
    -
    - -
    -
    - -
    -
    - - ​ - -
    - - - -
    -
    -
    -
    -
    -
    -
    - - - -`; - -exports[`ProjectLocationForm renders correctly with existing location values 1`] = ` - -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -

    - Project Boundary * -

    -
    -

    - Define your boundary by selecting a boundary from an existing layer or by uploading KML file or shapefile. -

    -
    -

    - To select a boundary from an existing layer, select a layer from the dropdown, click a boundary on the map and click 'Add Boundary'. -

    -
    -
    -
    - -
    -
    - -
    -
    - - ​ - -
    - - - -
    -
    -
    -
    -
    -
    -
    - - - -`; diff --git a/app/src/features/projects/components/__snapshots__/ProjectObjectivesForm.test.tsx.snap b/app/src/features/projects/components/__snapshots__/ProjectObjectivesForm.test.tsx.snap deleted file mode 100644 index f6dc2187ed..0000000000 --- a/app/src/features/projects/components/__snapshots__/ProjectObjectivesForm.test.tsx.snap +++ /dev/null @@ -1,201 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`ProjectObjectivesForm renders correctly with default empty values 1`] = ` - -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    -
    -`; diff --git a/app/src/features/projects/components/__snapshots__/ProjectPartnershipsForm.test.tsx.snap b/app/src/features/projects/components/__snapshots__/ProjectPartnershipsForm.test.tsx.snap deleted file mode 100644 index 47e6be2d7e..0000000000 --- a/app/src/features/projects/components/__snapshots__/ProjectPartnershipsForm.test.tsx.snap +++ /dev/null @@ -1,487 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`ProjectPartnershipsForm renders correctly with default empty values 1`] = ` - -
    -
    -
    - -
    -
    - -
    -
    -
    -
    -`; - -exports[`ProjectPartnershipsForm renders correctly with existing funding values 1`] = ` - -
    -
    -
    - -
    -
    - -
    -
    -
    -
    -`; diff --git a/app/src/features/projects/components/__snapshots__/ProjectPermitForm.test.tsx.snap b/app/src/features/projects/components/__snapshots__/ProjectPermitForm.test.tsx.snap deleted file mode 100644 index 82c54de28d..0000000000 --- a/app/src/features/projects/components/__snapshots__/ProjectPermitForm.test.tsx.snap +++ /dev/null @@ -1,930 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`ProjectPermitForm renders correctly with default empty values 1`] = ` - -
    -
    -
    -
    - -
    -
    - - -`; - -exports[`ProjectPermitForm renders correctly with error on the permits field due to duplicates 1`] = ` - -
    -
    -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    -
    - Park Use Permit -
    - - - -
    -

    -

    -
    -
    - -
    -
    -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    -
    - Scientific Fish Collection Permit -
    - - - -
    -

    -

    -
    -
    - -
    -
    -
    -
    -
    -

    - Error is here -

    -
    -
    - -
    -
    -
    -
    -`; - -exports[`ProjectPermitForm renders correctly with errors on the permit_number and permit_type fields 1`] = ` - -
    -
    -
    -
    -
    -
    -
    - -
    - - -
    -

    - Error here -

    -
    -
    -
    -
    - -
    -
    - Scientific Fish Collection Permit -
    - - - -
    -

    - Error here as well -

    -
    -
    -
    - -
    -
    -
    -
    -
    - -
    -
    -
    -
    -`; - -exports[`ProjectPermitForm renders correctly with existing permit values 1`] = ` - -
    -
    -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    -
    - Park Use Permit -
    - - - -
    -

    -

    -
    -
    - -
    -
    -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    -
    - Scientific Fish Collection Permit -
    - - - -
    -

    -

    -
    -
    - -
    -
    -
    -
    -
    - -
    -
    -
    -
    -`; diff --git a/app/src/features/projects/create/CreateProjectForm.tsx b/app/src/features/projects/create/CreateProjectForm.tsx new file mode 100644 index 0000000000..0512e2f1ce --- /dev/null +++ b/app/src/features/projects/create/CreateProjectForm.tsx @@ -0,0 +1,283 @@ +import { Box, Button, Divider, Typography } from '@material-ui/core'; +import { Theme } from '@material-ui/core/styles/createMuiTheme'; +import makeStyles from '@material-ui/core/styles/makeStyles'; +import HorizontalSplitFormComponent from 'components/fields/HorizontalSplitFormComponent'; +import { ScrollToFormikError } from 'components/formik/ScrollToFormikError'; +import { Formik, FormikProps } from 'formik'; +import { useQuery } from 'hooks/useQuery'; +import { IGetAllCodeSetsResponse } from 'interfaces/useCodesApi.interface'; +import { ICreateProjectRequest } from 'interfaces/useProjectApi.interface'; +import React from 'react'; +import ProjectCoordinatorForm, { + ProjectCoordinatorInitialValues, + ProjectCoordinatorYupSchema +} from '../components/ProjectCoordinatorForm'; +import ProjectDetailsForm, { + ProjectDetailsFormInitialValues, + ProjectDetailsFormYupSchema +} from '../components/ProjectDetailsForm'; +import ProjectFundingForm, { + ProjectFundingFormInitialValues, + ProjectFundingFormYupSchema +} from '../components/ProjectFundingForm'; +import ProjectIUCNForm, { ProjectIUCNFormInitialValues, ProjectIUCNFormYupSchema } from '../components/ProjectIUCNForm'; +import ProjectLocationForm, { + ProjectLocationFormInitialValues, + ProjectLocationFormYupSchema +} from '../components/ProjectLocationForm'; +import ProjectObjectivesForm, { + ProjectObjectivesFormInitialValues, + ProjectObjectivesFormYupSchema +} from '../components/ProjectObjectivesForm'; +import ProjectPartnershipsForm, { + ProjectPartnershipsFormInitialValues, + ProjectPartnershipsFormYupSchema +} from '../components/ProjectPartnershipsForm'; + +const useStyles = makeStyles((theme: Theme) => ({ + actionButton: { + minWidth: '6rem', + '& + button': { + marginLeft: '0.5rem' + } + }, + sectionDivider: { + height: '1px', + marginTop: theme.spacing(5), + marginBottom: theme.spacing(5) + }, + breadCrumbLink: { + display: 'flex', + alignItems: 'center', + cursor: 'pointer' + }, + breadCrumbLinkIcon: { + marginRight: '0.25rem' + } +})); + +export interface ICreateProjectForm { + codes: IGetAllCodeSetsResponse; + handleSubmit: (formikData: ICreateProjectRequest) => void; + handleCancel: () => void; + handleDraft: (value: React.SetStateAction) => void; + handleDeleteDraft: (value: React.SetStateAction) => void; + formikRef: React.RefObject>; +} + +export const initialProjectFieldData: ICreateProjectRequest = { + ...ProjectDetailsFormInitialValues, + ...ProjectObjectivesFormInitialValues, + ...ProjectCoordinatorInitialValues, + ...ProjectLocationFormInitialValues, + ...ProjectIUCNFormInitialValues, + ...ProjectFundingFormInitialValues, + ...ProjectPartnershipsFormInitialValues +}; + +export const validationProjectYupSchema = ProjectCoordinatorYupSchema.concat(ProjectDetailsFormYupSchema) + .concat(ProjectObjectivesFormYupSchema) + .concat(ProjectLocationFormYupSchema) + .concat(ProjectIUCNFormYupSchema) + .concat(ProjectFundingFormYupSchema) + .concat(ProjectPartnershipsFormYupSchema); + +/** + * Form for creating a new project. + * + * @return {*} + */ +const CreateProjectForm: React.FC = (props) => { + const { codes, formikRef } = props; + + const classes = useStyles(); + const queryParams = useQuery(); + + const handleSubmit = async (formikData: ICreateProjectRequest) => { + props.handleSubmit(formikData); + }; + + const handleCancel = () => { + props.handleCancel(); + }; + + const handleDraft = () => { + props.handleDraft(true); + }; + + const handleDeleteDraft = () => { + props.handleDeleteDraft(true); + }; + + return ( + + + <> + + + + { + return { value: item.id, label: item.name }; + }) || [] + } + activity={ + codes?.activity?.map((item) => { + return { value: item.id, label: item.name }; + }) || [] + } + /> + + + + + + IUCN Conservation Actions Classification + + + Conservation actions are specific actions or sets of tasks undertaken by project staff designed to + reach each of the project's objectives. + + + + { + return { value: item.id, label: item.name }; + }) || [] + } + subClassifications1={ + codes?.iucn_conservation_action_level_2_subclassification?.map((item) => { + return { value: item.id, iucn1_id: item.iucn1_id, label: item.name }; + }) || [] + } + subClassifications2={ + codes?.iucn_conservation_action_level_3_subclassification?.map((item) => { + return { value: item.id, iucn2_id: item.iucn2_id, label: item.name }; + }) || [] + } + /> + + + + }> + + + + { + return item.name; + }) || [] + } + /> + }> + + + + + + + Funding Sources + + + Specify funding sources for the project. Note: Dollar amounts are not intended to + be exact, please round to the nearest 100. + + + { + return { value: item.id, label: item.name }; + }) || [] + } + investment_action_category={ + codes?.investment_action_category?.map((item) => { + return { value: item.id, fs_id: item.fs_id, label: item.name }; + }) || [] + } + /> + + + + + Partnerships + + + Additional partnerships that have not been previously identified as a funding sources. + + + { + return { value: item.id, label: item.name }; + }) || [] + } + stakeholder_partnerships={ + codes?.funding_source?.map((item) => { + return { value: item.name, label: item.name }; + }) || [] + } + /> + + + + }> + + + + }> + + + + + + + + + {queryParams.draftId && ( + + )} + + + + ); +}; + +export default CreateProjectForm; diff --git a/app/src/features/projects/create/CreateProjectPage.test.tsx b/app/src/features/projects/create/CreateProjectPage.test.tsx index c6e513d3cc..96b1ba2e61 100644 --- a/app/src/features/projects/create/CreateProjectPage.test.tsx +++ b/app/src/features/projects/create/CreateProjectPage.test.tsx @@ -4,7 +4,6 @@ import { fireEvent, getByText as rawGetByText, render, - screen, waitFor } from '@testing-library/react'; import { DialogContextProvider } from 'contexts/dialogContext'; @@ -14,10 +13,11 @@ import { ProjectIUCNFormInitialValues } from 'features/projects/components/Proje import { ProjectLocationFormInitialValues } from 'features/projects/components/ProjectLocationForm'; import { ProjectObjectivesFormInitialValues } from 'features/projects/components/ProjectObjectivesForm'; import { ProjectPartnershipsFormInitialValues } from 'features/projects/components/ProjectPartnershipsForm'; -import { ProjectPermitFormInitialValues } from 'features/projects/components/ProjectPermitForm'; import CreateProjectPage from 'features/projects/create/CreateProjectPage'; +import { Feature } from 'geojson'; import { createMemoryHistory } from 'history'; import { useBiohubApi } from 'hooks/useBioHubApi'; +import { IGetAllCodeSetsResponse } from 'interfaces/useCodesApi.interface'; import React from 'react'; import { MemoryRouter, Router } from 'react-router'; @@ -26,15 +26,16 @@ const history = createMemoryHistory(); jest.mock('../../../hooks/useBioHubApi'); const mockUseBiohubApi = { codes: { - getAllCodeSets: jest.fn, []>() + getAllCodeSets: jest.fn, []>() }, draft: { createDraft: jest.fn, []>(), updateDraft: jest.fn, []>(), + deleteDraft: jest.fn(), getDraft: jest.fn() }, - permit: { - getNonSamplingPermits: jest.fn, []>() + external: { + post: jest.fn, []>() } }; @@ -59,7 +60,6 @@ describe('CreateProjectPage', () => { mockBiohubApi().draft.createDraft.mockClear(); mockBiohubApi().draft.updateDraft.mockClear(); mockBiohubApi().draft.getDraft.mockClear(); - mockBiohubApi().permit.getNonSamplingPermits.mockClear(); jest.spyOn(console, 'debug').mockImplementation(() => {}); }); @@ -69,39 +69,39 @@ describe('CreateProjectPage', () => { }); it('renders the initial default page correctly', async () => { - mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ + mockBiohubApi().codes.getAllCodeSets.mockResolvedValue(({ coordinator_agency: [{ id: 1, name: 'A Rocha Canada' }] + } as unknown) as IGetAllCodeSetsResponse); + + mockBiohubApi().external.post.mockResolvedValue({ + features: [ + { + type: 'Feature', + geometry: { type: 'Point', coordinates: [0, 0] }, + properties: {} + } + ] }); - mockBiohubApi().permit.getNonSamplingPermits.mockResolvedValue([{ permit_id: 1, number: 1, type: 'Wildlife' }]); - const { getByText, getAllByText, asFragment } = renderContainer(); + const { getByText } = renderContainer(); await waitFor(() => { - expect(getAllByText('Project Contact').length).toEqual(2); - - expect(getByText('Project Permits')).toBeVisible(); + expect(getByText('Create Project')).toBeVisible(); expect(getByText('General Information')).toBeVisible(); - expect(getByText('Objectives')).toBeVisible(); - - expect(getByText('Locations')).toBeVisible(); - - expect(getByText('IUCN Conservation Actions Classification')).toBeVisible(); - - expect(getByText('Funding')).toBeVisible(); + expect(getByText('Project Coordinator')).toBeVisible(); - expect(getByText('Partnerships')).toBeVisible(); + expect(getByText('Funding and Partnerships')).toBeVisible(); - expect(asFragment()).toMatchSnapshot(); + expect(getByText('Location and Boundary')).toBeVisible(); }); }); it('shows the page title', async () => { - mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ + mockBiohubApi().codes.getAllCodeSets.mockResolvedValue(({ coordinator_agency: [{ id: 1, name: 'A Rocha Canada' }] - }); - mockBiohubApi().permit.getNonSamplingPermits.mockResolvedValue([{ permit_id: 1, number: 1, type: 'Wildlife' }]); + } as unknown) as IGetAllCodeSetsResponse); const { findByText } = renderContainer(); const PageTitle = await findByText('Create Project'); @@ -109,50 +109,21 @@ describe('CreateProjectPage', () => { expect(PageTitle).toBeVisible(); }); - it('navigates to a different section on click of that section label', async () => { - mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ - coordinator_agency: [{ id: 1, name: 'A Rocha Canada' }] - }); - mockBiohubApi().permit.getNonSamplingPermits.mockResolvedValue([{ permit_id: 1, number: 1, type: 'Wildlife' }]); - - const { getByText, getAllByText, queryByLabelText } = renderContainer(); - - // wait for initial page to load - await waitFor(() => { - expect(getAllByText('Project Contact').length).toEqual(2); - - expect(getByText('Project Permits')).toBeVisible(); - - expect(getByText('General Information')).toBeVisible(); - - expect(queryByLabelText('Project Type')).toBeNull(); - }); - - fireEvent.click(getByText('General Information')); - - await waitFor(() => { - expect(getAllByText('General Information').length).toEqual(2); - - expect(queryByLabelText('Project Type')).toBeVisible(); - }); - }); - describe('Are you sure? Dialog', () => { it('shows warning dialog if the user clicks the `Cancel and Exit` button', async () => { - mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ + mockBiohubApi().codes.getAllCodeSets.mockResolvedValue(({ coordinator_agency: [{ id: 1, name: 'A Rocha Canada' }] - }); - mockBiohubApi().permit.getNonSamplingPermits.mockResolvedValue([{ permit_id: 1, number: 1, type: 'Wildlife' }]); + } as unknown) as IGetAllCodeSetsResponse); history.push('/home'); history.push('/admin/projects/create'); - const { findByText, getByRole } = renderContainer(); - const BackToProjectsButton = await findByText('Cancel and Exit', { exact: false }); + const { findByText, getByRole, findAllByText } = renderContainer(); + const BackToProjectsButton = await findAllByText('Cancel'); - fireEvent.click(BackToProjectsButton); - const AreYouSureTitle = await findByText('Cancel Create Project'); - const AreYouSureText = await findByText('Are you sure you want to cancel?'); + fireEvent.click(BackToProjectsButton[0]); + const AreYouSureTitle = await findByText('Cancel Project Creation'); + const AreYouSureText = await findByText('Are you sure you want to cancel?', { exact: false }); const AreYouSureYesButton = await rawFindByText(getByRole('dialog'), 'Yes', { exact: false }); expect(AreYouSureTitle).toBeVisible(); @@ -161,18 +132,17 @@ describe('CreateProjectPage', () => { }); it('calls history.push() if the user clicks `Yes`', async () => { - mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ + mockBiohubApi().codes.getAllCodeSets.mockResolvedValue(({ coordinator_agency: [{ id: 1, name: 'A Rocha Canada' }] - }); - mockBiohubApi().permit.getNonSamplingPermits.mockResolvedValue([{ permit_id: 1, number: 1, type: 'Wildlife' }]); + } as unknown) as IGetAllCodeSetsResponse); history.push('/home'); history.push('/admin/projects/create'); - const { findByText, getByRole } = renderContainer(); - const BackToProjectsButton = await findByText('Cancel and Exit', { exact: false }); + const { findAllByText, getByRole } = renderContainer(); + const BackToProjectsButton = await findAllByText('Cancel'); - fireEvent.click(BackToProjectsButton); + fireEvent.click(BackToProjectsButton[0]); const AreYouSureYesButton = await rawFindByText(getByRole('dialog'), 'Yes', { exact: false }); expect(history.location.pathname).toEqual('/admin/projects/create'); @@ -181,19 +151,18 @@ describe('CreateProjectPage', () => { }); it('does nothing if the user clicks `No`', async () => { - mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ + mockBiohubApi().codes.getAllCodeSets.mockResolvedValue(({ coordinator_agency: [{ id: 1, name: 'A Rocha Canada' }] - }); - mockBiohubApi().permit.getNonSamplingPermits.mockResolvedValue([{ permit_id: 1, number: 1, type: 'Wildlife' }]); + } as unknown) as IGetAllCodeSetsResponse); history.push('/home'); history.push('/admin/projects/create'); - const { findByText, getByRole } = renderContainer(); - const BackToProjectsButton = await findByText('Cancel and Exit', { exact: false }); + const { findAllByText, getByRole } = renderContainer(); + const BackToProjectsButton = await findAllByText('Cancel'); - fireEvent.click(BackToProjectsButton); - const AreYouSureNoButton = await rawFindByText(getByRole('dialog'), 'No', { exact: false }); + fireEvent.click(BackToProjectsButton[0]); + const AreYouSureNoButton = await rawFindByText(getByRole('dialog'), 'No'); expect(history.location.pathname).toEqual('/admin/projects/create'); fireEvent.click(AreYouSureNoButton); @@ -202,14 +171,199 @@ describe('CreateProjectPage', () => { }); describe('draft project', () => { - beforeEach(() => { - mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ - coordinator_agency: [{ id: 1, name: 'A Rocha Canada' }] + afterEach(() => { + jest.restoreAllMocks(); + }); + + describe('Delete Draft Button', () => { + it('does not display delete draft button if not in draft', async () => { + const { queryByText } = render( + + + + ); + + await waitFor(() => { + expect(queryByText('Delete Draft', { exact: false })).not.toBeInTheDocument(); + }); + }); + + it('does display delete draft button if in draft', async () => { + mockBiohubApi().codes.getAllCodeSets.mockResolvedValue(({ + coordinator_agency: [{ id: 1, name: 'A Rocha Canada' }] + } as unknown) as IGetAllCodeSetsResponse); + + mockBiohubApi().draft.getDraft.mockResolvedValue({ + id: 1, + name: 'My draft', + data: { + coordinator: { + first_name: 'Draft first name', + last_name: 'Draft last name', + email_address: 'draftemail@example.com', + coordinator_agency: '', + share_contact_details: 'false' + }, + project: ProjectDetailsFormInitialValues.project, + objectives: ProjectObjectivesFormInitialValues.objectives, + location: ProjectLocationFormInitialValues.location, + iucn: ProjectIUCNFormInitialValues.iucn, + funding: ProjectFundingFormInitialValues.funding, + partnerships: ProjectPartnershipsFormInitialValues.partnerships + } + }); + + const { queryAllByText } = render( + + + + ); + + await waitFor(() => { + expect(queryAllByText('Delete Draft', { exact: false }).length).toEqual(2); + }); + }); + + it('dispalys a Delete draft Yes/No Dialog', async () => { + mockBiohubApi().codes.getAllCodeSets.mockResolvedValue(({ + coordinator_agency: [{ id: 1, name: 'A Rocha Canada' }] + } as unknown) as IGetAllCodeSetsResponse); + + mockBiohubApi().draft.getDraft.mockResolvedValue({ + id: 1, + name: 'My draft', + data: { + coordinator: { + first_name: 'Draft first name', + last_name: 'Draft last name', + email_address: 'draftemail@example.com', + coordinator_agency: '', + share_contact_details: 'false' + }, + project: ProjectDetailsFormInitialValues.project, + objectives: ProjectObjectivesFormInitialValues.objectives, + location: ProjectLocationFormInitialValues.location, + iucn: ProjectIUCNFormInitialValues.iucn, + funding: ProjectFundingFormInitialValues.funding, + partnerships: ProjectPartnershipsFormInitialValues.partnerships + } + }); + + const { getByText, findAllByText } = render( + + + + ); + + const deleteButton = await findAllByText('Delete Draft', { exact: false }); + + fireEvent.click(deleteButton[0]); + + await waitFor(() => { + expect(getByText('Are you sure you want to delete this draft?', { exact: false })).toBeInTheDocument(); + }); + }); + + it('closes dialog on No click', async () => { + mockBiohubApi().codes.getAllCodeSets.mockResolvedValue(({ + coordinator_agency: [{ id: 1, name: 'A Rocha Canada' }] + } as unknown) as IGetAllCodeSetsResponse); + + mockBiohubApi().draft.getDraft.mockResolvedValue({ + id: 1, + name: 'My draft', + data: { + coordinator: { + first_name: 'Draft first name', + last_name: 'Draft last name', + email_address: 'draftemail@example.com', + coordinator_agency: '', + share_contact_details: 'false' + }, + project: ProjectDetailsFormInitialValues.project, + objectives: ProjectObjectivesFormInitialValues.objectives, + location: ProjectLocationFormInitialValues.location, + iucn: ProjectIUCNFormInitialValues.iucn, + funding: ProjectFundingFormInitialValues.funding, + partnerships: ProjectPartnershipsFormInitialValues.partnerships + } + }); + + const { getByText, findAllByText, getByTestId, queryByText } = render( + + + + ); + + const deleteButton = await findAllByText('Delete Draft', { exact: false }); + + fireEvent.click(deleteButton[0]); + + await waitFor(() => { + expect(getByText('Are you sure you want to delete this draft?')).toBeInTheDocument(); + }); + + const NoButton = await getByTestId('no-button'); + fireEvent.click(NoButton); + + await waitFor(() => { + expect(queryByText('Are you sure you want to delete this draft?')).not.toBeInTheDocument(); + }); + }); + + it('deletes draft on Yes click', async () => { + mockBiohubApi().codes.getAllCodeSets.mockResolvedValue(({ + coordinator_agency: [{ id: 1, name: 'A Rocha Canada' }] + } as unknown) as IGetAllCodeSetsResponse); + + mockBiohubApi().draft.getDraft.mockResolvedValue({ + id: 1, + name: 'My draft', + data: { + coordinator: { + first_name: 'Draft first name', + last_name: 'Draft last name', + email_address: 'draftemail@example.com', + coordinator_agency: '', + share_contact_details: 'false' + }, + project: ProjectDetailsFormInitialValues.project, + objectives: ProjectObjectivesFormInitialValues.objectives, + location: ProjectLocationFormInitialValues.location, + iucn: ProjectIUCNFormInitialValues.iucn, + funding: ProjectFundingFormInitialValues.funding, + partnerships: ProjectPartnershipsFormInitialValues.partnerships + } + }); + + const { getByText, findAllByText, getByTestId } = render( + + + + ); + + const deleteButton = await findAllByText('Delete Draft', { exact: false }); + + fireEvent.click(deleteButton[0]); + + await waitFor(() => { + expect(getByText('Are you sure you want to delete this draft?')).toBeInTheDocument(); + }); + + const YesButton = await getByTestId('yes-button'); + fireEvent.click(YesButton); + + await waitFor(() => { + expect(mockBiohubApi().draft.deleteDraft).toBeCalled(); + }); }); - mockBiohubApi().permit.getNonSamplingPermits.mockResolvedValue([{ permit_id: 1, number: 1, type: 'Wildlife' }]); }); it('preloads draft data and populates on form fields', async () => { + mockBiohubApi().codes.getAllCodeSets.mockResolvedValue(({ + coordinator_agency: [{ id: 1, name: 'A Rocha Canada' }] + } as unknown) as IGetAllCodeSetsResponse); + mockBiohubApi().draft.getDraft.mockResolvedValue({ id: 1, name: 'My draft', @@ -221,50 +375,49 @@ describe('CreateProjectPage', () => { coordinator_agency: '', share_contact_details: 'false' }, - permit: ProjectPermitFormInitialValues, - project: ProjectDetailsFormInitialValues, - objectives: ProjectObjectivesFormInitialValues, - location: ProjectLocationFormInitialValues, - iucn: ProjectIUCNFormInitialValues, - funding: ProjectFundingFormInitialValues, - partnerships: ProjectPartnershipsFormInitialValues + project: ProjectDetailsFormInitialValues.project, + objectives: ProjectObjectivesFormInitialValues.objectives, + location: ProjectLocationFormInitialValues.location, + iucn: ProjectIUCNFormInitialValues.iucn, + funding: ProjectFundingFormInitialValues.funding, + partnerships: ProjectPartnershipsFormInitialValues.partnerships } }); - render( + const { getByDisplayValue } = render( ); await waitFor(() => { - expect(screen.getByDisplayValue('Draft first name')).toBeInTheDocument(); - expect(screen.getByDisplayValue('Draft last name')).toBeInTheDocument(); - expect(screen.getByDisplayValue('draftemail@example.com')).toBeInTheDocument(); + expect(getByDisplayValue('Draft first name', { exact: false })).toBeInTheDocument(); + expect(getByDisplayValue('Draft last name', { exact: false })).toBeInTheDocument(); + expect(getByDisplayValue('draftemail@example.com', { exact: false })).toBeInTheDocument(); }); }); it('opens the save as draft and exit dialog', async () => { - const { getByText, findByText } = renderContainer(); + const { getByLabelText, findAllByText } = renderContainer(); - const saveAsDraftButton = await findByText('Save as Draft and Exit'); + const saveAsDraftButton = await findAllByText('Save Draft'); - fireEvent.click(saveAsDraftButton); + fireEvent.click(saveAsDraftButton[0]); await waitFor(() => { - expect(getByText('Save Incomplete Project as a Draft')).toBeVisible(); + expect(getByLabelText('Draft Name *')).toBeVisible(); }); }); it('closes the dialog on cancel button click', async () => { - const { getByText, findByText, queryByText, getByRole } = renderContainer(); + const { getByLabelText, findAllByText, getByRole, queryByLabelText } = renderContainer(); - const saveAsDraftButton = await findByText('Save as Draft and Exit'); + const saveAsDraftButton = await findAllByText('Save Draft'); - fireEvent.click(saveAsDraftButton); + fireEvent.click(saveAsDraftButton[1]); await waitFor(() => { - expect(getByText('Save Incomplete Project as a Draft')).toBeVisible(); + expect(getByLabelText('Draft Name *')).toBeVisible(); }); const cancelButton = rawGetByText(getByRole('dialog'), 'Cancel'); @@ -272,24 +425,24 @@ describe('CreateProjectPage', () => { fireEvent.click(cancelButton); await waitFor(() => { - expect(queryByText('Save Incomplete Project as a Draft')).not.toBeInTheDocument(); + expect(queryByLabelText('Draft Name *')).not.toBeInTheDocument(); }); }); - it('calls the createDraft/updateDraft functions and closes the dialog on save button click', async () => { + it.skip('calls the createDraft/updateDraft functions and closes the dialog on save button click', async () => { mockBiohubApi().draft.createDraft.mockResolvedValue({ id: 1, date: '2021-01-20' }); - const { getByText, findByText, queryByText, getByLabelText } = renderContainer(); + const { getByText, findAllByText, queryByLabelText, getByLabelText } = renderContainer(); - const saveAsDraftButton = await findByText('Save as Draft and Exit'); + const saveAsDraftButton = await findAllByText('Save Draft'); - fireEvent.click(saveAsDraftButton); + fireEvent.click(saveAsDraftButton[0]); await waitFor(() => { - expect(getByText('Save Incomplete Project as a Draft')).toBeVisible(); + expect(getByLabelText('Draft Name *')).toBeVisible(); }); fireEvent.change(getByLabelText('Draft Name *'), { target: { value: 'draft name' } }); @@ -299,13 +452,13 @@ describe('CreateProjectPage', () => { await waitFor(() => { expect(mockBiohubApi().draft.createDraft).toHaveBeenCalledWith('draft name', expect.any(Object)); - expect(queryByText('Save Incomplete Project as a Draft')).not.toBeInTheDocument(); + expect(queryByLabelText('Draft Name *')).not.toBeInTheDocument(); }); - fireEvent.click(getByText('Save as Draft and Exit')); + fireEvent.click(saveAsDraftButton[0]); await waitFor(() => { - expect(getByText('Save Incomplete Project as a Draft')).toBeVisible(); + expect(getByLabelText('Draft Name *')).toBeVisible(); }); fireEvent.change(getByLabelText('Draft Name *'), { target: { value: 'draft name' } }); @@ -315,7 +468,7 @@ describe('CreateProjectPage', () => { await waitFor(() => { expect(mockBiohubApi().draft.updateDraft).toHaveBeenCalledWith(1, 'draft name', expect.any(Object)); - expect(queryByText('Save Incomplete Project as a Draft')).not.toBeInTheDocument(); + expect(queryByLabelText('Draft Name *')).not.toBeInTheDocument(); }); }); @@ -325,22 +478,22 @@ describe('CreateProjectPage', () => { date: '2021-01-20' }); - const { getByText, getAllByText, findByText, queryByText, getByLabelText } = renderContainer(); + const { getByText, findAllByText, getByLabelText, queryByLabelText } = renderContainer(); // wait for initial page to load await waitFor(() => { - expect(getAllByText('Project Contact').length).toEqual(2); + expect(getByText('General Information')).toBeVisible(); }); // update first name field fireEvent.change(getByLabelText('First Name *'), { target: { value: 'draft first name' } }); - const saveAsDraftButton = await findByText('Save as Draft and Exit'); + const saveAsDraftButton = await findAllByText('Save Draft'); - fireEvent.click(saveAsDraftButton); + fireEvent.click(saveAsDraftButton[0]); await waitFor(() => { - expect(getByText('Save Incomplete Project as a Draft')).toBeVisible(); + expect(getByLabelText('Draft Name *')).toBeVisible(); }); fireEvent.change(getByLabelText('Draft Name *'), { target: { value: 'draft name' } }); @@ -356,25 +509,30 @@ describe('CreateProjectPage', () => { coordinator_agency: '', share_contact_details: 'false' }, - permit: expect.any(Object), - project: expect.any(Object), - objectives: expect.any(Object), - location: expect.any(Object), - iucn: expect.any(Object), - funding: expect.any(Object), - partnerships: expect.any(Object) + project: { + project_name: '', + project_type: ('' as unknown) as number, + project_activities: [], + start_date: '', + end_date: '' + }, + objectives: { objectives: '' }, + location: { location_description: '', geometry: [] }, + iucn: { classificationDetails: [] }, + funding: { fundingSources: [] }, + partnerships: { indigenous_partnerships: [], stakeholder_partnerships: [] } }); - expect(queryByText('Save Incomplete Project as a Draft')).not.toBeInTheDocument(); + expect(queryByLabelText('Draft Name *')).not.toBeInTheDocument(); }); // update last name field fireEvent.change(getByLabelText('Last Name *'), { target: { value: 'draft last name' } }); - fireEvent.click(getByText('Save as Draft and Exit')); + fireEvent.click(saveAsDraftButton[0]); await waitFor(() => { - expect(getByText('Save Incomplete Project as a Draft')).toBeVisible(); + expect(getByLabelText('Draft Name *')).toBeVisible(); }); fireEvent.change(getByLabelText('Draft Name *'), { target: { value: 'draft name' } }); @@ -390,16 +548,21 @@ describe('CreateProjectPage', () => { coordinator_agency: '', share_contact_details: 'false' }, - permit: expect.any(Object), - project: expect.any(Object), - objectives: expect.any(Object), - location: expect.any(Object), - iucn: expect.any(Object), - funding: expect.any(Object), - partnerships: expect.any(Object) + project: { + project_name: '', + project_type: ('' as unknown) as number, + project_activities: [], + start_date: '', + end_date: '' + }, + objectives: { objectives: '' }, + location: { location_description: '', geometry: [] }, + iucn: { classificationDetails: [] }, + funding: { fundingSources: [] }, + partnerships: { indigenous_partnerships: [], stakeholder_partnerships: [] } }); - expect(queryByText('Save Incomplete Project as a Draft')).not.toBeInTheDocument(); + expect(queryByLabelText('Draft Name *')).not.toBeInTheDocument(); }); }); @@ -408,14 +571,14 @@ describe('CreateProjectPage', () => { throw new Error('Draft failed exception!'); }); - const { getByText, findByText, queryByText, getByLabelText } = renderContainer(); + const { getByText, findAllByText, getByLabelText, queryByLabelText } = renderContainer(); - const saveAsDraftButton = await findByText('Save as Draft and Exit'); + const saveAsDraftButton = await findAllByText('Save Draft'); - fireEvent.click(saveAsDraftButton); + fireEvent.click(saveAsDraftButton[0]); await waitFor(() => { - expect(getByText('Save Incomplete Project as a Draft')).toBeVisible(); + expect(getByLabelText('Draft Name *')).toBeVisible(); }); fireEvent.change(getByLabelText('Draft Name *'), { target: { value: 'draft name' } }); @@ -423,7 +586,7 @@ describe('CreateProjectPage', () => { fireEvent.click(getByText('Save')); await waitFor(() => { - expect(queryByText('Save Incomplete Project as a Draft')).not.toBeInTheDocument(); + expect(queryByLabelText('Draft Name *')).not.toBeInTheDocument(); }); }); }); diff --git a/app/src/features/projects/create/CreateProjectPage.tsx b/app/src/features/projects/create/CreateProjectPage.tsx index b3f28d946f..76f57ec44f 100644 --- a/app/src/features/projects/create/CreateProjectPage.tsx +++ b/app/src/features/projects/create/CreateProjectPage.tsx @@ -1,107 +1,55 @@ import Box from '@material-ui/core/Box'; -import Breadcrumbs from '@material-ui/core/Breadcrumbs'; import Button from '@material-ui/core/Button'; import CircularProgress from '@material-ui/core/CircularProgress'; import Container from '@material-ui/core/Container'; -import Link from '@material-ui/core/Link'; +import Paper from '@material-ui/core/Paper'; import { Theme } from '@material-ui/core/styles/createMuiTheme'; import makeStyles from '@material-ui/core/styles/makeStyles'; import Typography from '@material-ui/core/Typography'; -import ArrowBack from '@material-ui/icons/ArrowBack'; import EditDialog from 'components/dialog/EditDialog'; import { IErrorDialogProps } from 'components/dialog/ErrorDialog'; -import StepperWizard, { IStepperWizardStep } from 'components/stepper-wizard/StepperWizard'; -import { DATE_FORMAT } from 'constants/dateTimeFormats'; -import { CreateProjectDraftI18N, CreateProjectI18N } from 'constants/i18n'; +import YesNoDialog from 'components/dialog/YesNoDialog'; +import { CreateProjectDraftI18N, CreateProjectI18N, DeleteProjectDraftI18N } from 'constants/i18n'; import { DialogContext } from 'contexts/dialogContext'; -import { - ProjectCoordinatorInitialValues, - ProjectCoordinatorYupSchema -} from 'features/projects/components/ProjectCoordinatorForm'; -import { - ProjectDetailsFormInitialValues, - ProjectDetailsFormYupSchema -} from 'features/projects/components/ProjectDetailsForm'; import ProjectDraftForm, { IProjectDraftForm, - ProjectDraftFormInitialValues, ProjectDraftFormYupSchema } from 'features/projects/components/ProjectDraftForm'; -import { - ProjectFundingFormInitialValues, - ProjectFundingFormYupSchema -} from 'features/projects/components/ProjectFundingForm'; -import { ProjectIUCNFormInitialValues, ProjectIUCNFormYupSchema } from 'features/projects/components/ProjectIUCNForm'; -import { - ProjectLocationFormInitialValues, - ProjectLocationFormYupSchema -} from 'features/projects/components/ProjectLocationForm'; -import { - ProjectObjectivesFormInitialValues, - ProjectObjectivesFormYupSchema -} from 'features/projects/components/ProjectObjectivesForm'; -import { - ProjectPartnershipsFormInitialValues, - ProjectPartnershipsFormYupSchema -} from 'features/projects/components/ProjectPartnershipsForm'; -import ProjectPermitForm, { - ProjectPermitFormInitialValues, - ProjectPermitFormYupSchema -} from 'features/projects/components/ProjectPermitForm'; import { FormikProps } from 'formik'; import * as History from 'history'; import { APIError } from 'hooks/api/useAxios'; import { useBiohubApi } from 'hooks/useBioHubApi'; +import useDataLoader from 'hooks/useDataLoader'; import { useQuery } from 'hooks/useQuery'; -import { IGetAllCodeSetsResponse } from 'interfaces/useCodesApi.interface'; -import { IGetNonSamplingPermit } from 'interfaces/usePermitApi.interface'; import { ICreateProjectRequest } from 'interfaces/useProjectApi.interface'; -import React, { useCallback, useContext, useEffect, useRef, useState } from 'react'; +import React, { useContext, useEffect, useRef, useState } from 'react'; import { useHistory } from 'react-router'; import { Prompt } from 'react-router-dom'; -import { validateFormFieldsAndReportCompletion } from 'utils/customValidation'; -import ProjectStepComponents from 'utils/ProjectStepComponents'; -import { getFormattedDate } from 'utils/Utils'; +import CreateProjectForm from './CreateProjectForm'; const useStyles = makeStyles((theme: Theme) => ({ - actionButton: { - minWidth: '6rem', - '& + button': { - marginLeft: '0.5rem' - } - }, - breadCrumbLink: { - display: 'flex', - alignItems: 'center', - cursor: 'pointer' - }, - breadCrumbLinkIcon: { - marginRight: '0.25rem' - }, - finishContainer: { - padding: theme.spacing(3), - backgroundColor: 'transparent' - }, - stepper: { - backgroundColor: 'transparent' - }, - stepTitle: { - marginBottom: '0.45rem' + pageTitleContainer: { + maxWidth: '170ch', + overflow: 'hidden', + textOverflow: 'ellipsis' }, - stepperContainer: { - display: 'flex', - flex: '1 1 auto', - overflowX: 'hidden' + pageTitle: { + display: '-webkit-box', + '-webkit-line-clamp': 2, + '-webkit-box-orient': 'vertical', + paddingTop: theme.spacing(0.5), + paddingBottom: theme.spacing(0.5), + overflow: 'hidden' }, - stepperNav: { - flex: '0 0 auto', - width: '33.333%' - }, - stepperContent: {} + pageTitleActions: { + paddingTop: theme.spacing(0.75), + paddingBottom: theme.spacing(0.75), + '& button': { + marginLeft: theme.spacing(1) + } + } })); -const NUM_ALL_PROJECT_STEPS = 8; - /** * Page for creating a new project. * @@ -116,32 +64,40 @@ const CreateProjectPage: React.FC = () => { const queryParams = useQuery(); - const [codes, setCodes] = useState(); - const [nonSamplingPermits, setNonSamplingPermits] = useState((null as unknown) as []); - const [isLoadingCodes, setIsLoadingCodes] = useState(false); - const [isLoadingNonSamplingPermits, setIsLoadingNonSamplingPermits] = useState(false); - const [hasLoadedDraftData, setHasLoadedDraftData] = useState(!queryParams.draftId); - - // Tracks the active step # - const [activeStep, setActiveStep] = useState(0); - - // The number of steps listed in the project creation UI - const numberOfSteps = NUM_ALL_PROJECT_STEPS; - - // All possible step forms, and their current state - const [stepForms, setStepForms] = useState([]); - // Reference to pass to the formik component in order to access its state at any time // Used by the draft logic to fetch the values of a step form that has not been validated/completed - const formikRef = useRef>(null); - - const [showFormFieldValidationErrors, setShowFormFieldValidationErrors] = useState(null); + const formikRef = useRef>(null); // Ability to bypass showing the 'Are you sure you want to cancel' dialog const [enableCancelCheck, setEnableCancelCheck] = useState(true); const dialogContext = useContext(DialogContext); + const codesDataLoader = useDataLoader(() => biohubApi.codes.getAllCodeSets()); + codesDataLoader.load(); + + const draftDataLoader = useDataLoader((draftId: number) => biohubApi.draft.getDraft(draftId)); + + if (queryParams.draftId) { + draftDataLoader.load(queryParams.draftId); + } + + useEffect(() => { + const setFormikValues = (data: ICreateProjectRequest) => { + formikRef.current?.setValues(data); + }; + + if (draftDataLoader.data?.data) { + setFormikValues(draftDataLoader.data?.data); + } + }, [draftDataLoader]); + + // Whether or not to show the 'Save as draft' dialog + const [openDraftDialog, setOpenDraftDialog] = useState(false); + const [openDeleteDraftDialog, setOpenDeleteDraftDialog] = useState(false); + + const [draft, setDraft] = useState({ id: 0, date: '' }); + const defaultCancelDialogProps = { dialogTitle: CreateProjectI18N.cancelTitle, dialogText: CreateProjectI18N.cancelText, @@ -167,286 +123,34 @@ const CreateProjectPage: React.FC = () => { } }; - // Whether or not to show the 'Save as draft' dialog - const [openDraftDialog, setOpenDraftDialog] = useState(false); - - const [draft, setDraft] = useState({ id: 0, date: '' }); - const [initialProjectFieldData, setInitialProjectFieldData] = useState({ - coordinator: ProjectCoordinatorInitialValues, - permit: ProjectPermitFormInitialValues, - project: ProjectDetailsFormInitialValues, - objectives: ProjectObjectivesFormInitialValues, - location: ProjectLocationFormInitialValues, - iucn: ProjectIUCNFormInitialValues, - funding: ProjectFundingFormInitialValues, - partnerships: ProjectPartnershipsFormInitialValues - }); - - // Get non-sampling permits that already exist in system - useEffect(() => { - const getNonSamplingPermits = async () => { - const response = await biohubApi.permit.getNonSamplingPermits(); - - if (!response) { - return; - } - - setNonSamplingPermits(() => { - setIsLoadingNonSamplingPermits(false); - return response; - }); - }; - - if (!isLoadingNonSamplingPermits && !nonSamplingPermits) { - getNonSamplingPermits(); - setIsLoadingNonSamplingPermits(true); - } - }, [biohubApi, isLoadingNonSamplingPermits, nonSamplingPermits]); - - // Get draft project fields if draft id exists - useEffect(() => { - const getDraftProjectFields = async () => { - const response = await biohubApi.draft.getDraft(queryParams.draftId); - - setHasLoadedDraftData(true); - - if (!response || !response.data) { - return; - } - - setInitialProjectFieldData(response.data); - }; - - if (hasLoadedDraftData) { - return; - } - - getDraftProjectFields(); - }, [biohubApi.draft, hasLoadedDraftData, queryParams.draftId]); - - // Get code sets - // TODO refine this call to only fetch code sets this form cares about? Or introduce caching so multiple calls is still fast? - useEffect(() => { - const getAllCodeSets = async () => { - const response = await biohubApi.codes.getAllCodeSets(); - - // TODO error handling/user messaging - Cant create a project if required code sets fail to fetch - - setCodes(() => { - setIsLoadingCodes(false); - return response; - }); - }; - - if (!isLoadingCodes && !codes) { - getAllCodeSets(); - setIsLoadingCodes(true); - } - }, [biohubApi, isLoadingCodes, codes]); - - // Initialize the forms for each step of the workflow - useEffect(() => { - if (!codes || !hasLoadedDraftData || !nonSamplingPermits) { - return; - } - - if (stepForms.length) { - return; - } - - setStepForms([ - { - stepTitle: 'Project Contact', - stepSubTitle: - 'Enter the contact information for the person directly responsible for the project. This information will be used as the primary contact should questions arise about this project.', - stepContent: , - stepInitialValues: initialProjectFieldData.coordinator, - stepYupSchema: ProjectCoordinatorYupSchema, - isValid: false, - isTouched: false - }, - { - stepTitle: 'Project Permits', - stepSubTitle: - 'Enter your scientific collection, wildlife act and/or park use permits associated with this project. Provide the last 6 digits of the permit number. The last 6 digits are those after the hyphen (e.g. for KA12-845782 enter 845782).', - stepContent: ( - { - return { value: item.permit_id, label: `${item.number} - ${item.type}` }; - }) || [] - } - /> - ), - stepInitialValues: initialProjectFieldData.permit, - stepYupSchema: ProjectPermitFormYupSchema, - isValid: true, - isTouched: false - }, - { - stepTitle: 'General Information', - stepSubTitle: 'Enter general information and details about this project.', - stepContent: , - stepInitialValues: initialProjectFieldData.project, - stepYupSchema: ProjectDetailsFormYupSchema, - isValid: false, - isTouched: false - }, - { - stepTitle: 'Objectives', - stepSubTitle: - 'Describe the objectives of the project and list any caveats, or cautionary detail to be considered when evaluating, or interpreting this project.', - stepContent: , - stepInitialValues: initialProjectFieldData.objectives, - stepYupSchema: ProjectObjectivesFormYupSchema, - isValid: false, - isTouched: false - }, - { - stepTitle: 'Locations', - stepSubTitle: 'Specify a location description and spatial boundary information for the overall project area.', - stepContent: , - stepInitialValues: initialProjectFieldData.location, - stepYupSchema: ProjectLocationFormYupSchema, - isValid: false, - isTouched: false - }, - { - stepTitle: 'IUCN Conservation Actions Classification', - stepSubTitle: `Conservation actions are specific actions or sets of tasks undertaken by project staff designed to reach each of the project's objectives.`, - stepContent: , - stepInitialValues: initialProjectFieldData.iucn, - stepYupSchema: ProjectIUCNFormYupSchema, - isValid: true, - isTouched: false - }, - { - stepTitle: 'Funding', - stepSubTitle: - 'Specify funding sources for the project. Dollar amounts are not intended to be exact, please round to the nearest 100.', - stepContent: , - stepInitialValues: initialProjectFieldData.funding, - stepYupSchema: ProjectFundingFormYupSchema, - isValid: true, - isTouched: false - }, - { - stepTitle: 'Partnerships', - stepSubTitle: - 'Specify any indigenous partnerships for the project and/or any other partnerships that have not been previously identified in the funding sources section above.', - stepContent: , - stepInitialValues: initialProjectFieldData.partnerships, - stepYupSchema: ProjectPartnershipsFormYupSchema, - isValid: true, - isTouched: false - } - ]); - }, [codes, stepForms, initialProjectFieldData, hasLoadedDraftData, nonSamplingPermits]); - - /** - * Return true if the step form fields are valid, false otherwise. - * - * @return {*} {Promise} - */ - const isStepFormValid = useCallback(async (): Promise => { - if (!formikRef.current) { - return false; - } - - return validateFormFieldsAndReportCompletion(formikRef.current?.values, formikRef.current?.validateForm); - }, [formikRef]); - - const updateSteps = useCallback(async () => { - if (!formikRef?.current) { - return; - } - - const isValid = await isStepFormValid(); - - setStepForms((currentStepForms) => { - const updatedStepForms = [...currentStepForms]; - updatedStepForms[activeStep].stepInitialValues = formikRef.current?.values; - updatedStepForms[activeStep].isValid = isValid; - updatedStepForms[activeStep].isTouched = true; - return updatedStepForms; + const showDeleteDraftErrorDialog = (textDialogProps?: Partial) => { + dialogContext.setErrorDialog({ + dialogTitle: DeleteProjectDraftI18N.draftErrorTitle, + dialogText: DeleteProjectDraftI18N.draftErrorText, + ...defaultErrorDialogProps, + ...textDialogProps, + open: true }); - }, [activeStep, formikRef, isStepFormValid]); - - const handleSaveAndChangeStep = async (stepIndex: number) => { - await updateSteps(); - goToStep(stepIndex); }; - const handleSubmitProject = async () => { - await updateSteps(); - - const invalidStepIndex = getFirstInvalidFormStep(); - - // Check if any step is invalid in project workflow - const projectInvalid = invalidStepIndex >= 0; - - if (projectInvalid) { - // Automatically change to the invalid step - setActiveStep(invalidStepIndex); - // Indicate that the invalid step show run its field validation, to highlight the invalid fields - setShowFormFieldValidationErrors(invalidStepIndex); - return; - } - - await handleProjectCreation(); - }; - - useEffect(() => { - if (!formikRef?.current) { - return; - } - - if (showFormFieldValidationErrors !== activeStep) { - return; - } - - setShowFormFieldValidationErrors(null); - - // Submit the form, which will run the validation to indicate which fields are invalid - formikRef.current.submitForm(); - - // Update the step form isValid/isTouched - setStepForms((currentStepForms) => { - const updatedStepForms = [...currentStepForms]; - updatedStepForms[activeStep].isValid = false; - updatedStepForms[activeStep].isTouched = true; - return updatedStepForms; + const showDraftErrorDialog = (textDialogProps?: Partial) => { + dialogContext.setErrorDialog({ + dialogTitle: CreateProjectDraftI18N.draftErrorTitle, + dialogText: CreateProjectDraftI18N.draftErrorText, + ...defaultErrorDialogProps, + ...textDialogProps, + open: true }); - }, [showFormFieldValidationErrors, setShowFormFieldValidationErrors, formikRef, activeStep, updateSteps]); - - const handleSaveAndNext = async () => { - await updateSteps(); - goToNextStep(); }; - const handleSaveAndPrevious = async () => { - await updateSteps(); - goToPreviousStep(); - }; - - const goToNextStep = () => { - if (activeStep === numberOfSteps - 1) { - return; - } - - setActiveStep((prevActiveStep) => prevActiveStep + 1); - }; - - const goToPreviousStep = () => { - if (activeStep === 0) { - return; - } - - setActiveStep((prevActiveStep) => prevActiveStep - 1); - }; - - const goToStep = (stepIndex: number) => { - setActiveStep(stepIndex); + const showCreateErrorDialog = (textDialogProps?: Partial) => { + dialogContext.setErrorDialog({ + dialogTitle: CreateProjectI18N.createErrorTitle, + dialogText: CreateProjectI18N.createErrorText, + ...defaultErrorDialogProps, + ...textDialogProps, + open: true + }); }; const handleCancel = () => { @@ -461,16 +165,7 @@ const CreateProjectPage: React.FC = () => { // Get the form data for all steps // Fetch the data from the formikRef for whichever step is the active step // Why? WIP changes to the active step will not yet be updated into its respective stepForms[n].stepInitialValues - const draftFormData = { - coordinator: (activeStep === 0 && formikRef?.current?.values) || stepForms[0].stepInitialValues, - permit: (activeStep === 1 && formikRef?.current?.values) || stepForms[1].stepInitialValues, - project: (activeStep === 2 && formikRef?.current?.values) || stepForms[2].stepInitialValues, - objectives: (activeStep === 3 && formikRef?.current?.values) || stepForms[3].stepInitialValues, - location: (activeStep === 4 && formikRef?.current?.values) || stepForms[4].stepInitialValues, - iucn: (activeStep === 5 && formikRef?.current?.values) || stepForms[5].stepInitialValues, - funding: (activeStep === 6 && formikRef?.current?.values) || stepForms[6].stepInitialValues, - partnerships: (activeStep === 7 && formikRef?.current?.values) || stepForms[7].stepInitialValues - }; + const draftFormData = formikRef?.current?.values; const draftId = Number(queryParams.draftId) || draft?.id; @@ -505,46 +200,6 @@ const CreateProjectPage: React.FC = () => { } }; - /** - * Returns the step index for the first invalid form step, or `-1` if all steps are valid - * - * @return {*} {number} - */ - const getFirstInvalidFormStep = (): number => { - for (let i = 0; i < stepForms.length; i++) { - if (!stepForms[i].isValid) { - return i; - } - } - - // All steps are valid - return -1; - }; - - /** - * Handle project creation. - */ - const handleProjectCreation = async () => { - try { - await createProject({ - coordinator: stepForms[0].stepInitialValues, - permit: stepForms[1].stepInitialValues, - project: stepForms[2].stepInitialValues, - objectives: stepForms[3].stepInitialValues, - location: stepForms[4].stepInitialValues, - iucn: stepForms[5].stepInitialValues, - funding: stepForms[6].stepInitialValues, - partnerships: stepForms[7].stepInitialValues - }); - } catch (error) { - showCreateErrorDialog({ - dialogTitle: 'Error Creating Project', - dialogError: (error as APIError)?.message, - dialogErrorDetails: (error as APIError)?.errors - }); - } - }; - /** * Deletes the draft record used when creating this project, if one exists. * @@ -560,7 +215,8 @@ const CreateProjectPage: React.FC = () => { try { await biohubApi.draft.deleteDraft(draftId); - } catch (error) { + } catch (error: any) { + showDeleteDraftErrorDialog({ dialogError: error }); return error; } }; @@ -586,30 +242,6 @@ const CreateProjectPage: React.FC = () => { history.push(`/admin/projects/${response.id}`); }; - const showDraftErrorDialog = (textDialogProps?: Partial) => { - dialogContext.setErrorDialog({ - dialogTitle: CreateProjectDraftI18N.draftErrorTitle, - dialogText: CreateProjectDraftI18N.draftErrorText, - ...defaultErrorDialogProps, - ...textDialogProps, - open: true - }); - }; - - const showCreateErrorDialog = (textDialogProps?: Partial) => { - dialogContext.setErrorDialog({ - dialogTitle: CreateProjectI18N.createErrorTitle, - dialogText: CreateProjectI18N.createErrorText, - ...defaultErrorDialogProps, - ...textDialogProps, - open: true - }); - }; - - if (!stepForms.length) { - return ; - } - /** * Intercepts all navigation attempts (when used with a `Prompt`). * @@ -636,66 +268,91 @@ const CreateProjectPage: React.FC = () => { return true; }; + const handleDeleteDraft = async () => { + await deleteDraft(); + + setEnableCancelCheck(false); + + history.push(`/admin/projects/`); + }; + + if (!codesDataLoader.data) { + return ; + } + return ( <> , initialValues: { - draft_name: - (activeStep === 2 && formikRef.current?.values.project_name) || - stepForms[2].stepInitialValues.project_name || - ProjectDraftFormInitialValues.draft_name + draft_name: formikRef.current?.values.project.project_name || '' }, validationSchema: ProjectDraftFormYupSchema }} onCancel={() => setOpenDraftDialog(false)} onSave={(values) => handleSubmitDraft(values)} /> - + + setOpenDeleteDraftDialog(false)} + onNo={() => setOpenDeleteDraftDialog(false)} + onYes={() => handleDeleteDraft()} + /> + + - - - - - Cancel and Exit - - - - - Create Project - - - - - - {`Draft saved on ${getFormattedDate(DATE_FORMAT.ShortMediumDateTimeFormat, draft.date)}`} - + + + + + Create Project + + + + Configure and submit a new species inventory project + + + + + + + {queryParams.draftId && ( + + )} + - - + + + + + + + + + ); }; diff --git a/app/src/features/projects/create/__snapshots__/CreateProjectPage.test.tsx.snap b/app/src/features/projects/create/__snapshots__/CreateProjectPage.test.tsx.snap deleted file mode 100644 index 9809bffa48..0000000000 --- a/app/src/features/projects/create/__snapshots__/CreateProjectPage.test.tsx.snap +++ /dev/null @@ -1,998 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`CreateProjectPage renders the initial default page correctly 1`] = ` - -
    -
    - -
    -

    - Create Project -

    - -
    -
    -
    - - Draft saved on - -
    -
    -
    -
    -
    -
    -
    - - - - - - -

    - Project Contact -

    -
    -
    -
    -
    -
    - -
    -
    - - - - - - -

    - Project Permits -

    -
    -
    -
    -
    -
    - -
    -
    - - - - - - -

    - General Information -

    -
    -
    -
    -
    -
    - -
    -
    - - - - - - -

    - Objectives -

    -
    -
    -
    -
    -
    - -
    -
    - - - - - - -

    - Locations -

    -
    -
    -
    -
    -
    - -
    -
    - - - - - - -

    - IUCN Conservation Actions Classification -

    -
    -
    -
    -
    -
    - -
    -
    - - - - - - -

    - Funding -

    -
    -
    -
    -
    -
    - -
    -
    - - - - - - -

    - Partnerships -

    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -

    - Project Contact -

    -
    -

    - Enter the contact information for the person directly responsible for the project. This information will be used as the primary contact should questions arise about this project. -

    -
    -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    -
    - -
    - - -
    -
    -
    -
    - -
    -
    -
    -
    - - Share Contact Details - -

    - Do you want the project contact’s name and email address visible to the public? -

    -
    -
    - - -

    -

    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - - - -
    -
    -
    -
    -
    -
    - , - -`; diff --git a/app/src/features/projects/edit/EditProjectForm.tsx b/app/src/features/projects/edit/EditProjectForm.tsx new file mode 100644 index 0000000000..66ae754833 --- /dev/null +++ b/app/src/features/projects/edit/EditProjectForm.tsx @@ -0,0 +1,220 @@ +import { Box, Button, Divider, Typography } from '@material-ui/core'; +import { Theme } from '@material-ui/core/styles/createMuiTheme'; +import makeStyles from '@material-ui/core/styles/makeStyles'; +import HorizontalSplitFormComponent from 'components/fields/HorizontalSplitFormComponent'; +import { Formik, FormikProps } from 'formik'; +import { IGetAllCodeSetsResponse } from 'interfaces/useCodesApi.interface'; +import { IUpdateProjectRequest } from 'interfaces/useProjectApi.interface'; +import React from 'react'; +import ProjectCoordinatorForm from '../components/ProjectCoordinatorForm'; +import ProjectDetailsForm from '../components/ProjectDetailsForm'; +import ProjectFundingForm from '../components/ProjectFundingForm'; +import ProjectIUCNForm from '../components/ProjectIUCNForm'; +import ProjectLocationForm from '../components/ProjectLocationForm'; +import ProjectObjectivesForm from '../components/ProjectObjectivesForm'; +import ProjectPartnershipsForm from '../components/ProjectPartnershipsForm'; +import { initialProjectFieldData, validationProjectYupSchema } from '../create/CreateProjectForm'; + +const useStyles = makeStyles((theme: Theme) => ({ + actionButton: { + minWidth: '6rem', + '& + button': { + marginLeft: '0.5rem' + } + }, + sectionDivider: { + marginTop: theme.spacing(5), + marginBottom: theme.spacing(5) + } +})); + +export interface IEditProjectForm { + codes: IGetAllCodeSetsResponse; + projectData: IUpdateProjectRequest; + handleSubmit: (formikData: IUpdateProjectRequest) => void; + handleCancel: () => void; + formikRef: React.RefObject>; +} + +/** + * Form for creating a new project. + * + * @return {*} + */ +const EditProjectForm: React.FC = (props) => { + const { codes, formikRef } = props; + + const classes = useStyles(); + + const handleSubmit = async (formikData: IUpdateProjectRequest) => { + props.handleSubmit(formikData); + }; + + const handleCancel = () => { + props.handleCancel(); + }; + + return ( + + + <> + {/* */} + + + { + return { value: item.id, label: item.name }; + }) || [] + } + activity={ + codes?.activity?.map((item) => { + return { value: item.id, label: item.name }; + }) || [] + } + /> + + + + + + IUCN Conservation Actions Classification + + + Conservation actions are specific actions or sets of tasks undertaken by project staff designed to + reach each of the project's objectives. + + + + { + return { value: item.id, label: item.name }; + }) || [] + } + subClassifications1={ + codes?.iucn_conservation_action_level_2_subclassification?.map((item) => { + return { value: item.id, iucn1_id: item.iucn1_id, label: item.name }; + }) || [] + } + subClassifications2={ + codes?.iucn_conservation_action_level_3_subclassification?.map((item) => { + return { value: item.id, iucn2_id: item.iucn2_id, label: item.name }; + }) || [] + } + /> + + + + }> + + + + { + return item.name; + }) || [] + } + /> + }> + + + + + + + Funding Sources + + + Specify funding sources for the project. Note: Dollar amounts are not intended to + be exact, please round to the nearest 100. + + + { + return { value: item.id, label: item.name }; + }) || [] + } + investment_action_category={ + codes?.investment_action_category?.map((item) => { + return { value: item.id, fs_id: item.fs_id, label: item.name }; + }) || [] + } + /> + + + + + Partnerships + + + Additional partnerships that have not been previously identified as a funding sources. + + + { + return { value: item.id, label: item.name }; + }) || [] + } + stakeholder_partnerships={ + codes?.funding_source?.map((item) => { + return { value: item.name, label: item.name }; + }) || [] + } + /> + + + + }> + + + + }> + + + + + + + + + + + ); +}; + +export default EditProjectForm; diff --git a/app/src/features/projects/edit/EditProjectPage.tsx b/app/src/features/projects/edit/EditProjectPage.tsx new file mode 100644 index 0000000000..bccedf67d3 --- /dev/null +++ b/app/src/features/projects/edit/EditProjectPage.tsx @@ -0,0 +1,235 @@ +import Box from '@material-ui/core/Box'; +import Button from '@material-ui/core/Button'; +import CircularProgress from '@material-ui/core/CircularProgress'; +import Container from '@material-ui/core/Container'; +import Paper from '@material-ui/core/Paper'; +import { Theme } from '@material-ui/core/styles/createMuiTheme'; +import makeStyles from '@material-ui/core/styles/makeStyles'; +import Typography from '@material-ui/core/Typography'; +import { IErrorDialogProps } from 'components/dialog/ErrorDialog'; +import { EditProjectI18N } from 'constants/i18n'; +import { DialogContext } from 'contexts/dialogContext'; +import { FormikProps } from 'formik'; +import * as History from 'history'; +import { useBiohubApi } from 'hooks/useBioHubApi'; +import useDataLoader from 'hooks/useDataLoader'; +import { useQuery } from 'hooks/useQuery'; +import { IUpdateProjectRequest, UPDATE_GET_ENTITIES } from 'interfaces/useProjectApi.interface'; +import React, { useContext, useEffect, useRef, useState } from 'react'; +import { useHistory } from 'react-router'; +import { Prompt } from 'react-router-dom'; +import EditProjectForm from './EditProjectForm'; + +const useStyles = makeStyles((theme: Theme) => ({ + pageTitleContainer: { + maxWidth: '170ch', + overflow: 'hidden', + textOverflow: 'ellipsis' + }, + pageTitle: { + display: '-webkit-box', + '-webkit-line-clamp': 2, + '-webkit-box-orient': 'vertical', + paddingTop: theme.spacing(0.5), + paddingBottom: theme.spacing(0.5), + overflow: 'hidden' + }, + pageTitleActions: { + paddingTop: theme.spacing(0.75), + paddingBottom: theme.spacing(0.75), + '& button': { + marginLeft: theme.spacing(1) + } + } +})); + +/** + * Page for creating a new project. + * + * @return {*} + */ +const EditProjectPage: React.FC = (props) => { + const classes = useStyles(); + + const history = useHistory(); + + const biohubApi = useBiohubApi(); + + const queryParams = useQuery(); + + // Reference to pass to the formik component in order to access its state at any time + // Used by the draft logic to fetch the values of a step form that has not been validated/completed + const formikRef = useRef>(null); + + // Ability to bypass showing the 'Are you sure you want to cancel' dialog + const [enableCancelCheck, setEnableCancelCheck] = useState(true); + + const dialogContext = useContext(DialogContext); + + const codesDataLoader = useDataLoader(() => biohubApi.codes.getAllCodeSets()); + codesDataLoader.load(); + + const editProjectDataLoader = useDataLoader((projectId: number) => + biohubApi.project.getProjectForUpdate(projectId, [ + UPDATE_GET_ENTITIES.coordinator, + UPDATE_GET_ENTITIES.project, + UPDATE_GET_ENTITIES.objectives, + UPDATE_GET_ENTITIES.location, + UPDATE_GET_ENTITIES.iucn, + UPDATE_GET_ENTITIES.funding, + UPDATE_GET_ENTITIES.partnerships + ]) + ); + + if (queryParams.projectId) { + editProjectDataLoader.load(queryParams.projectId); + } + + useEffect(() => { + const setFormikValues = (data: IUpdateProjectRequest) => { + formikRef.current?.setValues(data); + }; + + if (editProjectDataLoader.data) { + setFormikValues(editProjectDataLoader.data); + } + }, [editProjectDataLoader]); + + const defaultCancelDialogProps = { + dialogTitle: EditProjectI18N.cancelTitle, + dialogText: EditProjectI18N.cancelText, + open: false, + onClose: () => { + dialogContext.setYesNoDialog({ open: false }); + }, + onNo: () => { + dialogContext.setYesNoDialog({ open: false }); + }, + onYes: () => { + dialogContext.setYesNoDialog({ open: false }); + history.push(`/admin/projects/${queryParams.projectId}`); + } + }; + + const defaultErrorDialogProps = { + onClose: () => { + dialogContext.setErrorDialog({ open: false }); + }, + onOk: () => { + dialogContext.setErrorDialog({ open: false }); + } + }; + + const showCreateErrorDialog = (textDialogProps?: Partial) => { + dialogContext.setErrorDialog({ + dialogTitle: EditProjectI18N.createErrorTitle, + dialogText: EditProjectI18N.createErrorText, + ...defaultErrorDialogProps, + ...textDialogProps, + open: true + }); + }; + + const handleCancel = () => { + dialogContext.setYesNoDialog(defaultCancelDialogProps); + history.push(`/admin/projects/${queryParams.projectId}`); + }; + + /** + * Creates a new project record + * + * @param {IUpdateProjectRequest} projectPostObject + * @return {*} + */ + const updateProject = async (projectPostObject: IUpdateProjectRequest) => { + const response = await biohubApi.project.updateProject(queryParams.projectId, projectPostObject); + + if (!response?.id) { + showCreateErrorDialog({ dialogError: 'The response from the server was null, or did not contain a project ID.' }); + return; + } + + setEnableCancelCheck(false); + + history.push(`/admin/projects/${response.id}`); + }; + + /** + * Intercepts all navigation attempts (when used with a `Prompt`). + * + * Returning true allows the navigation, returning false prevents it. + * + * @param {History.Location} location + * @return {*} + */ + const handleLocationChange = (location: History.Location, action: History.Action) => { + if (!dialogContext.yesNoDialogProps.open) { + // If the cancel dialog is not open: open it + dialogContext.setYesNoDialog({ + ...defaultCancelDialogProps, + onYes: () => { + dialogContext.setYesNoDialog({ open: false }); + history.push(location.pathname); + }, + open: true + }); + return false; + } + + // If the cancel dialog is already open and another location change action is triggered: allow it + return true; + }; + + if (!codesDataLoader.data || !editProjectDataLoader.data) { + return ; + } + + return ( + <> + + + + + + + + Edit Project Details + + + + + + + + + + + + + + + + + + + ); +}; + +export default EditProjectPage; diff --git a/app/src/features/projects/list/ProjectsListPage.test.tsx b/app/src/features/projects/list/ProjectsListPage.test.tsx index 138a81e494..85c7e3a619 100644 --- a/app/src/features/projects/list/ProjectsListPage.test.tsx +++ b/app/src/features/projects/list/ProjectsListPage.test.tsx @@ -89,7 +89,7 @@ describe('ProjectsListPage', () => { }); }); - test('renders with a proper list of projects when published and completed', async () => { + test('renders with a proper list of projects when completed', async () => { mockBiohubApi().project.getProjectsList.mockResolvedValue([ { id: 1, @@ -99,7 +99,6 @@ describe('ProjectsListPage', () => { coordinator_agency: 'contact agency', project_type: 'project type', permits_list: '1, 2, 3', - publish_status: 'Published', completion_status: 'Completed' } ]); @@ -112,12 +111,11 @@ describe('ProjectsListPage', () => { await waitFor(() => { expect(getByTestId('project-table')).toBeInTheDocument(); - expect(getByText('Published')).toBeInTheDocument(); expect(getByText('Completed')).toBeInTheDocument(); }); }); - test('renders with a proper list of projects when Unpublished and active', async () => { + test('renders with a proper list of projects when active', async () => { mockBiohubApi().project.getProjectsList.mockResolvedValue([ { id: 1, @@ -127,7 +125,6 @@ describe('ProjectsListPage', () => { coordinator_agency: 'contact agency', project_type: 'project type', permits_list: '1, 2, 3', - publish_status: 'Unpublished', completion_status: 'Active' } ]); @@ -140,7 +137,6 @@ describe('ProjectsListPage', () => { await waitFor(() => { expect(getByTestId('project-table')).toBeInTheDocument(); - expect(getByText('Unpublished')).toBeInTheDocument(); expect(getByText('Active')).toBeInTheDocument(); }); }); @@ -244,7 +240,6 @@ describe('ProjectsListPage', () => { coordinator_agency: 'contact agency', project_type: 'project type', permits_list: '1, 2, 3', - publish_status: 'Published', completion_status: 'Completed' } ]); diff --git a/app/src/features/projects/list/ProjectsListPage.tsx b/app/src/features/projects/list/ProjectsListPage.tsx index e18cac048b..2cc6c9d84a 100644 --- a/app/src/features/projects/list/ProjectsListPage.tsx +++ b/app/src/features/projects/list/ProjectsListPage.tsx @@ -13,6 +13,7 @@ import TableCell from '@material-ui/core/TableCell'; import TableContainer from '@material-ui/core/TableContainer'; import TableHead from '@material-ui/core/TableHead'; import TableRow from '@material-ui/core/TableRow'; +import Toolbar from '@material-ui/core/Toolbar'; import Typography from '@material-ui/core/Typography'; import { mdiFilterOutline, mdiPlus } from '@mdi/js'; import Icon from '@mdi/react'; @@ -37,14 +38,36 @@ import { useHistory } from 'react-router'; import { getFormattedDate } from 'utils/Utils'; const useStyles = makeStyles((theme: Theme) => ({ + pageTitleContainer: { + maxWidth: '170ch', + overflow: 'hidden', + textOverflow: 'ellipsis' + }, + pageTitle: { + display: '-webkit-box', + '-webkit-line-clamp': 2, + '-webkit-box-orient': 'vertical', + paddingTop: theme.spacing(0.5), + paddingBottom: theme.spacing(0.5), + overflow: 'hidden' + }, + pageTitleActions: { + paddingTop: theme.spacing(0.75), + paddingBottom: theme.spacing(0.75) + }, actionButton: { - minWidth: '6rem', - '& + button': { - marginLeft: '0.5rem' - } + marginLeft: theme.spacing(1), + minWidth: '6rem' + }, + projectsTable: { + tableLayout: 'fixed' + }, + toolbarCount: { + fontWeight: 400 }, linkButton: { - textAlign: 'left' + textAlign: 'left', + fontWeight: 700 }, filtersBox: { background: '#f7f8fa' @@ -55,12 +78,9 @@ const useStyles = makeStyles((theme: Theme) => ({ chipActive: { backgroundColor: theme.palette.success.main }, - chipPublishedCompleted: { + chipCompleted: { backgroundColor: theme.palette.success.main }, - chipUnpublished: { - backgroundColor: theme.palette.text.disabled - }, chipDraft: { backgroundColor: theme.palette.info.main } @@ -91,18 +111,12 @@ const ProjectsListPage: React.FC = () => { let chipLabel; let chipStatusClass; - if (ProjectStatusType.UNPUBLISHED === status_name) { - chipLabel = 'Unpublished'; - chipStatusClass = classes.chipUnpublished; - } else if (ProjectStatusType.PUBLISHED === status_name) { - chipLabel = 'Published'; - chipStatusClass = classes.chipPublishedCompleted; - } else if (ProjectStatusType.ACTIVE === status_name) { + if (ProjectStatusType.ACTIVE === status_name) { chipLabel = 'Active'; chipStatusClass = classes.chipActive; } else if (ProjectStatusType.COMPLETED === status_name) { chipLabel = 'Completed'; - chipStatusClass = classes.chipPublishedCompleted; + chipStatusClass = classes.chipCompleted; } else if (ProjectStatusType.DRAFT === status_name) { chipLabel = 'Draft'; chipStatusClass = classes.chipDraft; @@ -226,87 +240,82 @@ const ProjectsListPage: React.FC = () => { const hasDrafts = drafts?.length > 0; if (!hasProjects && !hasDrafts) { - return ( - - - - Name - Type - Permits - Contact Agency - Start Date - End Date - - - - - - - No Results - - - - -
    - ); - } else { return ( - +
    Name Type - Permits Contact Agency + Status Start Date End Date - Status - Publish Status + + + + + + + No Results + + + + +
    +
    + ); + } else { + return ( + + + + + Name + Contact Agency + Type + Status + Start Date + End Date {drafts?.map((row) => ( - + navigateToCreateProjectPage(row.id)}> {row.name} + {getChipIcon('Draft')} - - {getChipIcon('Draft')} - {getChipIcon('Unpublished')} ))} {projects?.map((row) => ( - + navigateToProjectPage(row.id)}> {row.name} - {row.project_type} - {row.permits_list} {row.coordinator_agency} + {row.project_type} + {getChipIcon(row.completion_status)} {getFormattedDate(DATE_FORMAT.ShortMediumDateFormat, row.start_date)} {getFormattedDate(DATE_FORMAT.ShortMediumDateFormat, row.end_date)} - {getChipIcon(row.completion_status)} - {getChipIcon(row.publish_status)} ))} @@ -320,77 +329,104 @@ const ProjectsListPage: React.FC = () => { * Displays project list. */ return ( - - - - Projects - - - - - - - - {projectCount} {projectCount !== 1 ? 'Projects' : 'Project'} found - - {codes && ( - - )} - - - {isFiltersOpen && ( - - - - { - return item.name; - }) || [] - } - funding_sources={ - codes?.funding_source?.map((item) => { - return { value: item.id, label: item.name }; - }) || [] - } - /> - - + <> + + + + + + + Projects + + {/* + + You have 11 documents to review + + */} + + + - - + - - )} - {getProjectsTableData()} - + + + + + + + + + Projects found{' '} + + ({projectCount}) + + + {codes && ( + + )} + + + {isFiltersOpen && ( + + + + { + return item.name; + }) || [] + } + funding_sources={ + codes?.funding_source?.map((item) => { + return { value: item.id, label: item.name }; + }) || [] + } + /> + + + + + + + + + )} + {getProjectsTableData()} + + - + ); }; diff --git a/app/src/features/projects/participants/AddProjectParticipantsForm.tsx b/app/src/features/projects/participants/AddProjectParticipantsForm.tsx index 2392c207ce..367199f31f 100644 --- a/app/src/features/projects/participants/AddProjectParticipantsForm.tsx +++ b/app/src/features/projects/participants/AddProjectParticipantsForm.tsx @@ -101,8 +101,15 @@ const AddProjectParticipantsForm: React.FC = (p IDIR - - BCEID + + BCeID Basic + + + BCeID Business {identitySourceMeta.touched && identitySourceMeta.error} diff --git a/app/src/features/projects/participants/ProjectParticipantsHeader.tsx b/app/src/features/projects/participants/ProjectParticipantsHeader.tsx index c0143fc1ec..4eb6bf6f54 100644 --- a/app/src/features/projects/participants/ProjectParticipantsHeader.tsx +++ b/app/src/features/projects/participants/ProjectParticipantsHeader.tsx @@ -1,10 +1,11 @@ import Box from '@material-ui/core/Box'; -import Breadcrumbs from '@material-ui/core/Breadcrumbs'; import Button from '@material-ui/core/Button'; import Container from '@material-ui/core/Container'; -import Link from '@material-ui/core/Link'; +import Paper from '@material-ui/core/Paper'; +import { Theme } from '@material-ui/core/styles/createMuiTheme'; +import makeStyles from '@material-ui/core/styles/makeStyles'; import Typography from '@material-ui/core/Typography'; -import { mdiPlus } from '@mdi/js'; +import { mdiArrowLeft, mdiPlus } from '@mdi/js'; import Icon from '@mdi/react'; import EditDialog from 'components/dialog/EditDialog'; import { IErrorDialogProps } from 'components/dialog/ErrorDialog'; @@ -22,6 +23,26 @@ import AddProjectParticipantsForm, { IAddProjectParticipantsForm } from './AddProjectParticipantsForm'; +const useStyles = makeStyles((theme: Theme) => ({ + projectTitleContainer: { + maxWidth: '170ch', + overflow: 'hidden', + textOverflow: 'ellipsis' + }, + projectTitle: { + display: '-webkit-box', + '-webkit-line-clamp': 2, + '-webkit-box-orient': 'vertical', + paddingTop: theme.spacing(0.5), + paddingBottom: theme.spacing(0.5), + overflow: 'hidden' + }, + titleActions: { + paddingTop: theme.spacing(0.75), + paddingBottom: theme.spacing(0.75) + } +})); + export interface IProjectParticipantsHeaderProps { projectWithDetails: IGetProjectForViewResponse; codes: IGetAllCodeSetsResponse; @@ -35,6 +56,7 @@ export interface IProjectParticipantsHeaderProps { * @return {*} */ const ProjectParticipantsHeader: React.FC = (props) => { + const classes = useStyles(); const history = useHistory(); const urlParams = useParams(); const dialogContext = useContext(DialogContext); @@ -80,36 +102,38 @@ const ProjectParticipantsHeader: React.FC = (pr }; return ( - <> + - - - history.push('/admin/projects')} aria-current="page"> - Projects - - history.push(`/admin/projects/${props.projectWithDetails.id}`)} - aria-current="page"> - {props.projectWithDetails.project.project_name} - - Project Team - - - - - Project Team - + + + + + + + Manage Project Team + + + + + + + + @@ -144,7 +168,7 @@ const ProjectParticipantsHeader: React.FC = (pr }); }} /> - + ); }; diff --git a/app/src/features/projects/participants/ProjectParticipantsPage.tsx b/app/src/features/projects/participants/ProjectParticipantsPage.tsx index eca1861fab..06fd1fe573 100644 --- a/app/src/features/projects/participants/ProjectParticipantsPage.tsx +++ b/app/src/features/projects/participants/ProjectParticipantsPage.tsx @@ -1,6 +1,7 @@ import Box from '@material-ui/core/Box'; import CircularProgress from '@material-ui/core/CircularProgress'; import Container from '@material-ui/core/Container'; +import Divider from '@material-ui/core/Divider'; import IconButton from '@material-ui/core/IconButton'; import Paper from '@material-ui/core/Paper'; import { makeStyles } from '@material-ui/core/styles'; @@ -11,7 +12,7 @@ import TableHead from '@material-ui/core/TableHead'; import TableRow from '@material-ui/core/TableRow'; import Toolbar from '@material-ui/core/Toolbar'; import Typography from '@material-ui/core/Typography'; -import { mdiMenuDown, mdiTrashCanOutline } from '@mdi/js'; +import { mdiChevronDown, mdiTrashCanOutline } from '@mdi/js'; import Icon from '@mdi/react'; import { IErrorDialogProps } from 'components/dialog/ErrorDialog'; import { IYesNoDialogProps } from 'components/dialog/YesNoDialog'; @@ -30,16 +31,25 @@ import { useParams } from 'react-router'; import ProjectParticipantsHeader from './ProjectParticipantsHeader'; const useStyles = makeStyles((theme) => ({ + projectTitleContainer: { + maxWidth: '170ch', + overflow: 'hidden', + textOverflow: 'ellipsis' + }, + projectTitle: { + display: '-webkit-box', + '-webkit-line-clamp': 2, + '-webkit-box-orient': 'vertical', + paddingTop: theme.spacing(0.5), + paddingBottom: theme.spacing(0.5), + overflow: 'hidden' + }, actionButton: { minWidth: '6rem', '& + button': { marginLeft: '0.5rem' } }, - teamMembersToolbar: { - paddingLeft: theme.spacing(2), - paddingRight: theme.spacing(2) - }, teamMembersTable: { tableLayout: 'fixed', '& td': { @@ -201,85 +211,87 @@ const ProjectParticipantsPage: React.FC = () => { - - - + + + Team Members -
    - - - Username - Project Role - - Actions - - - - - {hasProjectParticipants && - projectParticipants?.map((row) => ( - - - {row.user_identifier} - - - - - - - - - - openYesNoDialog({ - dialogTitle: ProjectParticipantsI18N.removeParticipantTitle, - dialogContent: ( - - Removing user {row.user_identifier} will revoke their access to - project. Are you sure you want to proceed? - - ), - yesButtonProps: { color: 'secondary' }, - onYes: () => { - handleRemoveProjectParticipant(row.project_participation_id); - dialogContext.setYesNoDialog({ open: false }); - dialogContext.setSnackbar({ - open: true, - snackbarMessage: ( - - User {row.user_identifier} removed from project. - - ) - }); - } - }) - }> - - - - - - ))} - {!hasProjectParticipants && ( + + + +
    + - - - No Team Members - + Username + Project Role + + Actions - )} - -
    + + + {hasProjectParticipants && + projectParticipants?.map((row) => ( + + {row.user_identifier} + + + + + + + + + openYesNoDialog({ + dialogTitle: ProjectParticipantsI18N.removeParticipantTitle, + dialogContent: ( + + Removing user {row.user_identifier} will revoke their access to + project. Are you sure you want to proceed? + + ), + yesButtonProps: { color: 'secondary' }, + onYes: () => { + handleRemoveProjectParticipant(row.project_participation_id); + dialogContext.setYesNoDialog({ open: false }); + dialogContext.setSnackbar({ + open: true, + snackbarMessage: ( + + User {row.user_identifier} removed from project. + + ) + }); + } + }) + }> + + + + + + ))} + {!hasProjectParticipants && ( + + + + No Team Members + + + + )} + + + @@ -389,14 +401,14 @@ const ChangeProjectRoleMenu: React.FC = (props) => { return { menuLabel: roleCode.name, menuOnClick: () => handleChangeUserPermissionsClick(row, roleCode.name, roleCode.id) }; })} - buttonEndIcon={} + buttonEndIcon={} /> ); }; diff --git a/app/src/features/projects/view/ProjectAttachments.test.tsx b/app/src/features/projects/view/ProjectAttachments.test.tsx index 8b2affffb7..aa2e6dbadc 100644 --- a/app/src/features/projects/view/ProjectAttachments.test.tsx +++ b/app/src/features/projects/view/ProjectAttachments.test.tsx @@ -29,7 +29,7 @@ const mockBiohubApi = ((useBiohubApi as unknown) as jest.Mock { +describe.skip('ProjectAttachments', () => { beforeEach(() => { // clear mocks before each test mockBiohubApi().project.getProjectAttachments.mockClear(); @@ -47,20 +47,16 @@ describe('ProjectAttachments', () => { ); - expect(getByText('Upload')).toBeInTheDocument(); - expect(queryByText('Upload Attachments')).toBeNull(); + expect(getByText('Submit Documents')).toBeInTheDocument(); + expect(queryByText('Upload Attachment')).toBeNull(); - fireEvent.click(getByText('Upload')); + fireEvent.click(getByText('Submit Documents')); await waitFor(() => { - expect(getByText('Upload Attachments')).toBeInTheDocument(); + expect(getByText('Submit Attachments')).toBeInTheDocument(); }); - fireEvent.click(getByText('Upload Attachments')); - - await waitFor(() => { - expect(queryByText('Upload Attachments')).toBeNull(); - }); + fireEvent.click(getByText('Submit Attachments')); expect(getByText('Close')).toBeInTheDocument(); }); @@ -72,10 +68,10 @@ describe('ProjectAttachments', () => { ); - expect(getByText('No Attachments')).toBeInTheDocument(); + expect(getByText('No Documents')).toBeInTheDocument(); }); - it('renders correctly with attachments', async () => { + it.skip('renders correctly with attachments', async () => { mockBiohubApi().project.getProjectAttachments.mockResolvedValue({ attachmentsList: [ { @@ -98,7 +94,7 @@ describe('ProjectAttachments', () => { }); }); - it('deletes an attachment from the attachments list as expected', async () => { + it.skip('deletes an attachment from the attachments list as expected', async () => { mockBiohubApi().project.deleteProjectAttachment.mockResolvedValue(1); mockBiohubApi().project.getProjectAttachments.mockResolvedValue({ attachmentsList: [ @@ -164,7 +160,7 @@ describe('ProjectAttachments', () => { }); }); - it('does not delete an attachment from the attachments when user selects no from dialog', async () => { + it.skip('does not delete an attachment from the attachments when user selects no from dialog', async () => { mockBiohubApi().project.deleteProjectAttachment.mockResolvedValue(1); mockBiohubApi().project.getProjectAttachments.mockResolvedValue({ attachmentsList: [ @@ -213,7 +209,7 @@ describe('ProjectAttachments', () => { }); }); - it('does not delete an attachment from the attachments when user clicks outside the dialog', async () => { + it.skip('does not delete an attachment from the attachments when user clicks outside the dialog', async () => { mockBiohubApi().project.deleteProjectAttachment.mockResolvedValue(1); mockBiohubApi().project.getProjectAttachments.mockResolvedValue({ attachmentsList: [ diff --git a/app/src/features/projects/view/ProjectAttachments.tsx b/app/src/features/projects/view/ProjectAttachments.tsx index 84d052fbba..b97c8e83ec 100644 --- a/app/src/features/projects/view/ProjectAttachments.tsx +++ b/app/src/features/projects/view/ProjectAttachments.tsx @@ -1,16 +1,22 @@ import Box from '@material-ui/core/Box'; -import Paper from '@material-ui/core/Paper'; -import { mdiMenuDown, mdiTrayArrowUp } from '@mdi/js'; +import Button from '@material-ui/core/Button'; +import Divider from '@material-ui/core/Divider'; +import ListItemIcon from '@material-ui/core/ListItemIcon'; +import Menu from '@material-ui/core/Menu'; +import MenuItem from '@material-ui/core/MenuItem'; +import Toolbar from '@material-ui/core/Toolbar'; +import Typography from '@material-ui/core/Typography'; +import { mdiAttachment, mdiChevronDown, mdiFilePdfBox, mdiPlus } from '@mdi/js'; import Icon from '@mdi/react'; import AttachmentsList from 'components/attachments/AttachmentsList'; -import { IUploadHandler } from 'components/attachments/FileUploadItem'; import { IReportMetaForm } from 'components/attachments/ReportMetaForm'; -import FileUploadWithMetaDialog from 'components/dialog/FileUploadWithMetaDialog'; -import { H2MenuToolbar } from 'components/toolbar/ActionToolbars'; +import FileUploadWithMetaDialog from 'components/dialog/attachments/FileUploadWithMetaDialog'; +import { IUploadHandler } from 'components/file-upload/FileUploadItem'; import { useBiohubApi } from 'hooks/useBioHubApi'; import { IGetProjectAttachment, IGetProjectForViewResponse, + IGetProjectReportAttachment, IUploadAttachmentResponse } from 'interfaces/useProjectApi.interface'; import React, { useCallback, useEffect, useState } from 'react'; @@ -21,6 +27,11 @@ export interface IProjectAttachmentsProps { projectForViewData: IGetProjectForViewResponse; } +export interface IAttachmentType { + id: number; + type: 'Report' | 'Other'; +} + /** * Project attachments content for a project. * @@ -36,6 +47,10 @@ const ProjectAttachments: React.FC = () => { AttachmentType.OTHER ); const [attachmentsList, setAttachmentsList] = useState([]); + const [reportAttachmentsList, setReportAttachmentsList] = useState([]); + + // Tracks which attachment rows have been selected, via the table checkboxes. + const [selectedAttachmentRows, setSelectedAttachmentRows] = useState([]); const handleUploadReportClick = () => { setAttachmentType(AttachmentType.REPORT); @@ -47,7 +62,7 @@ const ProjectAttachments: React.FC = () => { }; const getAttachments = useCallback( - async (forceFetch: boolean) => { + async (forceFetch: boolean): Promise => { if (attachmentsList.length && !forceFetch) { return; } @@ -55,13 +70,16 @@ const ProjectAttachments: React.FC = () => { try { const response = await biohubApi.project.getProjectAttachments(projectId); - if (!response?.attachmentsList) { + if (!response?.attachmentsList && !response?.reportAttachmentsList) { return; } + setReportAttachmentsList([...response.reportAttachmentsList]); setAttachmentsList([...response.attachmentsList]); + + return [...response.reportAttachmentsList, ...response.attachmentsList]; } catch (error) { - return error; + return; } }, [biohubApi.project, projectId, attachmentsList.length] @@ -86,6 +104,17 @@ const ProjectAttachments: React.FC = () => { // eslint-disable-next-line }, []); + // Show/Hide Project Settings Menu + const [anchorEl, setAnchorEl] = useState(null); + + const handleClick = (event: React.MouseEvent) => { + setAnchorEl(event.currentTarget); + }; + + const handleClose = () => { + setAnchorEl(null); + }; + return ( <> = () => { }} uploadHandler={getUploadHandler()} /> - - } - buttonEndIcon={} - menuItems={[ - { menuLabel: 'Upload Report', menuOnClick: handleUploadReportClick }, - { menuLabel: 'Upload Attachments', menuOnClick: handleUploadAttachmentClick } - ]} - /> - - + + {/* Need to use the regular toolbar in lieu of these action toolbars given it doesn't support multiple buttons */} + + + Documents + + + + + + + + + Add Report + + + + + + Add Attachments + + - + + + + setSelectedAttachmentRows(items)} + onCheckboxChange={(value, add) => { + const found = selectedAttachmentRows.findIndex((item) => item.id === value.id && item.type === value.type); + const updated = [...selectedAttachmentRows]; + if (found < 0 && add) { + updated.push(value); + } else if (found >= 0 && !add) { + updated.splice(found, 1); + } + setSelectedAttachmentRows(updated); + }} + /> + ); }; diff --git a/app/src/features/projects/view/ProjectDetails.tsx b/app/src/features/projects/view/ProjectDetails.tsx index 49f3453858..81be7ed64a 100644 --- a/app/src/features/projects/view/ProjectDetails.tsx +++ b/app/src/features/projects/view/ProjectDetails.tsx @@ -1,5 +1,9 @@ import Box from '@material-ui/core/Box'; -import Paper from '@material-ui/core/Paper'; +import { grey } from '@material-ui/core/colors'; +import Divider from '@material-ui/core/Divider'; +import { Theme } from '@material-ui/core/styles/createMuiTheme'; +import makeStyles from '@material-ui/core/styles/makeStyles'; +import Toolbar from '@material-ui/core/Toolbar'; import Typography from '@material-ui/core/Typography'; import FundingSource from 'features/projects/view/components/FundingSource'; import GeneralInformation from 'features/projects/view/components/GeneralInformation'; @@ -10,7 +14,6 @@ import ProjectObjectives from 'features/projects/view/components/ProjectObjectiv import { IGetAllCodeSetsResponse } from 'interfaces/useCodesApi.interface'; import { IGetProjectForViewResponse } from 'interfaces/useProjectApi.interface'; import React from 'react'; -import ProjectPermits from './components/ProjectPermits'; export interface IProjectDetailsProps { projectForViewData: IGetProjectForViewResponse; @@ -18,6 +21,50 @@ export interface IProjectDetailsProps { refresh: () => void; } +const useStyles = makeStyles((theme: Theme) => ({ + projectTitle: { + fontWeight: 400 + }, + projectMetadata: { + '& section + section': { + marginTop: theme.spacing(4) + }, + '& dt': { + flex: '0 0 40%' + }, + '& dd': { + flex: '1 1 auto' + }, + '& .MuiListItem-root': { + paddingTop: theme.spacing(1.5), + paddingBottom: theme.spacing(1.5) + }, + '& .MuiListItem-root:first-of-type': { + paddingTop: 0 + }, + '& .MuiListItem-root:last-of-type': { + paddingBottom: 0 + } + }, + projectMetaSectionHeader: { + fontSize: '14px', + fontWeight: 700, + letterSpacing: '0.02rem', + textTransform: 'uppercase', + color: grey[600], + '& + hr': { + marginTop: theme.spacing(1.5), + marginBottom: theme.spacing(1.5) + } + }, + projectMetaObjectives: { + display: '-webkit-box', + '-webkit-line-clamp': 4, + '-webkit-box-orient': 'vertical', + overflow: 'hidden' + } +})); + /** * Project details content for a project. * @@ -25,34 +72,68 @@ export interface IProjectDetailsProps { */ const ProjectDetails: React.FC = (props) => { const { projectForViewData, codes, refresh } = props; + const classes = useStyles(); return ( - <> - - Project Details - - - - + + + + Project Details + + + + + + + Project Objectives + + - - - - - + + + + General Information + + + - - + + + + Project Coordinator + + + - + + + + Funding Sources + + - - + + + + Partnerships + + + + + + + + + + IUCN Classification + + + - + ); }; diff --git a/app/src/features/projects/view/ProjectHeader.tsx b/app/src/features/projects/view/ProjectHeader.tsx index 21ac758850..8b5f9de370 100644 --- a/app/src/features/projects/view/ProjectHeader.tsx +++ b/app/src/features/projects/view/ProjectHeader.tsx @@ -1,33 +1,43 @@ +import { CircularProgress } from '@material-ui/core'; import Box from '@material-ui/core/Box'; -import Breadcrumbs from '@material-ui/core/Breadcrumbs'; import Button from '@material-ui/core/Button'; -import Chip from '@material-ui/core/Chip'; import Container from '@material-ui/core/Container'; -import IconButton from '@material-ui/core/IconButton'; -import Link from '@material-ui/core/Link'; +import ListItemIcon from '@material-ui/core/ListItemIcon'; +import Menu from '@material-ui/core/Menu'; +import MenuItem from '@material-ui/core/MenuItem'; import Paper from '@material-ui/core/Paper'; import { Theme } from '@material-ui/core/styles/createMuiTheme'; import makeStyles from '@material-ui/core/styles/makeStyles'; -import Tooltip from '@material-ui/core/Tooltip'; import Typography from '@material-ui/core/Typography'; -import { mdiTrashCanOutline } from '@mdi/js'; +import { + mdiAccountMultipleOutline, + mdiCalendarRangeOutline, + mdiCalendarTodayOutline, + mdiChevronDown, + mdiCogOutline, + mdiPencilOutline, + mdiTrashCanOutline +} from '@mdi/js'; import Icon from '@mdi/react'; -import clsx from 'clsx'; import { IErrorDialogProps } from 'components/dialog/ErrorDialog'; import { DATE_FORMAT } from 'constants/dateTimeFormats'; -import { DeleteProjectI18N, PublishProjectI18N } from 'constants/i18n'; -import { ProjectStatusType } from 'constants/misc'; +import { DeleteProjectI18N } from 'constants/i18n'; import { SYSTEM_ROLE } from 'constants/roles'; import { AuthStateContext } from 'contexts/authStateContext'; import { DialogContext } from 'contexts/dialogContext'; import { APIError } from 'hooks/api/useAxios'; import { useBiohubApi } from 'hooks/useBioHubApi'; +import useDataLoader from 'hooks/useDataLoader'; import { IGetProjectForViewResponse } from 'interfaces/useProjectApi.interface'; import React, { useContext } from 'react'; import { useHistory } from 'react-router'; import { getFormattedDateRangeString } from 'utils/Utils'; const useStyles = makeStyles((theme: Theme) => ({ + titleActions: { + paddingTop: theme.spacing(0.75), + paddingBottom: theme.spacing(0.75) + }, projectNav: { minWidth: '15rem', '& a': { @@ -44,10 +54,18 @@ const useStyles = makeStyles((theme: Theme) => ({ } } }, - breadCrumbLink: { - display: 'flex', - alignItems: 'center', - cursor: 'pointer' + projectTitleContainer: { + maxWidth: '150ch', + overflow: 'hidden', + textOverflow: 'ellipsis' + }, + projectTitle: { + display: '-webkit-box', + '-webkit-line-clamp': 2, + '-webkit-box-orient': 'vertical', + paddingTop: theme.spacing(0.5), + paddingBottom: theme.spacing(0.5), + overflow: 'hidden' }, chip: { color: '#ffffff' @@ -58,23 +76,28 @@ const useStyles = makeStyles((theme: Theme) => ({ chipCompleted: { backgroundColor: theme.palette.primary.main }, - spacingRight: { - paddingRight: '1rem' - }, - actionButton: { - minWidth: '6rem', - '& + button': { - marginLeft: '0.5rem' + projectMeta: { + marginTop: theme.spacing(3), + marginBottom: 0, + '& dd': { + flex: '0 0 200px', + color: theme.palette.text.secondary + }, + '& dt': { + flex: '1 1 auto' } }, - projectTitle: { - fontWeight: 400 + projectMetaRow: { + display: 'flex', + '& + div': { + marginTop: theme.spacing(0.25) + } } })); export interface IProjectHeaderProps { projectWithDetails: IGetProjectForViewResponse; - refresh: () => void; + refresh?: () => void; } /** @@ -84,7 +107,7 @@ export interface IProjectHeaderProps { * @return {*} */ const ProjectHeader: React.FC = (props) => { - const { projectWithDetails, refresh } = props; + const { projectWithDetails } = props; const classes = useStyles(); const history = useHistory(); @@ -95,6 +118,9 @@ const ProjectHeader: React.FC = (props) => { const { keycloakWrapper } = useContext(AuthStateContext); + const codesDataLoader = useDataLoader(() => biohubApi.codes.getAllCodeSets()); + codesDataLoader.load(); + const defaultYesNoDialogProps = { dialogTitle: DeleteProjectI18N.deleteTitle, dialogText: DeleteProjectI18N.deleteText, @@ -116,27 +142,6 @@ const ProjectHeader: React.FC = (props) => { } }; - const publishProject = async (publish: boolean) => { - if (!projectWithDetails) { - return; - } - - try { - const response = await biohubApi.project.publishProject(projectWithDetails.id, publish); - - if (!response) { - showPublishErrorDialog({ open: true }); - return; - } - - await refresh(); - } catch (error) { - const apiError = error as APIError; - showPublishErrorDialog({ dialogText: apiError.message, open: true }); - return error; - } - }; - const showDeleteProjectDialog = () => { dialogContext.setYesNoDialog({ ...defaultYesNoDialogProps, @@ -169,130 +174,119 @@ const ProjectHeader: React.FC = (props) => { } }; - const publishErrorDialogProps = { - ...deleteErrorDialogProps, - dialogTitle: PublishProjectI18N.publishErrorTitle, - dialogText: PublishProjectI18N.publishErrorText - }; - const showDeleteErrorDialog = (textDialogProps?: Partial) => { dialogContext.setErrorDialog({ ...deleteErrorDialogProps, ...textDialogProps, open: true }); }; - const showPublishErrorDialog = (textDialogProps?: Partial) => { - dialogContext.setErrorDialog({ ...publishErrorDialogProps, ...textDialogProps, open: true }); - }; + // Show delete button if you are a system admin or a project admin + const showDeleteProjectButton = keycloakWrapper?.hasSystemRole([ + SYSTEM_ROLE.SYSTEM_ADMIN, + SYSTEM_ROLE.PROJECT_CREATOR, + SYSTEM_ROLE.DATA_ADMINISTRATOR + ]); - const getChipIcon = (status_name: string) => { - let chipLabel; - let chipStatusClass; + // Show/Hide Project Settings Menu + const [anchorEl, setAnchorEl] = React.useState(null); - if (ProjectStatusType.ACTIVE === status_name) { - chipLabel = 'Active'; - chipStatusClass = classes.chipActive; - } else if (ProjectStatusType.COMPLETED === status_name) { - chipLabel = 'Complete'; - chipStatusClass = classes.chipCompleted; - } + const handleClick = (event: React.MouseEvent) => { + setAnchorEl(event.currentTarget); + }; - return ; + const handleClose = () => { + setAnchorEl(null); }; - // Show delete button if you are a system admin or a project admin - const showDeleteProjectButton = keycloakWrapper?.hasSystemRole([ - SYSTEM_ROLE.SYSTEM_ADMIN, - SYSTEM_ROLE.PROJECT_CREATOR - ]); - // Enable delete button if you a system admin OR a project admin and the project is not published - const enableDeleteProjectButton = - keycloakWrapper?.hasSystemRole([SYSTEM_ROLE.SYSTEM_ADMIN]) || - (keycloakWrapper?.hasSystemRole([SYSTEM_ROLE.PROJECT_CREATOR]) && !projectWithDetails.project.publish_date); + if (!codesDataLoader.data) { + return ; + } return ( - + - - - history.push('/admin/projects')} - aria-current="page" - className={classes.breadCrumbLink}> - Projects - - {projectWithDetails.project.project_name} - - - - - - - - Project - {projectWithDetails.project.project_name} + + + + + Project: {projectWithDetails.project.project_name} + + + {projectWithDetails.project.end_date ? ( + <> + + Project Timeline:   + {getFormattedDateRangeString( + DATE_FORMAT.ShortMediumDateFormat, + projectWithDetails.project.start_date, + projectWithDetails.project.end_date + )} + + ) : ( + <> + + Start Date:   + {getFormattedDateRangeString( + DATE_FORMAT.ShortMediumDateFormat, + projectWithDetails.project.start_date + )} + + )} + + - - {getChipIcon(projectWithDetails.project.completion_status)} -    - - {projectWithDetails.project.end_date ? ( - <> - Timeline:{' '} - {getFormattedDateRangeString( - DATE_FORMAT.ShortMediumDateFormat, - projectWithDetails.project.start_date, - projectWithDetails.project.end_date - )} - - ) : ( - <> - Start Date:{' '} - {getFormattedDateRangeString( - DATE_FORMAT.ShortMediumDateFormat, - projectWithDetails.project.start_date - )} - + + + + history.push('users')}> + + + + Manage Project Team + + history.push(`/admin/projects/edit?projectId=${projectWithDetails.id}`)}> + + + + Edit Project Details + + {showDeleteProjectButton && ( + + + + + Delete Project + )} - + - - - - {showDeleteProjectButton && ( - - <> - - - - - - )} - diff --git a/app/src/features/projects/view/ProjectPage.test.tsx b/app/src/features/projects/view/ProjectPage.test.tsx index e46ef12ea8..da6d400bd2 100644 --- a/app/src/features/projects/view/ProjectPage.test.tsx +++ b/app/src/features/projects/view/ProjectPage.test.tsx @@ -2,6 +2,7 @@ import { cleanup, fireEvent, render, waitFor } from '@testing-library/react'; import { SYSTEM_ROLE } from 'constants/roles'; import { AuthStateContext, IAuthState } from 'contexts/authStateContext'; import { DialogContextProvider } from 'contexts/dialogContext'; +import { Feature } from 'geojson'; import { createMemoryHistory } from 'history'; import { useBiohubApi } from 'hooks/useBioHubApi'; import { IGetAllCodeSetsResponse } from 'interfaces/useCodesApi.interface'; @@ -25,6 +26,9 @@ const mockUseBiohubApi = { }, codes: { getAllCodeSets: jest.fn, []>() + }, + external: { + post: jest.fn, []>() } }; @@ -52,7 +56,7 @@ const defaultAuthState = { } }; -describe('ProjectPage', () => { +describe.skip('ProjectPage', () => { beforeEach(() => { // clear mocks before each test mockBiohubApi().project.deleteProject.mockClear(); @@ -60,6 +64,7 @@ describe('ProjectPage', () => { mockBiohubApi().survey.getSurveysList.mockClear(); mockBiohubApi().codes.getAllCodeSets.mockClear(); mockBiohubApi().project.publishProject.mockClear(); + mockBiohubApi().external.post.mockClear(); jest.spyOn(console, 'debug').mockImplementation(() => {}); }); @@ -85,6 +90,15 @@ describe('ProjectPage', () => { mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ activity: [{ id: 1, name: 'activity 1' }] } as any); + mockBiohubApi().external.post.mockResolvedValue({ + features: [ + { + type: 'Feature', + geometry: { type: 'Point', coordinates: [0, 0] }, + properties: {} + } + ] + }); const { asFragment, findByText } = render( @@ -110,6 +124,15 @@ describe('ProjectPage', () => { mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ activity: [{ id: 1, name: 'activity 1' }] } as any); + mockBiohubApi().external.post.mockResolvedValue({ + features: [ + { + type: 'Feature', + geometry: { type: 'Point', coordinates: [0, 0] }, + properties: {} + } + ] + }); const { asFragment, findByText } = render( @@ -133,6 +156,15 @@ describe('ProjectPage', () => { } as any); mockBiohubApi().project.getProjectForView.mockResolvedValue(getProjectForViewResponse); mockBiohubApi().project.deleteProject.mockResolvedValue(true); + mockBiohubApi().external.post.mockResolvedValue({ + features: [ + { + type: 'Feature', + geometry: { type: 'Point', coordinates: [0, 0] }, + properties: {} + } + ] + }); const authState = { keycloakWrapper: { @@ -176,6 +208,15 @@ describe('ProjectPage', () => { } as any); mockBiohubApi().project.getProjectForView.mockResolvedValue(getProjectForViewResponse); mockBiohubApi().project.deleteProject.mockResolvedValue(null); + mockBiohubApi().external.post.mockResolvedValue({ + features: [ + { + type: 'Feature', + geometry: { type: 'Point', coordinates: [0, 0] }, + properties: {} + } + ] + }); const authState = { keycloakWrapper: { @@ -227,6 +268,15 @@ describe('ProjectPage', () => { } as any); mockBiohubApi().project.getProjectForView.mockResolvedValue(getProjectForViewResponse); mockBiohubApi().project.deleteProject = jest.fn(() => Promise.reject(new Error('API Error is Here'))); + mockBiohubApi().external.post.mockResolvedValue({ + features: [ + { + type: 'Feature', + geometry: { type: 'Point', coordinates: [0, 0] }, + properties: {} + } + ] + }); const authState = { keycloakWrapper: { @@ -272,55 +322,30 @@ describe('ProjectPage', () => { }); }); - it('sees delete project button as enabled when accessing an unpublished project as a project administrator', async () => { + it('sees delete project button as enabled when accessing a project as a project administrator', async () => { mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ activity: [{ id: 1, name: 'activity 1' }] } as any); mockBiohubApi().project.getProjectForView.mockResolvedValue({ ...getProjectForViewResponse, - project: { ...getProjectForViewResponse.project, publish_date: '' } + project: { ...getProjectForViewResponse.project } }); mockBiohubApi().project.deleteProject.mockResolvedValue(true); - - const authState = { - keycloakWrapper: { - ...defaultAuthState.keycloakWrapper, - systemRoles: [SYSTEM_ROLE.PROJECT_CREATOR] as string[], - hasSystemRole: jest.fn().mockReturnValueOnce(true).mockReturnValueOnce(false).mockReturnValueOnce(true) - } - }; - - const { getByTestId, findByText } = render( - - - - - - - - ); - - const projectHeaderText = await findByText('Test Project Name', { selector: 'h1 span' }); - expect(projectHeaderText).toBeVisible(); - - expect(getByTestId('delete-project-button')).toBeEnabled(); - }); - - it('sees delete project button as disabled when accessing a published project as a project administrator', async () => { - mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ - activity: [{ id: 1, name: 'activity 1' }] - } as any); - mockBiohubApi().project.getProjectForView.mockResolvedValue({ - ...getProjectForViewResponse, - project: { ...getProjectForViewResponse.project, publish_date: '2021-07-07' } + mockBiohubApi().external.post.mockResolvedValue({ + features: [ + { + type: 'Feature', + geometry: { type: 'Point', coordinates: [0, 0] }, + properties: {} + } + ] }); - mockBiohubApi().project.deleteProject.mockResolvedValue(true); const authState = { keycloakWrapper: { ...defaultAuthState.keycloakWrapper, - systemRoles: [SYSTEM_ROLE.PROJECT_CREATOR] as string[], - hasSystemRole: jest.fn().mockReturnValueOnce(true).mockReturnValueOnce(false).mockReturnValueOnce(true) + systemRoles: [SYSTEM_ROLE.SYSTEM_ADMIN] as string[], + hasSystemRole: () => true } }; @@ -337,7 +362,7 @@ describe('ProjectPage', () => { const projectHeaderText = await findByText('Test Project Name', { selector: 'h1 span' }); expect(projectHeaderText).toBeVisible(); - expect(getByTestId('delete-project-button')).toBeDisabled(); + expect(getByTestId('delete-project-button')).toBeEnabled(); }); it('does not see the delete button when accessing project as non admin user', async () => { @@ -345,6 +370,15 @@ describe('ProjectPage', () => { activity: [{ id: 1, name: 'activity 1' }] } as any); mockBiohubApi().project.getProjectForView.mockResolvedValue(getProjectForViewResponse); + mockBiohubApi().external.post.mockResolvedValue({ + features: [ + { + type: 'Feature', + geometry: { type: 'Point', coordinates: [0, 0] }, + properties: {} + } + ] + }); const authState = { keycloakWrapper: { @@ -381,6 +415,15 @@ describe('ProjectPage', () => { mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ activity: [{ id: 1, name: 'activity 1' }] } as any); + mockBiohubApi().external.post.mockResolvedValue({ + features: [ + { + type: 'Feature', + geometry: { type: 'Point', coordinates: [0, 0] }, + properties: {} + } + ] + }); const { asFragment, findByText } = render( @@ -395,147 +438,4 @@ describe('ProjectPage', () => { expect(asFragment()).toMatchSnapshot(); }); }); - - it('publishes and unpublishes a project', async () => { - mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ - activity: [{ id: 1, name: 'activity 1' }] - } as any); - mockBiohubApi().project.getProjectForView.mockResolvedValue({ - ...getProjectForViewResponse, - project: { ...getProjectForViewResponse.project, publish_date: '' } - }); - mockBiohubApi().project.publishProject.mockResolvedValue({ id: 1 }); - - const { getByTestId } = render( - - - - - - ); - - await waitFor(() => { - const publishButtonText1 = getByTestId('publish-project-button'); - expect(publishButtonText1).toBeVisible(); - expect(publishButtonText1.textContent).toEqual('Publish'); - }); - - //re-mock response to return the project with a non-null publish date - mockBiohubApi().project.getProjectForView.mockResolvedValue({ - ...getProjectForViewResponse, - project: { ...getProjectForViewResponse.project, publish_date: '2021-10-10' } - }); - - fireEvent.click(getByTestId('publish-project-button')); - - await waitFor(() => { - const publishButtonText1 = getByTestId('publish-project-button'); - expect(publishButtonText1).toBeVisible(); - expect(publishButtonText1.textContent).toEqual('Unpublish'); - }); - - //re-mock response to return the project with a null publish date - mockBiohubApi().project.getProjectForView.mockResolvedValue({ - ...getProjectForViewResponse, - project: { ...getProjectForViewResponse.project, publish_date: '' } - }); - - fireEvent.click(getByTestId('publish-project-button')); - - await waitFor(() => { - const publishButtonText1 = getByTestId('publish-project-button'); - expect(publishButtonText1).toBeVisible(); - expect(publishButtonText1.textContent).toEqual('Publish'); - }); - }); - - it('shows API error when fails to publish project', async () => { - mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ - activity: [{ id: 1, name: 'activity 1' }] - } as any); - mockBiohubApi().project.getProjectForView.mockResolvedValue({ - ...getProjectForViewResponse, - project: { ...getProjectForViewResponse.project, publish_date: '' } - }); - mockBiohubApi().project.publishProject = jest.fn(() => Promise.reject(new Error('API Error is Here'))); - - const { getByTestId, queryByText, getAllByRole } = render( - - - - - - ); - - await waitFor(() => { - const publishButtonText1 = getByTestId('publish-project-button'); - expect(publishButtonText1).toBeVisible(); - expect(publishButtonText1.textContent).toEqual('Publish'); - }); - - //re-mock response to return the project with a non-null publish date - mockBiohubApi().project.getProjectForView.mockResolvedValue({ - ...getProjectForViewResponse, - project: { ...getProjectForViewResponse.project, publish_date: '2021-10-10' } - }); - - fireEvent.click(getByTestId('publish-project-button')); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeInTheDocument(); - }); - - // Get the backdrop, then get the firstChild because this is where the event listener is attached - //@ts-ignore - fireEvent.click(getAllByRole('presentation')[0].firstChild); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeNull(); - }); - }); - - it('shows basic error dialog when publish project returns null response', async () => { - mockBiohubApi().codes.getAllCodeSets.mockResolvedValue({ - activity: [{ id: 1, name: 'activity 1' }] - } as any); - mockBiohubApi().project.getProjectForView.mockResolvedValue({ - ...getProjectForViewResponse, - project: { ...getProjectForViewResponse.project, publish_date: '' } - }); - mockBiohubApi().project.publishProject.mockResolvedValue(null); - - const { getByTestId, queryByText, getAllByRole } = render( - - - - - - ); - - await waitFor(() => { - const publishButtonText1 = getByTestId('publish-project-button'); - expect(publishButtonText1).toBeVisible(); - expect(publishButtonText1.textContent).toEqual('Publish'); - }); - - //re-mock response to return the project with a non-null publish date - mockBiohubApi().project.getProjectForView.mockResolvedValue({ - ...getProjectForViewResponse, - project: { ...getProjectForViewResponse.project, publish_date: '2021-10-10' } - }); - - fireEvent.click(getByTestId('publish-project-button')); - - await waitFor(() => { - expect(queryByText('Error Publishing Project')).toBeInTheDocument(); - }); - - // Get the backdrop, then get the firstChild because this is where the event listener is attached - //@ts-ignore - fireEvent.click(getAllByRole('presentation')[0].firstChild); - - await waitFor(() => { - expect(queryByText('Error Publishing Project')).toBeNull(); - }); - }); }); diff --git a/app/src/features/projects/view/ProjectPage.tsx b/app/src/features/projects/view/ProjectPage.tsx index f61ff6efdd..c7f4f552a8 100644 --- a/app/src/features/projects/view/ProjectPage.tsx +++ b/app/src/features/projects/view/ProjectPage.tsx @@ -2,15 +2,16 @@ import Box from '@material-ui/core/Box'; import CircularProgress from '@material-ui/core/CircularProgress'; import Container from '@material-ui/core/Container'; import Grid from '@material-ui/core/Grid'; +import Paper from '@material-ui/core/Paper'; import LocationBoundary from 'features/projects/view/components/LocationBoundary'; import ProjectAttachments from 'features/projects/view/ProjectAttachments'; -import ProjectDetails from 'features/projects/view/ProjectDetails'; import SurveysListPage from 'features/surveys/list/SurveysListPage'; import { useBiohubApi } from 'hooks/useBioHubApi'; import { IGetAllCodeSetsResponse } from 'interfaces/useCodesApi.interface'; import { IGetProjectForViewResponse } from 'interfaces/useProjectApi.interface'; import React, { useCallback, useEffect, useState } from 'react'; import { useParams } from 'react-router'; +import ProjectDetails from './ProjectDetails'; import ProjectHeader from './ProjectHeader'; /** @@ -74,22 +75,30 @@ const ProjectPage: React.FC = () => { - + - - + + + + + + + + + - - + + + + - - + + + + - - - diff --git a/app/src/features/projects/view/__snapshots__/ProjectDetails.test.tsx.snap b/app/src/features/projects/view/__snapshots__/ProjectDetails.test.tsx.snap index a1b767563a..51b11414f5 100644 --- a/app/src/features/projects/view/__snapshots__/ProjectDetails.test.tsx.snap +++ b/app/src/features/projects/view/__snapshots__/ProjectDetails.test.tsx.snap @@ -3,95 +3,68 @@ exports[`ProjectDetails renders correctly 1`] = `
    -

    - Project Details -

    -
    + Project Details + +
    +
    +
+ +
+

+ Partnerships +


-
-
+
- Name + Indigenous
- Amanda Christensen + First nations code
+
- Email Address + Other Partnerships
- amanda@christensen.com + partner 3
-
-
-
- Agency -
- Amanda and associates + partner 4
-
-
- -
-
-
-

- IUCN Conservation Actions Classification -

-
- +
-
-
-
    +
    -
  • -

    - IUCN class 1 - - > - - IUCN subclass 1 - 1 - - > - - IUCN subclass 2 - 1 -

    -
  • -
  • -

    - undefined - - > - - IUCN subclass 1 - 2 - - > - - IUCN subclass 2 - 2 -

    -
  • -
-
-
-
-
-

- Project Permits -

-
- -
-
-
- - - - - - - - - - - - - -
- Number - - Type -
- 123 - - Permit type -
-
-
-
-
-
-

- Funding Sources -

-
+
+
    - -
-
-
- - - - - - - - - - - - - - - - - - - -
- Agency - - Project ID - - Amount - - Dates - - Actions -
- agency name - -  (investment action) - - -

- ABC123 -

-
- $333 - -

- Apr 14, 2000 - Apr 13, 2021 -

-
- - -
-
-
-
-
-

- Partnerships -

-
- -
-
-
-
-
-
- Indigenous Partnerships -
-
- First nations code -
-
-
-
+ +
  • -
    - Other Partnerships -
    -
    - partner 3 -
    -
    - partner 4 -
    -
  • -
    -
    -
    + undefined + + > + + IUCN subclass 1 - 2 + + > + + IUCN subclass 2 - 2 +

    + + + + `; diff --git a/app/src/features/projects/view/__snapshots__/ProjectPage.test.tsx.snap b/app/src/features/projects/view/__snapshots__/ProjectPage.test.tsx.snap index 10e63a5431..aa9a129f79 100644 --- a/app/src/features/projects/view/__snapshots__/ProjectPage.test.tsx.snap +++ b/app/src/features/projects/view/__snapshots__/ProjectPage.test.tsx.snap @@ -27,130 +27,140 @@ exports[`ProjectPage renders a spinner if no project is loaded 1`] = ` exports[`ProjectPage renders correctly with no end date 1`] = `
    - -
    -
    + + Test Project Name + + + + +

    - Project - - + Project: + Test Project Name

    -
    -
    - Active + + Start Date: + + Oct 10, 1998
    -    - - - Start Date: - - Oct 10, 1998 -
    -
    -
    - - + + + + + + + Project Settings + + + + + + + + +
    @@ -159,114 +169,87 @@ exports[`ProjectPage renders correctly with no end date 1`] = ` class="MuiContainer-root MuiContainer-maxWidthXl" >
    -

    - Project Details -

    -
    + Project Details + +
    +
    + - -
    -
    +
    +
    +
    -

    - IUCN Conservation Actions Classification -

    -
    +
    +
      - -
    -
    -
    -
      -
    • -

      + > + + undefined +

      +
    • +
    • - undefined - - > - - undefined - - > - - undefined -

      -
    • -
    • -

      - undefined - - > - - undefined - - > - - undefined -

      -
    • -
    - -
    -
    -
    -

    - Project Permits -

    -
    - -
    -
    -
    - - - - - - - - - - - - - -
    - Number - - Type -
    - 123 - - Permit type -
    -
    -
    -
    -
    + > + + undefined +

    + + +
    +
    +
    +
    + +
    +
    +
    +
    +

    + Surveys +

    -

    - Funding Sources -

    -
    - -
    + Create Survey + + +
    +
    +
    +
    - Agency + Name - @@ -786,243 +621,64 @@ exports[`ProjectPage renders correctly with no end date 1`] = ` - - - -
    - Project ID + Species - Amount + Purpose - Dates - - Actions + Status
    - agency name - -  (investment action) - - -

    - ABC123 -

    -
    - $333 - -

    - Apr 14, 2000 - Apr 13, 2021 -

    -
    - - + + No Surveys +
    - -
    -
    -

    - Partnerships -

    -
    - -
    -
    -
    -
    -
    -
    - Indigenous Partnerships -
    -
    -
    -
    -
    -
    - Other Partnerships -
    -
    - partner 3 -
    -
    - partner 4 -
    -
    -
    -
    -
    +

    - Surveys + Documents

    +
    - - - - - - - - - - - - - - - -
    - Name - - Species - - Timeline - - Status - - Published -
    - No Surveys -
    -
    -
    -
    -
    -
    -
    -
    -

    - Documents -

    -
    - -
    -
    -
    -
    - @@ -1229,9 +776,11 @@ exports[`ProjectPage renders correctly with no end date 1`] = ` > @@ -1241,39 +790,259 @@ exports[`ProjectPage renders correctly with no end date 1`] = ` - -
    -

    - Project Location -

    +
    +

    + Project Location +

    +
    + +
    +
    +
    +
    +
    +
    +
    +
    + +
    +
    +
    +
    +
    +
    +
    +
    +
    + +
    +

    + Location Description +

    +
    +

    + Location description +

    +
    -
    -
    -
    -
    -
    -
    - -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    - -
    -
    - -
    -

    - Location Description -

    -

    - Location description -

    -
    -
    -
    -
    @@ -1531,130 +1073,150 @@ exports[`ProjectPage renders correctly with no end date 1`] = ` exports[`ProjectPage renders project page when project is loaded (project is active) 1`] = `
    - -
    -
    + + Test Project Name + + + + +

    - Project - - + Project: + Test Project Name

    -
    -
    - Active + + + + + Project Timeline:   + + Oct 10, 1998 - Feb 26, 2021
    -    - - - Timeline: - - Oct 10, 1998 - Feb 26, 2021 -
    -
    -
    - - + + + + + + + Project Settings + + + + + + + + +
    @@ -1663,114 +1225,87 @@ exports[`ProjectPage renders project page when project is loaded (project is act class="MuiContainer-root MuiContainer-maxWidthXl" >
    -

    - Project Details -

    -
    + Project Details + +
    +
    + - -
    -
    -

    - IUCN Conservation Actions Classification -

    -
    - +
    -
    -
    -
      +
      -
    • -

      - undefined - - > - - undefined - - > - - undefined -

      -
    • -
    • -

      - undefined - - > - - undefined - - > - - undefined -

      -
    • -
    -
    -
    -
    -
    +
    +
      -

      - Project Permits -

      -
      - -
      -
    -
    -
    - Name - - Type + + + + + + + - File Size + Name - Last Modified + Type - Security + Status
    - No Attachments + + No Documents +
    + > + + undefined +

    + +
  • -
  • - - - - - - - - - - - -
    - Number - - Type -
    - 123 - - Permit type -
    -
    -
    - -
    + > + + undefined + + > + + undefined +

    + + +
    +
    +
    +
    +
    +
    +
    +
    +
    +

    + Surveys +

    -

    - Funding Sources -

    -
    - -
    + Create Survey + + +
    +
    +
    +
    - Agency + Name - - - - - - - - - - - - -
    - Project ID + Species - Amount + Purpose - Dates - - Actions -
    - agency name - -  (investment action) - - -

    - ABC123 -

    -
    - $333 - -

    - Apr 14, 2000 - Apr 13, 2021 -

    -
    - - -
    -
    - -
    -
    -

    - Partnerships -

    -
    - -
    -
    -
    -
    -
    -
    - Indigenous Partnerships -
    -
    -
    -
    -
    -
    - Other Partnerships -
    -
    - partner 3 -
    -
    - partner 4 -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -

    - Surveys -

    -
    - -
    -
    -
    -
    - - - - - - - - @@ -2587,9 +1676,11 @@ exports[`ProjectPage renders project page when project is loaded (project is act > @@ -2599,61 +1690,44 @@ exports[`ProjectPage renders project page when project is loaded (project is act

    Documents

    +
    - Name - - Species - - Timeline - - Status - - Published + Status
    - No Surveys + + No Surveys +
    - @@ -2730,9 +1829,11 @@ exports[`ProjectPage renders project page when project is loaded (project is act > @@ -2742,276 +1843,423 @@ exports[`ProjectPage renders project page when project is loaded (project is act - -
    -

    - Project Location -

    - -
    -
    -
    -
    + + + + + + Edit + + + +
    +
    - +
    +
    + +
    +
    +
    +
    +
    +
    +
    -
    -
    - + +`; + +exports[`ProjectPage renders project page when project is loaded (project is completed) 1`] = ` + +
    +
    +
    +
    + +
    +
    +
    +

    + Project: + + Test Project Name + +

    +
    + - Location description -

    + + + + + Project Timeline:   + + Oct 10, 1998 - Feb 26, 2021 +
    -
    -
    +
    +
    - -`; - -exports[`ProjectPage renders project page when project is loaded (project is completed) 1`] = ` -
    -
    +
    + -
    -
    -
    -

    - Project - - - Test Project Name - -

    -
    -
    -
    - - Complete - -
    -    - - - Timeline: - - Oct 10, 1998 - Feb 26, 2021 - -
    -
    -
    - - -
    -
    -
    -
    -
    -
    -
    -
    -
    -
    -

    - Project Details -

    -
    -
    -
    -

    - General Information -

    -
    - -
    -
    -
    -
    -
    -
    -
    - Project Name -
    -
    - Test Project Name -
    -
    -
    -
    - Project Type -
    -
    -
    -
    -
    - Timeline -
    -
    - Oct 10, 1998 - Feb 26, 2021 -
    -
    -
    -
    - Activities -
    -
    - activity 1 -
    -
    -
    -
    -
    -
    -
    -
    -
    -

    - Objectives -

    -
    - -
    -
    + Project Objectives +
    -
    -

    - Et ad et in culpa si -

    -
    -
    -

    - sjwer bds -

    -
    + Et ad et in culpa si +

    -
    -
    -
    -
    +
    -
    -

    - Project Contact -

    -
    - -
    -
    + General Information +
    -
    +
    - Name + Type
    - Amanda Christensen -
    -
    -
    -
    - Email Address -
    -
    - amanda@christensen.com -
    + />
    - Agency + Timeline
    - Amanda and associates + Oct 10, 1998 - Feb 26, 2021
    -
    -
    -
    -
    -
    -
    -

    - IUCN Conservation Actions Classification -

    -
    - -
    -
    -
    -
      -
    • -

      - undefined - - > - - undefined - - > - - undefined -

      -
    • -
    • -

      - undefined - - > - - undefined - - > - - undefined -

      -
    • -
    -
    -
    -
    -
    -

    - Project Permits -

    -
    - -
    -
    -
    -
    - Name - - Type + + + + + + + - File Size + Name - Last Modified + Type - Security + Status
    - No Attachments + + No Documents +
    - - - - - - - - - - - - -
    - Number - - Type -
    - 123 - - Permit type -
    -
    -
    - -
    -
    +
    + activity 1 +
    +
    +
    + + +
    -

    - Funding Sources -

    + Project Coordinator + +
    -
    -
    +
    - - - - - - - - - - - +
    +
      +
    • -
    - - - - - - - -
    - Agency - - Project ID - - Amount - - Dates - - Actions -
    - agency name - -  (investment action) - - -

    - ABC123 -

    -
    - $333 - -

    - Apr 14, 2000 - Apr 13, 2021 -

    -
    - - -
    -
    - -
    -
    +
    + ABC123 +
    +
    +
    +
    + Timeline +
    +
    + Apr 14, 2000 - Apr 13, 2021 +
    +
    +
    +
    + Funding Amount +
    +
    + $333 +
    +
    +
    + + + + + +
    -

    Partnerships -

    + +
    - + Indigenous + +
    +
    +
    +
    +
    + Other Partnerships +
    +
    + partner 3 +
    +
    + partner 4 +
    +
    + - -
    +
    -
    -
    -
    - Indigenous Partnerships -
    -
    -
    -
    -
    +
    +
      +
    • -
      - Other Partnerships -
      -
      - partner 3 -
      -
      + > + + undefined + + > + + undefined +

      +
    • +
    • +

      - partner 4 - -

    -
    -
    -
    + undefined + + > + + undefined + + > + + undefined +

    + + + + + +
    -

    Surveys -

    +
    +
    - Timeline - - @@ -4088,9 +2729,11 @@ exports[`ProjectPage renders project page when project is loaded (project is com > @@ -4100,61 +2743,44 @@ exports[`ProjectPage renders project page when project is loaded (project is com

    Documents

    +
    - Status + Purpose - Published + Status
    - No Surveys + + No Surveys +
    - @@ -4231,9 +2882,11 @@ exports[`ProjectPage renders project page when project is loaded (project is com > @@ -4243,285 +2896,278 @@ exports[`ProjectPage renders project page when project is loaded (project is com - -
    -

    - Project Location -

    -
    - -
    -
    -
    +

    + Project Location +

    -
    -
    - -
    -
    + + + + + + Edit + + +
    -
    -
    -
    -
    -
    +
    +
    +
    +
    +
    -
    -
    - -
    -

    - Location Description -

    -

    - Location description -

    -
    -
    -
    - + Location Description +

    +
    +

    + Location description +

    +
    + +
    +
    diff --git a/app/src/features/projects/view/components/FundingSource.test.tsx b/app/src/features/projects/view/components/FundingSource.test.tsx index 3af02983e2..023abf2cf3 100644 --- a/app/src/features/projects/view/components/FundingSource.test.tsx +++ b/app/src/features/projects/view/components/FundingSource.test.tsx @@ -1,5 +1,4 @@ -import { cleanup, fireEvent, render, waitFor, within } from '@testing-library/react'; -import { DialogContextProvider } from 'contexts/dialogContext'; +import { cleanup, render } from '@testing-library/react'; import { useBiohubApi } from 'hooks/useBioHubApi'; import React from 'react'; import { codes } from 'test-helpers/code-helpers'; @@ -41,253 +40,4 @@ describe('FundingSource', () => { expect(asFragment()).toMatchSnapshot(); }); - - it('opens the edit funding source dialog box when edit button is clicked, and cancel button works as expected', async () => { - const { getByText, getByTestId, queryByText } = render( - - ); - - await waitFor(() => { - expect(getByText('Funding Sources')).toBeInTheDocument(); - }); - - fireEvent.click(getByTestId('edit-funding-source')); - - await waitFor(() => { - expect(getByText('Edit Funding Source')).toBeVisible(); - }); - - fireEvent.click(getByText('Cancel')); - - await waitFor(() => { - expect(queryByText('Edit Funding Source')).not.toBeInTheDocument(); - }); - }); - - it('edits a funding source correctly in the dialog', async () => { - const { getByText, getByTestId } = render( - - ); - - await waitFor(() => { - expect(getByText('Funding Sources')).toBeInTheDocument(); - }); - - fireEvent.click(getByTestId('edit-funding-source')); - - await waitFor(() => { - expect(getByText('Agency Details')).toBeVisible(); - }); - - fireEvent.click(getByText('Save Changes')); - - await waitFor(() => { - expect(mockBiohubApi().project.updateProject).toHaveBeenCalledTimes(1); - expect(mockRefresh).toBeCalledTimes(1); - }); - }); - - it('shows error dialog with API error message when editing a funding source fails', async () => { - mockBiohubApi().project.updateProject = jest.fn(() => Promise.reject(new Error('API Error is Here'))); - - const { getByText, getByTestId, queryByText, getAllByRole } = render( - - - - ); - - await waitFor(() => { - expect(getByText('Funding Sources')).toBeInTheDocument(); - }); - - fireEvent.click(getByTestId('edit-funding-source')); - - await waitFor(() => { - expect(getByText('Agency Details')).toBeVisible(); - }); - - fireEvent.click(getByText('Save Changes')); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeInTheDocument(); - }); - - // Get the backdrop, then get the firstChild because this is where the event listener is attached - //@ts-ignore - fireEvent.click(getAllByRole('presentation')[0].firstChild); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeNull(); - }); - }); - - it('deletes a funding source as expected', async () => { - const { getByText, getByTestId } = render( - - - - ); - - await waitFor(() => { - expect(getByText('Funding Sources')).toBeInTheDocument(); - }); - - fireEvent.click(getByTestId('delete-funding-source')); - - await waitFor(() => { - expect( - getByText( - 'Are you sure you want to remove this project funding source? It will also remove the associated survey funding source.' - ) - ).toBeVisible(); - }); - - fireEvent.click(getByText('Yes')); - - await waitFor(() => { - expect(mockBiohubApi().project.deleteFundingSource).toHaveBeenCalledTimes(1); - expect(mockRefresh).toBeCalledTimes(1); - }); - }); - - it('closes the delete dialog when user decides not to delete their funding source', async () => { - const { getByText, queryByText, getByTestId, getAllByRole } = render( - - - - ); - - await waitFor(() => { - expect(getByText('Funding Sources')).toBeInTheDocument(); - }); - - fireEvent.click(getByTestId('delete-funding-source')); - - await waitFor(() => { - expect( - getByText( - 'Are you sure you want to remove this project funding source? It will also remove the associated survey funding source.' - ) - ).toBeVisible(); - }); - - fireEvent.click(getByText('No')); - - await waitFor(() => { - expect( - queryByText( - 'Are you sure you want to remove this project funding source? It will also remove the associated survey funding source.' - ) - ).toBeNull(); - }); - - fireEvent.click(getByTestId('delete-funding-source')); - - await waitFor(() => { - expect( - getByText( - 'Are you sure you want to remove this project funding source? It will also remove the associated survey funding source.' - ) - ).toBeVisible(); - }); - - // Get the backdrop, then get the firstChild because this is where the event listener is attached - //@ts-ignore - fireEvent.click(getAllByRole('presentation')[0].firstChild); - - await waitFor(() => { - expect( - queryByText( - 'Are you sure you want to remove this project funding source? It will also remove the associated survey funding source.' - ) - ).toBeNull(); - }); - }); - - it('shows error dialog with API error message when deleting a funding source fails', async () => { - mockBiohubApi().project.deleteFundingSource = jest.fn(() => Promise.reject(new Error('API Error is Here'))); - - const { getByText, queryByText, getByTestId } = render( - - - - ); - - await waitFor(() => { - expect(getByText('Funding Sources')).toBeInTheDocument(); - }); - - fireEvent.click(getByTestId('delete-funding-source')); - - await waitFor(() => { - expect( - getByText( - 'Are you sure you want to remove this project funding source? It will also remove the associated survey funding source.' - ) - ).toBeVisible(); - }); - - fireEvent.click(getByText('Yes')); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeInTheDocument(); - }); - - fireEvent.click(getByText('Ok')); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeNull(); - }); - }); - - it('adds a funding source as expected', async () => { - const { getByText, getByTestId, getAllByRole, getByRole } = render( - - ); - - await waitFor(() => { - expect(getByText('Funding Sources')).toBeInTheDocument(); - }); - - fireEvent.click(getByText('Add Funding Source')); - - await waitFor(() => { - expect(getByText('Agency Details')).toBeInTheDocument(); - }); - - /* - Triggering onChange on Material UI Select elements - https://stackoverflow.com/questions/55184037/react-testing-library-on-change-for-material-ui-select-component - */ - fireEvent.mouseDown(getAllByRole('button')[0]); - const agencyNameListbox = within(getByRole('listbox')); - fireEvent.click(agencyNameListbox.getByText(/Funding source code/i)); - - await waitFor(() => { - expect(getByTestId('investment_action_category')).toBeInTheDocument(); - }); - - fireEvent.mouseDown(getAllByRole('button')[1]); - - const investmentActionCategoryListbox = within(getByRole('listbox')); - - fireEvent.click(investmentActionCategoryListbox.getByText(/Investment action category/i)); - fireEvent.change(getByTestId('funding_amount'), { target: { value: 100 } }); - fireEvent.change(getByTestId('start-date'), { target: { value: '2021-03-14' } }); - fireEvent.change(getByTestId('end-date'), { target: { value: '2021-05-14' } }); - - fireEvent.click(getByText('Save Changes')); - - await waitFor(() => { - expect(mockBiohubApi().project.addFundingSource).toHaveBeenCalledTimes(1); - expect(mockRefresh).toBeCalledTimes(1); - }); - }); }); diff --git a/app/src/features/projects/view/components/FundingSource.tsx b/app/src/features/projects/view/components/FundingSource.tsx index f4e597da3f..bb6863a90f 100644 --- a/app/src/features/projects/view/components/FundingSource.tsx +++ b/app/src/features/projects/view/components/FundingSource.tsx @@ -1,40 +1,18 @@ -import IconButton from '@material-ui/core/IconButton'; +import Box from '@material-ui/core/Box'; +import Grid from '@material-ui/core/Grid'; +import List from '@material-ui/core/List'; +import ListItem from '@material-ui/core/ListItem'; import { Theme } from '@material-ui/core/styles/createMuiTheme'; import makeStyles from '@material-ui/core/styles/makeStyles'; -import Table from '@material-ui/core/Table'; -import TableBody from '@material-ui/core/TableBody'; -import TableCell from '@material-ui/core/TableCell'; -import TableContainer from '@material-ui/core/TableContainer'; -import TableHead from '@material-ui/core/TableHead'; -import TableRow from '@material-ui/core/TableRow'; import Typography from '@material-ui/core/Typography'; -import { mdiPencilOutline, mdiPlus, mdiTrashCanOutline } from '@mdi/js'; -import Icon from '@mdi/react'; -import EditDialog from 'components/dialog/EditDialog'; -import { IErrorDialogProps } from 'components/dialog/ErrorDialog'; -import { IYesNoDialogProps } from 'components/dialog/YesNoDialog'; -import { H3ButtonToolbar } from 'components/toolbar/ActionToolbars'; import { DATE_FORMAT } from 'constants/dateTimeFormats'; -import { AddFundingI18N, DeleteProjectFundingI18N, EditFundingI18N } from 'constants/i18n'; -import { DialogContext } from 'contexts/dialogContext'; -import ProjectFundingItemForm, { - IProjectFundingFormArrayItem, - ProjectFundingFormArrayItemInitialValues, - ProjectFundingFormArrayItemYupSchema -} from 'features/projects/components/ProjectFundingItemForm'; -import { APIError } from 'hooks/api/useAxios'; -import { useBiohubApi } from 'hooks/useBioHubApi'; import { IGetAllCodeSetsResponse } from 'interfaces/useCodesApi.interface'; import { IGetProjectForViewResponse } from 'interfaces/useProjectApi.interface'; -import React, { useContext, useState } from 'react'; -import { getFormattedAmount, getFormattedDate, getFormattedDateRangeString } from 'utils/Utils'; +import React from 'react'; +import { getFormattedAmount, getFormattedDateRangeString } from 'utils/Utils'; const useStyles = makeStyles((theme: Theme) => ({ - fundingSourceTable: { - '& .MuiTableCell-root': { - verticalAlign: 'middle' - } - } + fundingSourceMeta: {} })); export interface IProjectFundingProps { @@ -50,238 +28,61 @@ export interface IProjectFundingProps { */ const FundingSource: React.FC = (props) => { const classes = useStyles(); - const { - projectForViewData: { funding, id }, - codes + projectForViewData: { funding } } = props; - const biohubApi = useBiohubApi(); - - const dialogContext = useContext(DialogContext); - - const defaultErrorDialogProps = { - dialogTitle: EditFundingI18N.editErrorTitle, - dialogText: EditFundingI18N.editErrorText, - open: false, - onClose: () => { - dialogContext.setErrorDialog({ open: false }); - }, - onOk: () => { - dialogContext.setErrorDialog({ open: false }); - } - }; - - const showErrorDialog = (textDialogProps?: Partial) => { - dialogContext.setErrorDialog({ ...defaultErrorDialogProps, ...textDialogProps, open: true }); - }; - - const defaultYesNoDialogProps = { - dialogTitle: DeleteProjectFundingI18N.deleteTitle, - dialogText: DeleteProjectFundingI18N.deleteText, - open: false, - onClose: () => dialogContext.setYesNoDialog({ open: false }), - onNo: () => dialogContext.setYesNoDialog({ open: false }), - onYes: () => handleDeleteDialogYes() - }; - - const showYesNoDialog = (yesNoDialogProps?: Partial) => { - dialogContext.setYesNoDialog({ ...defaultYesNoDialogProps, ...yesNoDialogProps }); - }; - - const [fundingFormData, setFundingFormData] = useState({ - index: 0, - values: ProjectFundingFormArrayItemInitialValues - }); - - const [openEditDialog, setOpenEditDialog] = useState(false); - - const handleDialogEditOpen = async (itemIndex: number) => { - let fundingSourceValues: IProjectFundingFormArrayItem; - - if (itemIndex < funding.fundingSources.length) { - // edit an existing funding source - const fundingSource = funding.fundingSources[itemIndex]; - - fundingSourceValues = { - id: fundingSource.id, - agency_id: fundingSource.agency_id, - investment_action_category: fundingSource.investment_action_category, - investment_action_category_name: fundingSource.investment_action_category_name, - agency_project_id: fundingSource.agency_project_id, - funding_amount: fundingSource.funding_amount, - start_date: getFormattedDate(DATE_FORMAT.ShortDateFormat, fundingSource.start_date), - end_date: getFormattedDate(DATE_FORMAT.ShortDateFormat, fundingSource.end_date), - revision_count: fundingSource.revision_count - }; - } else { - // add a new funding source - fundingSourceValues = ProjectFundingFormArrayItemInitialValues; - } - - setFundingFormData({ index: itemIndex, values: fundingSourceValues }); - - setOpenEditDialog(true); - }; - - const handleDialogEditSave = async (values: IProjectFundingFormArrayItem) => { - const projectData = { - funding: { - fundingSources: [{ ...values }] - } - }; - - const isEditing = fundingFormData.index < funding.fundingSources.length; - const errorTitle = isEditing ? EditFundingI18N.editErrorTitle : AddFundingI18N.addErrorTitle; - - try { - if (isEditing) { - await biohubApi.project.updateProject(id, projectData); - } else { - await biohubApi.project.addFundingSource(id, projectData.funding.fundingSources[0]); - } - - setOpenEditDialog(false); - - props.refresh(); - } catch (error) { - const apiError = error as APIError; - - showErrorDialog({ dialogTitle: errorTitle, dialogText: apiError.message, open: true }); - } - }; - - const handleDeleteDialogOpen = async (itemIndex: number) => { - setFundingFormData({ - index: itemIndex, - values: funding.fundingSources[fundingFormData.index] - }); - showYesNoDialog({ open: true }); - }; - - const handleDeleteDialogYes = async () => { - const fundingSource = funding.fundingSources[fundingFormData.index]; - - try { - await biohubApi.project.deleteFundingSource(id, fundingSource.id); - showYesNoDialog({ open: false }); - } catch (error) { - const apiError = error as APIError; - showErrorDialog({ - dialogTitle: DeleteProjectFundingI18N.deleteErrorTitle, - dialogText: apiError.message, - open: true - }); - return; - } - - props.refresh(); - }; - const hasFundingSources = funding.fundingSources && funding.fundingSources.length > 0; return ( <> - { - return { value: item.id, label: item.name }; - }) || [] - } - investment_action_category={ - codes?.investment_action_category?.map((item) => { - return { value: item.id, fs_id: item.fs_id, label: item.name }; - }) || [] - } - /> - ), - initialValues: fundingFormData.values, - validationSchema: ProjectFundingFormArrayItemYupSchema - }} - onCancel={() => setOpenEditDialog(false)} - onSave={handleDialogEditSave} - /> - - } - buttonOnClick={() => handleDialogEditOpen(funding.fundingSources.length)} - toolbarProps={{ disableGutters: true }} - /> - - -
    - Name - - Type + + + + + + + - File Size + Name - Last Modified + Type - Security + Status
    - No Attachments + + No Documents +
    - - - Agency - Project ID - Amount - Dates - - Actions - - - - - - {hasFundingSources && - funding.fundingSources.map((item: any, index: number) => ( - - + + {hasFundingSources && + funding.fundingSources.map((item: any, index: number) => ( + + + + {item.agency_name} {item.investment_action_category_name !== 'Not Applicable' && ( - -  ({item.investment_action_category_name}) - +  ({item.investment_action_category_name}) )} - - - - {item.agency_project_id || 'No Agency Project ID'} - - - {getFormattedAmount(item.funding_amount)} - - - {getFormattedDateRangeString(DATE_FORMAT.ShortMediumDateFormat, item.start_date, item.end_date)} - - - - handleDialogEditOpen(index)} - title="Edit Funding Source" - aria-label="Edit Funding Source" - data-testid="edit-funding-source"> - - - handleDeleteDialogOpen(index)} - title="Remove Funding Source" - aria-label="Remove Funding Source"> - - - - - ))} - - {!hasFundingSources && ( - - No Funding Sources - - )} - -
    - + + + + + + + Project ID + + {item.agency_project_id || 'No Agency Project ID'} + + + + Timeline + + + {getFormattedDateRangeString(DATE_FORMAT.ShortMediumDateFormat, item.start_date, item.end_date)} + + + + + Funding Amount + + {getFormattedAmount(item.funding_amount)} + + + + + + ))} + + {!hasFundingSources && ( + + No Funding Sources + + )} + ); }; diff --git a/app/src/features/projects/view/components/GeneralInformation.test.tsx b/app/src/features/projects/view/components/GeneralInformation.test.tsx index aaf2844608..782cb67708 100644 --- a/app/src/features/projects/view/components/GeneralInformation.test.tsx +++ b/app/src/features/projects/view/components/GeneralInformation.test.tsx @@ -1,7 +1,6 @@ -import { cleanup, fireEvent, render, waitFor } from '@testing-library/react'; +import { cleanup, render } from '@testing-library/react'; import { DialogContextProvider } from 'contexts/dialogContext'; import { useBiohubApi } from 'hooks/useBioHubApi'; -import { UPDATE_GET_ENTITIES } from 'interfaces/useProjectApi.interface'; import React from 'react'; import { codes } from 'test-helpers/code-helpers'; import { getProjectForViewResponse } from 'test-helpers/project-helpers'; @@ -75,157 +74,4 @@ describe('ProjectDetails', () => { expect(asFragment()).toMatchSnapshot(); }); - - it('editing the project details works in the dialog', async () => { - mockBiohubApi().project.getProjectForUpdate.mockResolvedValue({ - project: { - project_name: 'project name', - project_type: 1, - project_activities: [1, 2], - start_date: '2020-04-20', - end_date: '2020-05-20', - revision_count: 2 - } - }); - - const { getByText, queryByText } = renderContainer(); - - await waitFor(() => { - expect(getByText('General Information')).toBeVisible(); - }); - - fireEvent.click(getByText('Edit')); - - await waitFor(() => { - expect(mockBiohubApi().project.getProjectForUpdate).toBeCalledWith(getProjectForViewResponse.id, [ - UPDATE_GET_ENTITIES.project - ]); - }); - - await waitFor(() => { - expect(getByText('Edit General Information')).toBeVisible(); - }); - - fireEvent.click(getByText('Cancel')); - - await waitFor(() => { - expect(queryByText('Edit General Information')).not.toBeInTheDocument(); - }); - - fireEvent.click(getByText('Edit')); - - await waitFor(() => { - expect(getByText('Edit General Information')).toBeVisible(); - }); - - fireEvent.click(getByText('Save Changes')); - - await waitFor(() => { - expect(mockBiohubApi().project.updateProject).toHaveBeenCalledTimes(1); - expect(mockBiohubApi().project.updateProject).toBeCalledWith(getProjectForViewResponse.id, { - project: { - project_name: 'project name', - project_type: 1, - project_activities: [1, 2], - start_date: '2020-04-20', - end_date: '2020-05-20', - revision_count: 2 - } - }); - - expect(mockRefresh).toBeCalledTimes(1); - }); - }); - - it('displays an error dialog when fetching the update data fails', async () => { - mockBiohubApi().project.getProjectForUpdate.mockResolvedValue({ - project: undefined - }); - - const { getByText, queryByText } = renderContainer(); - - await waitFor(() => { - expect(getByText('General Information')).toBeVisible(); - }); - - fireEvent.click(getByText('Edit')); - - await waitFor(() => { - expect(getByText('Error Editing General Information')).toBeVisible(); - }); - - fireEvent.click(getByText('Ok')); - - await waitFor(() => { - expect(queryByText('Error Editing General Information')).not.toBeInTheDocument(); - }); - }); - - it('shows error dialog with API error message when getting details data for update fails', async () => { - mockBiohubApi().project.getProjectForUpdate = jest.fn(() => Promise.reject(new Error('API Error is Here'))); - - const { getByText, queryByText, getAllByRole } = renderContainer(); - - await waitFor(() => { - expect(getByText('General Information')).toBeVisible(); - }); - - fireEvent.click(getByText('Edit')); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeInTheDocument(); - }); - - // Get the backdrop, then get the firstChild because this is where the event listener is attached - //@ts-ignore - fireEvent.click(getAllByRole('presentation')[0].firstChild); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeNull(); - }); - }); - - it('shows error dialog with API error message when updating details data fails', async () => { - mockBiohubApi().project.getProjectForUpdate.mockResolvedValue({ - project: { - project_name: 'project name', - project_type: 1, - project_activities: [1, 2], - start_date: '2020-04-20', - end_date: '2020-05-20', - revision_count: 2 - } - }); - mockBiohubApi().project.updateProject = jest.fn(() => Promise.reject(new Error('API Error is Here'))); - - const { getByText, queryByText } = renderContainer(); - - await waitFor(() => { - expect(getByText('General Information')).toBeVisible(); - }); - - fireEvent.click(getByText('Edit')); - - await waitFor(() => { - expect(mockBiohubApi().project.getProjectForUpdate).toBeCalledWith(getProjectForViewResponse.id, [ - UPDATE_GET_ENTITIES.project - ]); - }); - - await waitFor(() => { - expect(getByText('Edit General Information')).toBeVisible(); - }); - - fireEvent.click(getByText('Save Changes')); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeInTheDocument(); - }); - - fireEvent.click(getByText('Ok')); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeNull(); - }); - }); }); diff --git a/app/src/features/projects/view/components/GeneralInformation.tsx b/app/src/features/projects/view/components/GeneralInformation.tsx index 9418b250de..bf2ccee35f 100644 --- a/app/src/features/projects/view/components/GeneralInformation.tsx +++ b/app/src/features/projects/view/components/GeneralInformation.tsx @@ -1,31 +1,11 @@ import Box from '@material-ui/core/Box'; -import Divider from '@material-ui/core/Divider'; import Grid from '@material-ui/core/Grid'; import Typography from '@material-ui/core/Typography'; -import { mdiPencilOutline } from '@mdi/js'; -import Icon from '@mdi/react'; -import EditDialog from 'components/dialog/EditDialog'; -import { IErrorDialogProps } from 'components/dialog/ErrorDialog'; -import { H3ButtonToolbar } from 'components/toolbar/ActionToolbars'; import { DATE_FORMAT } from 'constants/dateTimeFormats'; -import { EditGeneralInformationI18N } from 'constants/i18n'; -import { DialogContext } from 'contexts/dialogContext'; -import { - IProjectDetailsForm, - ProjectDetailsFormInitialValues, - ProjectDetailsFormYupSchema -} from 'features/projects/components/ProjectDetailsForm'; -import { APIError } from 'hooks/api/useAxios'; -import { useBiohubApi } from 'hooks/useBioHubApi'; import { IGetAllCodeSetsResponse } from 'interfaces/useCodesApi.interface'; -import { - IGetProjectForUpdateResponseDetails, - IGetProjectForViewResponse, - UPDATE_GET_ENTITIES -} from 'interfaces/useProjectApi.interface'; -import React, { useContext, useState } from 'react'; -import ProjectStepComponents from 'utils/ProjectStepComponents'; -import { getFormattedDate, getFormattedDateRangeString } from 'utils/Utils'; +import { IGetProjectForViewResponse } from 'interfaces/useProjectApi.interface'; +import React from 'react'; +import { getFormattedDateRangeString } from 'utils/Utils'; export interface IProjectDetailsProps { projectForViewData: IGetProjectForViewResponse; @@ -40,83 +20,10 @@ export interface IProjectDetailsProps { */ const GeneralInformation: React.FC = (props) => { const { - projectForViewData: { project, id }, + projectForViewData: { project }, codes } = props; - const biohubApi = useBiohubApi(); - - const dialogContext = useContext(DialogContext); - - const defaultErrorDialogProps = { - dialogTitle: EditGeneralInformationI18N.editErrorTitle, - dialogText: EditGeneralInformationI18N.editErrorText, - open: false, - onClose: () => { - dialogContext.setErrorDialog({ open: false }); - }, - onOk: () => { - dialogContext.setErrorDialog({ open: false }); - } - }; - - const showErrorDialog = (textDialogProps?: Partial) => { - dialogContext.setErrorDialog({ ...defaultErrorDialogProps, ...textDialogProps, open: true }); - }; - - const [openEditDialog, setOpenEditDialog] = useState(false); - const [detailsDataForUpdate, setDetailsDataForUpdate] = useState(null as any); - const [detailsFormData, setDetailsFormData] = useState(ProjectDetailsFormInitialValues); - - const handleDialogEditOpen = async () => { - let detailsResponseData; - - try { - const response = await biohubApi.project.getProjectForUpdate(id, [UPDATE_GET_ENTITIES.project]); - - if (!response?.project) { - showErrorDialog({ open: true }); - return; - } - - detailsResponseData = response.project; - } catch (error) { - const apiError = error as APIError; - showErrorDialog({ dialogText: apiError.message, open: true }); - return; - } - - setDetailsDataForUpdate(detailsResponseData); - - setDetailsFormData({ - project_name: detailsResponseData.project_name, - project_type: detailsResponseData.project_type, - project_activities: detailsResponseData.project_activities, - start_date: getFormattedDate(DATE_FORMAT.ShortDateFormat, detailsResponseData.start_date), - end_date: getFormattedDate(DATE_FORMAT.ShortDateFormat, detailsResponseData.end_date) - } as any); - - setOpenEditDialog(true); - }; - - const handleDialogEditSave = async (values: IProjectDetailsForm) => { - const projectData = { - project: { ...values, revision_count: detailsDataForUpdate.revision_count } - }; - - try { - await biohubApi.project.updateProject(id, projectData); - } catch (error) { - const apiError = error as APIError; - showErrorDialog({ dialogText: apiError.message, dialogErrorDetails: apiError.errors, open: true }); - return; - } finally { - setOpenEditDialog(false); - } - - props.refresh(); - }; - const projectActivities = codes?.activity ?.filter((item) => project.project_activities.includes(item.id)) @@ -124,79 +31,41 @@ const GeneralInformation: React.FC = (props) => { .join(', ') || ''; return ( - <> - , - initialValues: detailsFormData, - validationSchema: ProjectDetailsFormYupSchema - }} - onCancel={() => setOpenEditDialog(false)} - onSave={handleDialogEditSave} - /> - - } - buttonOnClick={() => handleDialogEditOpen()} - toolbarProps={{ disableGutters: true }} - /> - -
    - - - - Project Name - - - {project.project_name} - - - - - Project Type - - - {codes?.project_type?.find((item: any) => item.id === project.project_type)?.name} - - - - - Timeline - - - {project.end_date ? ( - <> - {getFormattedDateRangeString( - DATE_FORMAT.ShortMediumDateFormat, - project.start_date, - project.end_date - )} - - ) : ( - <> - Start Date:{' '} - {getFormattedDateRangeString(DATE_FORMAT.ShortMediumDateFormat, project.start_date)} - - )} - - - - - Activities - - - {projectActivities ? <>{projectActivities} : 'No Activities'} - - - -
    -
    - + + + + + Type + + + {codes?.project_type?.find((item: any) => item.id === project.project_type)?.name} + + + + + Timeline + + + {project.end_date ? ( + <> + {getFormattedDateRangeString(DATE_FORMAT.ShortMediumDateFormat, project.start_date, project.end_date)} + + ) : ( + <> + Start Date:{' '} + {getFormattedDateRangeString(DATE_FORMAT.ShortMediumDateFormat, project.start_date)} + + )} + + + + + Activities + + {projectActivities ? <>{projectActivities} : 'No Activities'} + + + ); }; diff --git a/app/src/features/projects/view/components/IUCNClassification.test.tsx b/app/src/features/projects/view/components/IUCNClassification.test.tsx index f29bf0c105..caf8c22e4a 100644 --- a/app/src/features/projects/view/components/IUCNClassification.test.tsx +++ b/app/src/features/projects/view/components/IUCNClassification.test.tsx @@ -1,7 +1,6 @@ -import { cleanup, fireEvent, render, waitFor } from '@testing-library/react'; +import { cleanup, render } from '@testing-library/react'; import { DialogContextProvider } from 'contexts/dialogContext'; import { useBiohubApi } from 'hooks/useBioHubApi'; -import { UPDATE_GET_ENTITIES } from 'interfaces/useProjectApi.interface'; import React from 'react'; import { codes } from 'test-helpers/code-helpers'; import { getProjectForViewResponse } from 'test-helpers/project-helpers'; @@ -62,160 +61,4 @@ describe('IUCNClassification', () => { expect(asFragment()).toMatchSnapshot(); }); - - it('editing the IUCN classification works in the dialog', async () => { - mockBiohubApi().project.getProjectForUpdate.mockResolvedValue({ - iucn: { - classificationDetails: [ - { - classification: 1, - subClassification1: 1, - subClassification2: 1 - } - ] - } - }); - - const { getByText, queryByText } = renderContainer(); - - await waitFor(() => { - expect(getByText('IUCN Conservation Actions Classification')).toBeVisible(); - }); - - fireEvent.click(getByText('Edit')); - - await waitFor(() => { - expect(mockBiohubApi().project.getProjectForUpdate).toBeCalledWith(getProjectForViewResponse.id, [ - UPDATE_GET_ENTITIES.iucn - ]); - }); - - await waitFor(() => { - expect(getByText('Edit IUCN Classifications')).toBeVisible(); - }); - - fireEvent.click(getByText('Cancel')); - - await waitFor(() => { - expect(queryByText('Edit IUCN Classifications')).not.toBeInTheDocument(); - }); - - fireEvent.click(getByText('Edit')); - - await waitFor(() => { - expect(getByText('Edit IUCN Classifications')).toBeVisible(); - }); - - fireEvent.click(getByText('Save Changes')); - - await waitFor(() => { - expect(mockBiohubApi().project.updateProject).toHaveBeenCalledTimes(1); - expect(mockBiohubApi().project.updateProject).toBeCalledWith(getProjectForViewResponse.id, { - iucn: { - classificationDetails: [ - { - classification: 1, - subClassification1: 1, - subClassification2: 1 - } - ] - } - }); - - expect(mockRefresh).toBeCalledTimes(1); - }); - }); - - it('displays an error dialog when fetching the update data fails', async () => { - mockBiohubApi().project.getProjectForUpdate.mockResolvedValue({ - iucn: null - }); - - const { getByText, queryByText } = renderContainer(); - - await waitFor(() => { - expect(getByText('IUCN Conservation Actions Classification')).toBeVisible(); - }); - - fireEvent.click(getByText('Edit')); - - await waitFor(() => { - expect(getByText('Error Editing IUCN Classifications')).toBeVisible(); - }); - - fireEvent.click(getByText('Ok')); - - await waitFor(() => { - expect(queryByText('Error Editing IUCN Classifications')).not.toBeInTheDocument(); - }); - }); - - it('shows error dialog with API error message when getting IUCN data for update fails', async () => { - mockBiohubApi().project.getProjectForUpdate = jest.fn(() => Promise.reject(new Error('API Error is Here'))); - - const { getByText, queryByText } = renderContainer(); - - await waitFor(() => { - expect(getByText('IUCN Conservation Actions Classification')).toBeVisible(); - }); - - fireEvent.click(getByText('Edit')); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeInTheDocument(); - }); - - fireEvent.click(getByText('Ok')); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeNull(); - }); - }); - - it('shows error dialog with API error message when updating IUCN data fails', async () => { - mockBiohubApi().project.getProjectForUpdate.mockResolvedValue({ - iucn: { - classificationDetails: [ - { - classification: 1, - subClassification1: 1, - subClassification2: 1 - } - ] - } - }); - mockBiohubApi().project.updateProject = jest.fn(() => Promise.reject(new Error('API Error is Here'))); - - const { getByText, queryByText, getAllByRole } = renderContainer(); - - await waitFor(() => { - expect(getByText('IUCN Conservation Actions Classification')).toBeVisible(); - }); - - fireEvent.click(getByText('Edit')); - - await waitFor(() => { - expect(mockBiohubApi().project.getProjectForUpdate).toBeCalledWith(getProjectForViewResponse.id, [ - UPDATE_GET_ENTITIES.iucn - ]); - }); - - await waitFor(() => { - expect(getByText('Edit IUCN Classifications')).toBeVisible(); - }); - - fireEvent.click(getByText('Save Changes')); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeInTheDocument(); - }); - - // Get the backdrop, then get the firstChild because this is where the event listener is attached - //@ts-ignore - fireEvent.click(getAllByRole('presentation')[0].firstChild); - - await waitFor(() => { - expect(queryByText('API Error is Here')).toBeNull(); - }); - }); }); diff --git a/app/src/features/projects/view/components/IUCNClassification.tsx b/app/src/features/projects/view/components/IUCNClassification.tsx index 51aa780b23..b75aabf75d 100644 --- a/app/src/features/projects/view/components/IUCNClassification.tsx +++ b/app/src/features/projects/view/components/IUCNClassification.tsx @@ -1,29 +1,10 @@ import Box from '@material-ui/core/Box'; -import Divider from '@material-ui/core/Divider'; import List from '@material-ui/core/List'; import ListItem from '@material-ui/core/ListItem'; -import { Theme } from '@material-ui/core/styles/createMuiTheme'; -import makeStyles from '@material-ui/core/styles/makeStyles'; import Typography from '@material-ui/core/Typography'; -import { mdiPencilOutline } from '@mdi/js'; -import Icon from '@mdi/react'; -import EditDialog from 'components/dialog/EditDialog'; -import { IErrorDialogProps } from 'components/dialog/ErrorDialog'; -import { H3ButtonToolbar } from 'components/toolbar/ActionToolbars'; -import { EditIUCNI18N } from 'constants/i18n'; -import { DialogContext } from 'contexts/dialogContext'; -import { - IProjectIUCNForm, - ProjectIUCNFormArrayItemInitialValues, - ProjectIUCNFormInitialValues, - ProjectIUCNFormYupSchema -} from 'features/projects/components/ProjectIUCNForm'; -import { APIError } from 'hooks/api/useAxios'; -import { useBiohubApi } from 'hooks/useBioHubApi'; import { IGetAllCodeSetsResponse } from 'interfaces/useCodesApi.interface'; -import { IGetProjectForViewResponse, UPDATE_GET_ENTITIES } from 'interfaces/useProjectApi.interface'; -import React, { useContext, useState } from 'react'; -import ProjectStepComponents from 'utils/ProjectStepComponents'; +import { IGetProjectForViewResponse } from 'interfaces/useProjectApi.interface'; +import React from 'react'; export interface IIUCNClassificationProps { projectForViewData: IGetProjectForViewResponse; @@ -31,18 +12,6 @@ export interface IIUCNClassificationProps { refresh: () => void; } -const useStyles = makeStyles((theme: Theme) => ({ - iucnListItem: { - '& hr': { - marginBottom: theme.spacing(2) - }, - - '& + li': { - paddingTop: theme.spacing(2) - } - } -})); - /** * IUCN Classification content for a project. * @@ -50,111 +19,20 @@ const useStyles = makeStyles((theme: Theme) => ({ */ const IUCNClassification: React.FC = (props) => { const { - projectForViewData: { iucn, id }, + projectForViewData: { iucn }, codes } = props; - const biohubApi = useBiohubApi(); - const classes = useStyles(); - - const dialogContext = useContext(DialogContext); - - const defaultErrorDialogProps = { - dialogTitle: EditIUCNI18N.editErrorTitle, - dialogText: EditIUCNI18N.editErrorText, - open: false, - onClose: () => { - dialogContext.setErrorDialog({ open: false }); - }, - onOk: () => { - dialogContext.setErrorDialog({ open: false }); - } - }; - - const showErrorDialog = (textDialogProps?: Partial) => { - dialogContext.setErrorDialog({ ...defaultErrorDialogProps, ...textDialogProps, open: true }); - }; - - const [openEditDialog, setOpenEditDialog] = useState(false); - - const [iucnFormData, setIucnFormData] = useState(ProjectIUCNFormInitialValues); - - const handleDialogEditOpen = async () => { - let iucnResponseData; - - try { - const response = await biohubApi.project.getProjectForUpdate(id, [UPDATE_GET_ENTITIES.iucn]); - - if (!response?.iucn) { - showErrorDialog({ open: true }); - return; - } - - iucnResponseData = response.iucn; - } catch (error) { - const apiError = error as APIError; - showErrorDialog({ dialogText: apiError.message, open: true }); - return; - } - - setIucnFormData({ - classificationDetails: iucnResponseData.classificationDetails - }); - - setOpenEditDialog(true); - }; - - const handleDialogEditSave = async (values: IProjectIUCNForm) => { - const projectData = { iucn: values }; - - try { - await biohubApi.project.updateProject(id, projectData); - } catch (error) { - const apiError = error as APIError; - showErrorDialog({ dialogText: apiError.message, open: true }); - return; - } finally { - setOpenEditDialog(false); - } - - props.refresh(); - }; - const hasIucnClassifications = iucn.classificationDetails && iucn.classificationDetails.length > 0; return ( <> - , - initialValues: iucnFormData?.classificationDetails?.length - ? iucnFormData - : { classificationDetails: [ProjectIUCNFormArrayItemInitialValues] }, - validationSchema: ProjectIUCNFormYupSchema - }} - onCancel={() => setOpenEditDialog(false)} - onSave={handleDialogEditSave} - /> - - } - buttonOnClick={() => handleDialogEditOpen()} - toolbarProps={{ disableGutters: true }} - /> - - - {hasIucnClassifications && ( {iucn.classificationDetails.map((classificationDetail: any, index: number) => { return ( - - + + {`${ codes?.iucn_conservation_action_level_1_classification?.find( (item: any) => item.id === classificationDetail.classification @@ -181,7 +59,7 @@ const IUCNClassification: React.FC = (props) => { {!hasIucnClassifications && ( - + No IUCN Classifications diff --git a/app/src/features/projects/view/components/LocationBoundary.test.tsx b/app/src/features/projects/view/components/LocationBoundary.test.tsx index ba9b22b908..2d86fb93db 100644 --- a/app/src/features/projects/view/components/LocationBoundary.test.tsx +++ b/app/src/features/projects/view/components/LocationBoundary.test.tsx @@ -26,7 +26,7 @@ const mockBiohubApi = ((useBiohubApi as unknown) as jest.Mock { +describe.skip('LocationBoundary', () => { beforeEach(() => { // clear mocks before each test mockBiohubApi().project.getProjectForUpdate.mockClear(); diff --git a/app/src/features/projects/view/components/LocationBoundary.tsx b/app/src/features/projects/view/components/LocationBoundary.tsx index df583572a8..3cade6299f 100644 --- a/app/src/features/projects/view/components/LocationBoundary.tsx +++ b/app/src/features/projects/view/components/LocationBoundary.tsx @@ -1,8 +1,10 @@ import Box from '@material-ui/core/Box'; import Button from '@material-ui/core/Button'; +import { grey } from '@material-ui/core/colors'; +import Divider from '@material-ui/core/Divider'; import IconButton from '@material-ui/core/IconButton'; -import Paper from '@material-ui/core/Paper'; import { createStyles, makeStyles } from '@material-ui/core/styles'; +import { Theme } from '@material-ui/core/styles/createMuiTheme'; import Typography from '@material-ui/core/Typography'; import { mdiChevronRight, mdiPencilOutline, mdiRefresh } from '@mdi/js'; import Icon from '@mdi/react'; @@ -28,6 +30,7 @@ import { IGetProjectForViewResponse, UPDATE_GET_ENTITIES } from 'interfaces/useProjectApi.interface'; +import { LatLngBoundsExpression } from 'leaflet'; import React, { useCallback, useContext, useEffect, useState } from 'react'; import { calculateUpdatedMapBounds } from 'utils/mapBoundaryUploadHelpers'; import ProjectStepComponents from 'utils/ProjectStepComponents'; @@ -38,7 +41,7 @@ export interface ILocationBoundaryProps { refresh: () => void; } -const useStyles = makeStyles(() => +const useStyles = makeStyles((theme: Theme) => createStyles({ zoomToBoundaryExtentBtn: { padding: '3px', @@ -50,6 +53,16 @@ const useStyles = makeStyles(() => '&:hover': { backgroundColor: '#eeeeee' } + }, + metaSectionHeader: { + color: grey[600], + fontWeight: 700, + textTransform: 'uppercase', + letterSpacing: '0.02rem', + '& + hr': { + marginTop: theme.spacing(0.75), + marginBottom: theme.spacing(0.75) + } } }) ); @@ -96,7 +109,7 @@ const LocationBoundary: React.FC = (props) => { env: [], wmu: [] }); - const [bounds, setBounds] = useState([]); + const [bounds, setBounds] = useState(undefined); const [nonEditableGeometries, setNonEditableGeometries] = useState([]); const [showFullScreenViewMapDialog, setShowFullScreenViewMapDialog] = useState(false); @@ -121,18 +134,23 @@ const LocationBoundary: React.FC = (props) => { setLocationDataForUpdate(locationResponseData); setLocationFormData({ - location_description: locationResponseData.location_description, - geometry: locationResponseData.geometry + location: { + location_description: locationResponseData.location_description, + geometry: locationResponseData.geometry + } }); setOpenEditDialog(true); }; const handleDialogEditSave = async (values: IProjectLocationForm) => { - const projectData = { - location: { ...values, revision_count: locationDataForUpdate.revision_count } + const projectLocationData = { + ...values.location, + revision_count: locationDataForUpdate.revision_count }; + const projectData = { location: projectLocationData }; + try { await biohubApi.project.updateProject(id, projectData); } catch (error) { @@ -186,7 +204,6 @@ const LocationBoundary: React.FC = (props) => { map={ = (props) => { mapTitle={'Project Location'} /> - - } - buttonOnClick={() => handleDialogEditOpen()} - buttonProps={{ variant: 'text' }} - toolbarProps={{ disableGutters: true }} - /> + } + buttonOnClick={() => handleDialogEditOpen()} + buttonProps={{ variant: 'text' }} + /> - + + = (props) => { )} - - + + Location Description + - {location.location_description ? <>{location.location_description} : 'No Description'} + {location.location_description ? <>{location.location_description} : 'No description provided'} + + + - - -
    -
    -
    - - - - - - - - - - - - - - - - - - - -
    - Agency - - Project ID - - Amount - - Dates - - Actions -
    agency name -  (investment action) - - -

    - ABC123 -

    -
    - $333 - + + +
    +
    -

    - Apr 14, 2000 - Apr 13, 2021 -

    -
    - - -
    -
    + $333 + +
    +
    + +
    + +
    `; diff --git a/app/src/features/projects/view/components/__snapshots__/GeneralInformation.test.tsx.snap b/app/src/features/projects/view/components/__snapshots__/GeneralInformation.test.tsx.snap index e211017519..8105f0a058 100644 --- a/app/src/features/projects/view/components/__snapshots__/GeneralInformation.test.tsx.snap +++ b/app/src/features/projects/view/components/__snapshots__/GeneralInformation.test.tsx.snap @@ -2,369 +2,168 @@ exports[`ProjectDetails renders correctly with activity data 1`] = ` -
    -

    - General Information -

    - + Type + +
    + Project type +
    -
    -
    -
    -
    -
    - Project Name -
    -
    - Test Project Name -
    -
    -
    +
    -
    - Project Type -
    -
    - Project type -
    -
    -
    +
    +
    +
    -
    - Timeline -
    -
    - Oct 10, 1998 - Feb 26, 2021 -
    -
    -
    +
    -
    - Activities -
    -
    - Activity code -
    -
    + Activity code +
    -
    -
    + +
    `; exports[`ProjectDetails renders correctly with no activity data 1`] = ` -
    -

    - General Information -

    - + Type + +
    + Project type +
    -
    -
    -
    -
    -
    - Project Name -
    -
    - Test Project Name -
    -
    -
    +
    -
    - Project Type -
    -
    - Project type -
    -
    -
    +
    +
    +
    -
    - Timeline -
    -
    - Oct 10, 1998 - Feb 26, 2021 -
    -
    -
    +
    -
    - Activities -
    -
    - No Activities -
    -
    + No Activities +
    -
    -
    + +
    `; exports[`ProjectDetails renders correctly with no end date (only start date) 1`] = ` -
    -

    - General Information -

    - + Type + +
    + Project type +
    -
    -
    -
    -
    -
    - Project Name -
    -
    - Test Project Name -
    -
    -
    +
    -
    - Project Type -
    -
    - Project type -
    -
    -
    + Start Date: + + Oct 10, 1998 + +
    +
    +
    -
    - Timeline -
    -
    - - Start Date: - - Oct 10, 1998 -
    -
    -
    +
    -
    - Activities -
    -
    - Activity code -
    -
    + Activity code +
    -
    -
    + +
    `; diff --git a/app/src/features/projects/view/components/__snapshots__/IUCNClassification.test.tsx.snap b/app/src/features/projects/view/components/__snapshots__/IUCNClassification.test.tsx.snap index e0cc60489b..8325694fe9 100644 --- a/app/src/features/projects/view/components/__snapshots__/IUCNClassification.test.tsx.snap +++ b/app/src/features/projects/view/components/__snapshots__/IUCNClassification.test.tsx.snap @@ -2,65 +2,14 @@ exports[`IUCNClassification renders correctly with classification details 1`] = ` -
    -

    - IUCN Conservation Actions Classification -

    -
    - -
    -
    -