Release Dev to Test #1182

Merged: 13 commits, Dec 12, 2023
6 changes: 2 additions & 4 deletions .github/workflows/cleanClosedPR.yml
@@ -1,18 +1,16 @@
# Clean out all deployment artifacts when a PR is closed, but not merged.
# Will attempt to remove all artifacts from any PR that was opened against any branch (and then closed (not merged)), except for test and prod.
# Will attempt to remove all artifacts from any PR that was opened against any branch (and then closed (not merged)).
name: Clean Closed PR Artifacts

on:
pull_request:
types: [closed]
branches-ignore:
- test
- prod

jobs:
clean:
name: Clean Deployment Artifacts for API and App in Dev and Tools environment
runs-on: ubuntu-latest
timeout-minutes: 20
# Don't run if the PR was merged
if: ${{ github.event.pull_request.merged != true }}
env:
83 changes: 83 additions & 0 deletions .github/workflows/cleanMergedPR.yml
@@ -0,0 +1,83 @@
# Clean out all deployment artifacts when a PR is merged against a non-standard base branch (aka: neither dev, test, nor prod)
# Standard branches (aka: dev, test, prod) have their own cleanup routine that runs as part of the deployStatic action.
name: Clean Merged PR Artifacts

on:
pull_request:
types: [closed]
branches-ignore:
- dev
- test
- prod

jobs:
clean:
name: Clean Deployment Artifacts for API and App in Dev and Tools environment
runs-on: ubuntu-latest
timeout-minutes: 20
# Only run if the PR was merged
if: ${{ github.event.pull_request.merged == true }}
env:
PR_NUMBER: ${{ github.event.number }}
steps:
# Install Node - for `node` and `npm` commands
# Note: This already uses actions/cache internally, so repeat calls in subsequent jobs are not a performance hit
- name: Setup Node.js
uses: actions/setup-node@v3
with:
node-version: 14

# Load repo from cache
- name: Cache repo
uses: actions/cache@v3
id: cache-repo
env:
cache-name: cache-repo
with:
path: ${{ github.workspace }}/*
key: ${{ runner.os }}-build-${{ env.cache-name }}-${{ github.sha }}

# Checkout the branch if not restored via cache
- name: Checkout Target Branch
if: steps.cache-repo.outputs.cache-hit != 'true'
uses: actions/checkout@v3
with:
persist-credentials: false

# Log in to OpenShift.
# Note: The secrets needed to log in are NOT available if the PR comes from a FORK.
# PRs must originate from a branch off the original repo or else all OpenShift `oc` commands will fail.
- name: Log in to OpenShift
run: oc login --token=${{ secrets.TOOLS_SA_TOKEN }} --server=https://api.silver.devops.gov.bc.ca:6443

# Clean the app deployment artifacts
- name: Clean APP Deployment
working-directory: "app/.pipeline/"
run: |
npm ci
DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=build
DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=dev

# Clean the database build/deployment artifacts
- name: Clean Database Artifacts
working-directory: "database/.pipeline/"
run: |
npm ci
DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=build
DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=dev

# Clean the api deployment artifacts
- name: Clean API Deployment
working-directory: "api/.pipeline/"
run: |
npm ci
DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=build
DEBUG=* npm run clean -- --pr=$PR_NUMBER --env=dev

# Clean the remaining build/deployment artifacts
- name: Clean remaining Artifacts
env:
POD_SELECTOR: biohubbc
run: |
oc --namespace af2668-dev get all,pvc,secret,pods,ReplicationController,DeploymentConfig,HorizontalPodAutoscaler,imagestreamtag -o name | grep $POD_SELECTOR | grep $PR_NUMBER | awk '{print "oc delete --ignore-not-found " $1}' | bash
oc --namespace af2668-tools get all,pvc,secret,pods,ReplicationController,DeploymentConfig,HorizontalPodAutoscaler,imagestreamtag -o name | grep $POD_SELECTOR | grep $PR_NUMBER | awk '{print "oc delete --ignore-not-found " $1}' | bash
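Note on the final cleanup step above: each `oc ... | awk | bash` line generates one `oc delete` command per matching resource. A minimal sketch of how that pipeline expands, assuming a hypothetical PR number 999, an active `oc login` session, and a shortened resource list (the resource names shown are illustrative only):

# Preview the delete commands without running them by dropping the final "| bash":
oc --namespace af2668-dev get all,pvc,secret,pods -o name \
  | grep biohubbc \
  | grep 999 \
  | awk '{print "oc delete --ignore-not-found " $1}'
# Example of the commands the awk stage would emit (one per matching resource):
#   oc delete --ignore-not-found deploymentconfig.apps.openshift.io/biohubbc-api-dev-999
#   oc delete --ignore-not-found persistentvolumeclaim/biohubbc-db-postgresql-dev-999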
21 changes: 18 additions & 3 deletions Makefile
@@ -122,7 +122,6 @@ db-container: ## Executes into database container.
@echo "Make: Shelling into database container"
@echo "==============================================="
@export PGPASSWORD=$(DB_ADMIN_PASS)
@

app-container: ## Executes into the app container.
@echo "==============================================="
@@ -329,8 +328,24 @@ log-db-setup: ## Runs `docker logs <container> -f` for the database setup container
@docker logs $(DOCKER_PROJECT_NAME)-db-setup-$(DOCKER_NAMESPACE)-container -f $(args)

## ------------------------------------------------------------------------------
## Help
## Typescript Trace Commands
## Runs ts-trace to find typescript compilation issues and hotspots
## Docs: https://github.com/microsoft/typescript-analyze-trace
## ------------------------------------------------------------------------------
trace-app:
@echo "==============================================="
@echo "Typscript trace - searching App hotspots"
@echo "==============================================="
@cd app && npx tsc -p ./tsconfig.json --generateTrace ts-traces || npx @typescript/analyze-trace --skipMillis 100 --forceMillis 300 --expandTypes ts-traces

help: ## Display this help screen.
trace-api:
@echo "==============================================="
@echo "Typscript trace - searching for Api hotspots"
@echo "==============================================="
@cd api && npx tsc -p ./tsconfig.json --generateTrace ts-traces || npx @typescript/analyze-trace --skipMillis 100 --forceMillis 300 --expandTypes ts-traces

## ------------------------------------------------------------------------------
## Help
## ------------------------------------------------------------------------------
help: ## Display this help screen.
@grep -h -E '^[0-9a-zA-Z_-]+:.*?##.*$$|^##.*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[33m%-20s\033[0m %s\n", $$1, $$2}' | awk 'BEGIN {FS = "## "}; {printf "\033[36m%-1s\033[0m %s\n", $$2, $$1}'
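Note on the new trace targets above: a brief usage sketch, assuming `make`, Node 14+, and network access for `npx` to fetch `@typescript/analyze-trace` (run from the repository root):

# Generate a TypeScript compilation trace for the app and analyze it for hotspots
make trace-app

# Same for the api
make trace-api

# Raw trace output is written to app/ts-traces and api/ts-traces
# (the api copy is git-ignored by the api/.gitignore change below)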
2 changes: 2 additions & 0 deletions api/.gitignore
@@ -24,3 +24,5 @@ coverage
npm-debug.log*
yarn-debug.log*
yarn-error.log*

ts-traces
14 changes: 8 additions & 6 deletions api/.pipeline/config.js
@@ -59,7 +59,6 @@ const phases = {
instance: `${name}-build-${changeId}`,
version: `${version}-${changeId}`,
tag: tag,
env: 'build',
tz: config.timezone.api,
branch: branch,
cpuRequest: '50m',
@@ -85,13 +84,14 @@
backboneIntakeEnabled: false,
bctwApiHost: 'https://moe-bctw-api-dev.apps.silver.devops.gov.bc.ca',
critterbaseApiHost: 'https://moe-critterbase-api-dev.apps.silver.devops.gov.bc.ca/api',
env: 'dev',
nodeEnv: 'development',
elasticsearchURL: 'http://es01.a0ec71-dev:9200',
elasticsearchTaxonomyIndex: 'taxonomy_3.0.0',
s3KeyPrefix: (isStaticDeployment && 'sims') || `local/${deployChangeId}/sims`,
tz: config.timezone.api,
sso: config.sso.dev,
logLevel: 'debug',
logLevel: 'silly',
nodeOptions: '--max_old_space_size=1500', // 75% of memoryLimit (bytes)
cpuRequest: '50m',
cpuLimit: '400m',
memoryRequest: '100Mi',
@@ -117,13 +117,14 @@
backboneIntakeEnabled: false,
bctwApiHost: 'https://moe-bctw-api-test.apps.silver.devops.gov.bc.ca',
critterbaseApiHost: 'https://moe-critterbase-api-test.apps.silver.devops.gov.bc.ca/api',
env: 'test',
nodeEnv: 'production',
elasticsearchURL: 'http://es01.a0ec71-dev:9200',
elasticsearchTaxonomyIndex: 'taxonomy_3.0.0',
s3KeyPrefix: 'sims',
tz: config.timezone.api,
sso: config.sso.test,
logLevel: 'info',
nodeOptions: '--max_old_space_size=2250', // 75% of memoryLimit (bytes)
cpuRequest: '50m',
cpuLimit: '1000m',
memoryRequest: '100Mi',
@@ -149,13 +150,14 @@
backboneIntakeEnabled: false,
bctwApiHost: 'https://moe-bctw-api-prod.apps.silver.devops.gov.bc.ca',
critterbaseApiHost: 'https://moe-critterbase-api-prod.apps.silver.devops.gov.bc.ca/api',
env: 'prod',
nodeEnv: 'production',
elasticsearchURL: 'http://es01.a0ec71-prod:9200',
elasticsearchTaxonomyIndex: 'taxonomy_3.0.0',
s3KeyPrefix: 'sims',
tz: config.timezone.api,
sso: config.sso.prod,
logLevel: 'info',
logLevel: 'error',
nodeOptions: '--max_old_space_size=2250', // 75% of memoryLimit (bytes)
cpuRequest: '50m',
cpuLimit: '1000m',
memoryRequest: '100Mi',
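Note on the new `nodeOptions` values above: each phase now pins V8's old-space size to roughly 75% of its container memory limit (1500 for dev, 2250 for test and prod). A quick local sanity check of the effective heap ceiling under the dev setting (a sketch; any Node 14+ install will do, and the reported figure includes some V8 overhead on top of the old space):

node --max_old_space_size=1500 -e "console.log((require('v8').getHeapStatistics().heap_size_limit / 1024 / 1024).toFixed(0) + ' MB')"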
2 changes: 1 addition & 1 deletion api/.pipeline/lib/api.build.js
@@ -11,7 +11,7 @@ const path = require('path');
const apiBuild = (settings) => {
const phases = settings.phases;
const options = settings.options;
const phase = 'build';
const phase = settings.phase;

const oc = new OpenShiftClientX(Object.assign({ namespace: phases[phase].namespace }, options));

6 changes: 4 additions & 2 deletions api/.pipeline/lib/api.deploy.js
@@ -12,7 +12,7 @@ const path = require('path');
const apiDeploy = async (settings) => {
const phases = settings.phases;
const options = settings.options;
const phase = options.env;
const phase = settings.options.env;

const oc = new OpenShiftClientX(Object.assign({ namespace: phases[phase].namespace }, options));

@@ -31,7 +31,9 @@ const apiDeploy = async (settings) => {
HOST: phases[phase].host,
APP_HOST: phases[phase].appHost,
CHANGE_ID: phases.build.changeId || changeId,
NODE_ENV: phases[phase].env,
// Node
NODE_ENV: phases[phase].nodeEnv,
NODE_OPTIONS: phases[phase].nodeOptions,
// BioHub Platform (aka: Backbone)
BACKBONE_API_HOST: phases[phase].backboneApiHost,
BACKBONE_INTAKE_PATH: phases[phase].backboneIntakePath,
2 changes: 1 addition & 1 deletion api/.pipeline/lib/clean.js
@@ -10,7 +10,7 @@ const { OpenShiftClientX } = require('pipeline-cli');
const clean = (settings) => {
const phases = settings.phases;
const options = settings.options;
const target_phase = options.env;
const target_phase = options.phase;

const oc = new OpenShiftClientX(Object.assign({ namespace: phases.build.namespace }, options));

2 changes: 1 addition & 1 deletion api/.pipeline/scripts/api.deploy.js
@@ -4,7 +4,7 @@ const process = require('process');
const { apiDeploy } = require('../lib/api.deploy.js');
const config = require('../config.js');

const settings = { ...config, phase: config.options.env };
const settings = { ...config, phase: config.options.phase };

process.on('unhandledRejection', (reason, promise) => {
console.log('api deploy - unhandled rejection:', promise, 'reason:', reason);
2 changes: 1 addition & 1 deletion api/.pipeline/scripts/clean.js
@@ -3,7 +3,7 @@
const { clean } = require('../lib/clean.js');
const config = require('../config.js');

const settings = { ...config, phase: config.options.env };
const settings = { ...config, phase: config.options.phase };

// Cleans all build and deployment artifacts (pods, etc)
clean(settings);
9 changes: 6 additions & 3 deletions api/.pipeline/templates/api.dc.yaml
@@ -25,7 +25,8 @@ parameters:
- name: NODE_ENV
description: Application Environment type variable
required: true
value: 'dev'
value: 'development'
- name: NODE_OPTIONS
- name: API_PORT_DEFAULT
value: '6100'
- name: API_PORT_DEFAULT_NAME
@@ -206,7 +207,8 @@ objects:
role: api
spec:
containers:
- env:
- name: api
env:
- name: API_HOST
value: ${HOST}
- name: API_PORT
@@ -219,6 +221,8 @@ objects:
value: ${CHANGE_ID}
- name: NODE_ENV
value: ${NODE_ENV}
- name: NODE_OPTIONS
value: ${NODE_OPTIONS}
# BioHub Platform (aka: Backbone)
- name: BACKBONE_API_HOST
value: ${BACKBONE_API_HOST}
@@ -342,7 +346,6 @@ objects:
value: ${GCNOTIFY_SMS_URL}
image: ' '
imagePullPolicy: Always
name: api
ports:
- containerPort: ${{API_PORT_DEFAULT}}
protocol: TCP
7 changes: 4 additions & 3 deletions api/package.json
@@ -19,7 +19,8 @@
"lint": "eslint . --ignore-pattern 'node_modules' --ext .ts",
"lint-fix": "eslint . --fix --ignore-pattern 'node_modules' --ext .ts",
"format": "prettier --check \"./src/**/*.{js,jsx,ts,tsx,css,scss}\"",
"format-fix": "prettier --write \"./src/**/*.{js,jsx,ts,tsx,json,css,scss}\""
"format-fix": "prettier --write \"./src/**/*.{js,jsx,ts,tsx,json,css,scss}\"",
"fix": "npm-run-all -l -s lint-fix format-fix"
},
"engines": {
"node": ">= 14.0.0",
@@ -63,7 +64,7 @@
"winston": "~3.3.3",
"xlsx": "https://cdn.sheetjs.com/xlsx-0.19.3/xlsx-0.19.3.tgz",
"xml2js": "~0.4.23",
"zod": "^3.21.4"
"zod": "~3.21.4"
},
"devDependencies": {
"@istanbuljs/nyc-config-typescript": "~1.0.1",
@@ -97,7 +98,7 @@
"gulp-typescript": "~5.0.1",
"mocha": "~8.4.0",
"nodemon": "~2.0.14",
"npm-run-all": "~4.1.5",
"npm-run-all": "^4.1.5",
"nyc": "~15.1.0",
"prettier": "~2.2.1",
"prettier-plugin-organize-imports": "~2.3.4",
7 changes: 7 additions & 0 deletions api/src/__mocks__/db.ts
@@ -84,6 +84,13 @@ export class MockRes {

return this;
});

headerValue: any;
setHeader = sinon.fake((header: any) => {
this.headerValue = header;

return this;
});
}

/**
5 changes: 3 additions & 2 deletions api/src/database/db-utils.ts
@@ -5,6 +5,7 @@ import {
isBceidBusinessUserInformation,
isDatabaseUserInformation,
isIdirUserInformation,
isServiceClientUserInformation,
KeycloakUserInformation
} from '../utils/keycloak-utils';

@@ -115,8 +116,8 @@ export const getGenericizedKeycloakUserInformation = (
): GenericizedKeycloakUserInformation | null => {
let data: GenericizedKeycloakUserInformation | null;

if (isDatabaseUserInformation(keycloakUserInformation)) {
// Don't patch internal database user records
if (isDatabaseUserInformation(keycloakUserInformation) || isServiceClientUserInformation(keycloakUserInformation)) {
// Don't patch internal database/service client user records
return null;
}

7 changes: 4 additions & 3 deletions api/src/database/db.test.ts
@@ -6,6 +6,7 @@ import SQL from 'sql-template-strings';
import { SOURCE_SYSTEM, SYSTEM_IDENTITY_SOURCE } from '../constants/database';
import { ApiExecuteSQLError } from '../errors/api-error';
import { HTTPError } from '../errors/http-error';
import { DatabaseUserInformation, IdirUserInformation, KeycloakUserInformation } from '../utils/keycloak-utils';
import * as db from './db';
import {
getAPIUserDBConnection,
@@ -42,7 +43,7 @@ describe('db', () => {
describe('getDBConnection', () => {
it('throws an error if keycloak token is undefined', () => {
try {
getDBConnection((null as unknown) as object);
getDBConnection((null as unknown) as KeycloakUserInformation);

expect.fail();
} catch (actualError) {
@@ -51,15 +52,15 @@
});

it('returns a database connection instance', () => {
const connection = getDBConnection({});
const connection = getDBConnection({} as DatabaseUserInformation);

expect(connection).not.to.be.null;
});

describe('DBConnection', () => {
const sinonSandbox = Sinon.createSandbox();

const mockKeycloakToken = {
const mockKeycloakToken: IdirUserInformation = {
idir_user_guid: 'testguid',
identity_provider: 'idir',
idir_username: 'testuser',
Expand Down