From 19f24979588d11d4df665bdc0f47805d0fa10f71 Mon Sep 17 00:00:00 2001
From: timbset
Date: Wed, 7 Jul 2021 13:43:52 +0300
Subject: [PATCH 01/18] Update PR CI workflow

---
 .github/workflows/pr-dev.yml | 66 ++++++++++++++++++++----------------
 1 file changed, 36 insertions(+), 30 deletions(-)

diff --git a/.github/workflows/pr-dev.yml b/.github/workflows/pr-dev.yml
index b01dfeab56..f7913cc6aa 100644
--- a/.github/workflows/pr-dev.yml
+++ b/.github/workflows/pr-dev.yml
@@ -25,7 +25,8 @@ jobs:
         uses: actions/cache@v2
         with:
           path: 'node_modules'
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-root
+          key: ${{ runner.os }}-modules-root-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}
+          restore-keys: ${{ runner.os }}-modules-root-

       - name: Cache packages node_modules
         uses: actions/cache@v2
@@ -33,7 +34,8 @@ jobs:
           path: |
             packages/**/node_modules
             internal/**/node_modules
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-packages
+          key: ${{ runner.os }}-modules-packages-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}
+          restore-keys: ${{ runner.os }}-modules-packages-

       - name: Cache examples node_modules
         uses: actions/cache@v2
@@ -41,13 +43,15 @@ jobs:
           path: |
             examples/**/node_modules
             templates/**/node_modules
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-examples
+          key: ${{ runner.os }}-modules-examples-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}
+          restore-keys: ${{ runner.os }}-modules-examples-

       - name: Cache functional tests node_modules
         uses: actions/cache@v2
         with:
           path: 'functional-tests/**/node_modules'
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-functional-tests
+          key: ${{ runner.os }}-modules-functional-tests-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}
+          restore-keys: ${{ runner.os }}-modules-functional-tests-

       - name: Cache tests and website node_modules
         uses: actions/cache@v2
@@ -55,7 +59,8 @@ jobs:
           path: |
             tests/**/node_modules
             website/**/node_modules
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-tests
+          key: ${{ runner.os }}-modules-tests-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}
+          restore-keys: ${{ runner.os }}-modules-tests-

       - name: Cache build
         uses: actions/cache@v2
@@ -72,12 +77,13 @@ jobs:
           internal/**/*.tsbuildinfo
           !internal/**/node_modules/**
           .packages/**
-          key: ${{ runner.os }}-${{ github.run_id }}-build
+          key: ${{ runner.os }}-build-${{ github.run_id }}

       - name: Install
-        run: |
-          yarn install --frozen-lockfile
-          yarn validate-lock-file
+        run: yarn install --frozen-lockfile
+
+      - name: Validate yarn.lock
+        run: yarn validate-lock-file

       - name: Run Prettier
         run: yarn prettier:check
@@ -117,7 +123,7 @@ jobs:
         uses: actions/cache@v2
         with:
           path: 'node_modules'
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-root
+          key: ${{ runner.os }}-modules-root-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}

       - name: Cache packages node_modules
         uses: actions/cache@v2
@@ -125,7 +131,7 @@ jobs:
           path: |
             packages/**/node_modules
             internal/**/node_modules
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-packages
+          key: ${{ runner.os }}-modules-packages-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}

       - name: Cache tests and website node_modules
         uses: actions/cache@v2
@@ -133,7 +139,7 @@ jobs:
           path: |
             tests/**/node_modules
             website/**/node_modules
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-tests
+          key: ${{ runner.os }}-modules-tests-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}

       - name: Cache build
         uses: actions/cache@v2
@@ -150,7 +156,7 @@ jobs:
           internal/**/*.tsbuildinfo
           !internal/**/node_modules/**
           .packages/**
-          key: ${{ runner.os }}-${{ github.run_id }}-build
+          key: ${{ runner.os }}-build-${{ github.run_id }}

       - name: Install
         run: |
@@ -184,7 +190,7 @@ jobs:
         uses: actions/cache@v2
         with:
           path: 'node_modules'
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-root
+          key: ${{ runner.os }}-modules-root-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}

       - name: Cache packages node_modules
         uses: actions/cache@v2
@@ -192,7 +198,7 @@ jobs:
           path: |
             packages/**/node_modules
             internal/**/node_modules
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-packages
+          key: ${{ runner.os }}-modules-packages-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}

       - name: Cache examples node_modules
         uses: actions/cache@v2
@@ -200,7 +206,7 @@ jobs:
           path: |
             examples/**/node_modules
             templates/**/node_modules
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-examples
+          key: ${{ runner.os }}-modules-examples-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}

       - name: Cache build
         uses: actions/cache@v2
@@ -217,7 +223,7 @@ jobs:
           internal/**/*.tsbuildinfo
           !internal/**/node_modules/**
           .packages/**
-          key: ${{ runner.os }}-${{ github.run_id }}-build
+          key: ${{ runner.os }}-build-${{ github.run_id }}

       - name: Install
         run: |
@@ -249,7 +255,7 @@ jobs:
         uses: actions/cache@v2
         with:
           path: 'node_modules'
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-root
+          key: ${{ runner.os }}-modules-root-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}

       - name: Cache packages node_modules
         uses: actions/cache@v2
@@ -257,7 +263,7 @@ jobs:
           path: |
             packages/**/node_modules
             internal/**/node_modules
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-packages
+          key: ${{ runner.os }}-modules-packages-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}

       - name: Cache examples node_modules
         uses: actions/cache@v2
@@ -265,13 +271,13 @@ jobs:
           path: |
             examples/**/node_modules
             templates/**/node_modules
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-examples
+          key: ${{ runner.os }}-modules-examples-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}

       - name: Cache functional tests node_modules
         uses: actions/cache@v2
         with:
           path: 'functional-tests/**/node_modules'
-          key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}-modules-functional-tests
+          key: ${{ runner.os }}-modules-functional-tests-${{ hashFiles('**/yarn.lock', '!**/node_modules/**') }}

       - name: Cache build
         uses: actions/cache@v2
@@ -288,7 +294,7 @@ jobs:
           internal/**/*.tsbuildinfo
           !internal/**/node_modules/**
           .packages/**
-          key: ${{ runner.os }}-${{ github.run_id }}-build
+          key: ${{ runner.os }}-build-${{ github.run_id }}

       - name: Install
         run: |
@@ -327,14 +333,6 @@ jobs:
           token: ${{ secrets.RESOLVE_BOT_PAT }}
           scopes: '@resolve-js'

-      - name: Integration Test PostgreSQL Serverless
-        env:
-          AWS_ACCESS_KEY_ID: ${{ secrets.TEST_CLOUD_AWS_ACCESS_KEY_ID }}
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.TEST_CLOUD_AWS_SECRET_ACCESS_KEY }}
-          AWS_RDS_CLUSTER_ARN: ${{ steps.install_cloud.outputs.system_cluster_arn }}
-          AWS_RDS_ADMIN_SECRET_ARN: ${{ steps.install_cloud.outputs.postgres_admin_secret_arn }}
-        run: yarn test:integration-postgres-serverless
-
       - name: Prepare test application
         run: |
           test_app_dir=$(mktemp -d -t test-app-XXXXXXXXX)
@@ -382,3 +380,11 @@ jobs:
         run: |
           cd functional-tests
           yarn run-test testcafe --url=${{ steps.deploy.outputs.url }} --testcafe-browser=chrome --ci-mode --testcafe-timeout=10000
+
+      - name: Integration Test PostgreSQL Serverless
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.TEST_CLOUD_AWS_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.TEST_CLOUD_AWS_SECRET_ACCESS_KEY }}
+          AWS_RDS_CLUSTER_ARN: ${{ steps.install_cloud.outputs.system_cluster_arn }}
+          AWS_RDS_ADMIN_SECRET_ARN: ${{ steps.install_cloud.outputs.postgres_admin_secret_arn }}
+        run: yarn test:integration-postgres-serverless

From daae63ef5a87ef15681e0786b5e928c67407711b Mon Sep 17 00:00:00 2001
From: timbset
Date: Wed, 7 Jul 2021 16:31:51 +0300
Subject: [PATCH 02/18] Rework monitoring tests to pass more info into metrics
 search

---
 functional-tests/api/monitoring.test.ts       | 453 +++++++++++-------
 packages/runtime/runtime/src/cloud/metrics.js |   2 +-
 .../runtime/runtime/src/cloud/monitoring.js   |   2 +-
 .../runtime/test/cloud/metrics.test.js        |  12 +-
 .../runtime/test/cloud/monitoring.test.js     |  12 +-
 5 files changed, 302 insertions(+), 179 deletions(-)

diff --git a/functional-tests/api/monitoring.test.ts b/functional-tests/api/monitoring.test.ts
index 4b98a9bbc0..1e5c34acae 100644
--- a/functional-tests/api/monitoring.test.ts
+++ b/functional-tests/api/monitoring.test.ts
@@ -17,6 +17,11 @@ type BaseMetrics = {
   }
 }

+interface Dimension {
+  Name: string
+  Value: string
+}
+
 const nanoid = customAlphabet('0123456789abcdef_', 16)
 const maxAttempts = 5
 const attemptPeriod = 2000
@@ -29,33 +34,30 @@ let startTime: Date
 let endTime: Date
 let baseMetrics: BaseMetrics

-const getMetricData = async (
-  part: string,
-  ...dimensions: Array
-): Promise => {
+const getMetricData = async ({
+  MetricName,
+  Stat,
+  Unit,
+  Dimensions,
+}: {
+  MetricName: string
+  Stat: string
+  Unit: string
+  Dimensions: Array<Dimension>
+}): Promise<number> => {
   const data = await cw.getMetricData({
     MetricDataQueries: [
       {
         Id: `q${nanoid()}`,
         MetricStat: {
           Metric: {
-            Namespace: 'RESOLVE_METRICS',
-            MetricName: 'Errors',
-            Dimensions: [
-              {
-                Name: 'DeploymentId',
-                Value: deploymentId,
-              },
-              {
-                Name: 'Part',
-                Value: part,
-              },
-              ...dimensions,
-            ],
+            Namespace: 'ResolveJs',
+            MetricName,
+            Dimensions,
           },
-          Stat: 'Sum',
+          Stat,
           Period: 31536000, // year
-          Unit: 'Count',
+          Unit,
         },
       },
     ],
   if (valueCount === 1) {
     return data.MetricDataResults?.[0]?.Values?.[0] as number
   }
-  throw Error(`multiple metric ${part} values received`)
+  throw Error(`multiple metric ${MetricName} values received`)
 }

 const collectBaseMetrics = async (): Promise<BaseMetrics> => {
   const metrics = await Promise.all([
-    getMetricData(
-      'ReadModelProjection',
-      {
-        Name: 'ReadModel',
-        Value: 'init-failed',
-      },
-      {
-        Name: 'EventType',
-        Value: 'Init',
-      }
-    ),
-    getMetricData(
-      'ReadModelProjection',
-      {
-        Name: 'ReadModel',
-        Value: 'monitoring',
-      },
-      {
-        Name: 'EventType',
-        Value: 'MONITORING_FAILED_HANDLER',
-      }
-    ),
-    getMetricData(
-      'ReadModelResolver',
-      {
-        Name: 'ReadModel',
-        Value: 'monitoring',
-      },
-      {
-        Name: 'Resolver',
-        Value: 'resolverA',
-      }
-    ),
-    getMetricData(
-      'ReadModelResolver',
-      {
-        Name: 'ReadModel',
-        Value: 'monitoring',
-      },
-      {
-        Name: 'Resolver',
-        Value: 'resolverB',
-      }
-    ),
+    getMetricData({
+      MetricName: 'Errors',
+      Stat: 'Sum',
+      Unit: 'Count',
+      Dimensions: [
+        {
+          Name: 'DeploymentId',
+          Value: deploymentId,
+        },
+        {
+          Name: 'Part',
+          Value: 'ReadModelProjection',
+        },
+        {
+          Name: 'ReadModel',
+          Value: 'init-failed',
+        },
+        {
+          Name: 'EventType',
+          Value: 'Init',
+        },
+      ],
+    }),
+    getMetricData({
+      MetricName: 'Errors',
+      Stat: 'Sum',
+      Unit: 'Count',
+      Dimensions: [
+        {
+          Name: 'DeploymentId',
+          Value: deploymentId,
+        },
+        {
+          Name: 'Part',
+          Value: 'ReadModelProjection',
+        },
+        {
+          Name: 'ReadModel',
+          Value: 'monitoring',
+        },
+        {
+          Name: 'EventType',
+          Value: 'MONITORING_FAILED_HANDLER',
+        },
+      ],
+    }),
+    getMetricData({
+      MetricName: 'Errors',
+      Stat: 'Sum',
+      Unit: 'Count',
+      Dimensions: [
+        {
+          Name: 'DeploymentId',
+          Value: deploymentId,
+        },
+        {
+          Name: 'Part',
+          Value: 'ReadModelResolver',
+        },
+        {
+          Name: 'ReadModel',
+          Value: 'monitoring',
+        },
+        {
+          Name: 'Resolver',
+          Value: 'resolverA',
+        },
+      ],
+    }),
+    getMetricData({
+      MetricName: 'Errors',
+      Stat: 'Sum',
+      Unit: 'Count',
+      Dimensions: [
+        {
+          Name: 'DeploymentId',
+          Value: deploymentId,
+        },
+        {
+          Name: 'Part',
+          Value: 'ReadModelResolver',
+        },
+        {
+          Name: 'ReadModel',
+          Value: 'monitoring',
+        },
+        {
+          Name: 'Resolver',
+          Value: 'resolverB',
+        },
+      ],
+    }),
   ])

   return {
     Errors: {
       readModelProjection: {
         Init: metrics[0],
         EventHandler: metrics[1],
       },
       readModelResolver: {
         resolverA: metrics[2],
         resolverB: metrics[3],
       },
     },
   }
@@ -139,18 +189,22 @@ beforeAll(async () => {
   deploymentId = process.env.RESOLVE_TESTS_TARGET_DEPLOYMENT_ID || ''
   cw = new CloudWatch({})
   client = getClient()
-  endTime = new Date(Date.now() + 3600000)
-  startTime = new Date(Date.now() - 360000 * 24)
+  endTime = new Date(Date.now() + 3600000) // next hour
+  startTime = new Date(Date.now() - 3600000 * 24) // previous day
   baseMetrics = await collectBaseMetrics()
 })

 const awaitMetricValue = async (
-  part: string,
-  dimensions: Array,
+  metricData: {
+    MetricName: string
+    Stat: string
+    Unit: string
+    Dimensions: Array<Dimension>
+  },
   value: number,
   attempt = 0
 ): Promise<void> => {
-  const metric = await getMetricData(part, ...dimensions)
+  const metric = await getMetricData(metricData)

   if (!isEqual(metric, value)) {
     if (attempt >= maxAttempts) {
       throw Error(
         `Metric data mismatch after ${attempt} attempts: expected ${value}, received last ${metric}`
       )
     }
     await new Promise((resolve) => setTimeout(resolve, attemptPeriod))
-    await awaitMetricValue(part, dimensions, value, attempt + 1)
+
+    await awaitMetricValue(metricData, value, attempt + 1)
   }
 }

-test('read model Init handler failed', async () => {
-  await awaitMetricValue(
-    'ReadModelProjection',
-    [
-      {
-        Name: 'ReadModel',
-        Value: 'init-failed',
-      },
+describe('Read Model', () => {
+  test('read model Init handler failed', async () => {
+    await awaitMetricValue(
       {
-        Name: 'EventType',
-        Value: 'Init',
+        MetricName: 'Errors',
+        Stat: 'Sum',
+        Unit: 'Count',
+        Dimensions: [
+          {
+            Name: 'DeploymentId',
+            Value: deploymentId,
+          },
+          {
+            Name: 'Part',
+            Value: 'ReadModelProjection',
+          },
+          {
+            Name: 'ReadModel',
+            Value: 'init-failed',
+          },
+          {
+            Name: 'EventType',
+            Value: 'Init',
+          },
+        ],
       },
-    ],
-    baseMetrics.Errors.readModelProjection.Init
-  )
-})
+      baseMetrics.Errors.readModelProjection.Init
+    )
+  })

-test('read model resolverA failed', async () => {
-  await expect(
-    client.query({
-      name: 'monitoring',
-      resolver: 'resolverA',
-      args: {},
-    })
-  ).rejects.toBeInstanceOf(Error)
+  test('read model resolverA failed', async () => {
+    await expect(
+      client.query({
+        name: 'monitoring',
+        resolver: 'resolverA',
+        args: {},
+      })
+    ).rejects.toBeInstanceOf(Error)

-  baseMetrics.Errors.readModelResolver.resolverA++
+    baseMetrics.Errors.readModelResolver.resolverA++

-  await awaitMetricValue(
-    'ReadModelResolver',
-    [
-      {
-        Name: 'ReadModel',
-        Value: 'monitoring',
-      },
      {
-        Name: 'Resolver',
-        Value: 'resolverA',
+        MetricName: 'Errors',
+        Stat: 'Sum',
+        Unit: 'Count',
+        Dimensions: [
+          {
+            Name: 'DeploymentId',
+            Value: deploymentId,
+          },
+          {
+            Name: 'Part',
+            Value: 'ReadModelResolver',
+          },
+          {
+            Name: 'ReadModel',
+            Value: 'monitoring',
+          },
+          {
+            Name: 'Resolver',
+            Value: 'resolverA',
+          },
+        ],
       },
-    ],
-    baseMetrics.Errors.readModelResolver.resolverA
-  )
+      baseMetrics.Errors.readModelResolver.resolverA
+    )

-  await awaitMetricValue(
-    'ReadModelResolver',
-    [
+    await awaitMetricValue(
       {
-        Name: 'ReadModel',
-        Value: 'monitoring',
+        MetricName: 'Errors',
+        Stat: 'Sum',
+        Unit: 'Count',
+        Dimensions: [
+          {
+            Name: 'DeploymentId',
+            Value: deploymentId,
+          },
+          {
+            Name: 'Part',
+            Value: 'ReadModelResolver',
+          },
+          {
+            Name: 'ReadModel',
+            Value: 'monitoring',
+          },
+        ],
       },
-    ],
-    baseMetrics.Errors.readModelResolver.resolverB +
-      baseMetrics.Errors.readModelResolver.resolverA
-  )
-})
+      baseMetrics.Errors.readModelResolver.resolverB +
+        baseMetrics.Errors.readModelResolver.resolverA
+    )
+  })

-test('read model resolverB failed', async () => {
-  await expect(
-    client.query({
-      name: 'monitoring',
-      resolver: 'resolverB',
-      args: {},
-    })
-  ).rejects.toBeInstanceOf(Error)
+  test('read model resolverB failed', async () => {
+    await expect(
+      client.query({
+        name: 'monitoring',
+        resolver: 'resolverB',
+        args: {},
+      })
+    ).rejects.toBeInstanceOf(Error)

-  baseMetrics.Errors.readModelResolver.resolverB++
+    baseMetrics.Errors.readModelResolver.resolverB++

-  await awaitMetricValue(
-    'ReadModelResolver',
-    [
+    await awaitMetricValue(
       {
-        Name: 'ReadModel',
-        Value: 'monitoring',
-      },
-      {
-        Name: 'Resolver',
-        Value: 'resolverB',
+        MetricName: 'Errors',
+        Stat: 'Sum',
+        Unit: 'Count',
+        Dimensions: [
+          {
+            Name: 'DeploymentId',
+            Value: deploymentId,
+          },
+          {
+            Name: 'Part',
+            Value: 'ReadModelResolver',
+          },
+          {
+            Name: 'ReadModel',
+            Value: 'monitoring',
+          },
+          {
+            Name: 'Resolver',
+            Value: 'resolverB',
+          },
+        ],
       },
-    ],
-    baseMetrics.Errors.readModelResolver.resolverB
-  )
+      baseMetrics.Errors.readModelResolver.resolverB
+    )

-  await awaitMetricValue(
-    'ReadModelResolver',
-    [
+    await awaitMetricValue(
       {
-        Name: 'ReadModel',
-        Value: 'monitoring',
+        MetricName: 'Errors',
+        Stat: 'Sum',
+        Unit: 'Count',
+        Dimensions: [
+          {
+            Name: 'Part',
+            Value: 'ReadModelResolver',
+          },
+          {
+            Name: 'ReadModel',
+            Value: 'monitoring',
+          },
+        ],
       },
-    ],
-    baseMetrics.Errors.readModelResolver.resolverB +
-      baseMetrics.Errors.readModelResolver.resolverA
-  )
-})
-
-test('read model event handler failed', async () => {
-  await client.command({
-    aggregateId: 'any',
-    aggregateName: 'monitoring-aggregate',
-    type: 'fail',
-    payload: {},
+      baseMetrics.Errors.readModelResolver.resolverB +
+        baseMetrics.Errors.readModelResolver.resolverA
+    )
   })

-  await awaitMetricValue(
-    'ReadModelProjection',
-    [
-      {
-        Name: 'ReadModel',
-        Value: 'monitoring',
-      },
+  test('read model event handler failed', async () => {
+    await client.command({
+      aggregateId: 'any',
+      aggregateName: 'monitoring-aggregate',
+      type: 'fail',
+      payload: {},
+    })
+
+    baseMetrics.Errors.readModelProjection.EventHandler++
+
+    await awaitMetricValue(
       {
-        Name: 'EventType',
-        Value: 'MONITORING_FAILED_HANDLER',
+        MetricName: 'Errors',
+        Stat: 'Sum',
+        Unit: 'Count',
+        Dimensions: [
+          {
+            Name: 'Part',
+            Value: 'ReadModelProjection',
+          },
+          {
+            Name: 'ReadModel',
+            Value: 'monitoring',
+          },
+          {
+            Name: 'EventType',
+            Value: 'MONITORING_FAILED_HANDLER',
+          },
+        ],
       },
-    ],
-    baseMetrics.Errors.readModelProjection.EventHandler + 1
-  )
+      baseMetrics.Errors.readModelProjection.EventHandler
+    )
+  })
 })
diff --git a/packages/runtime/runtime/src/cloud/metrics.js b/packages/runtime/runtime/src/cloud/metrics.js
index ab61e3d7d0..96db6e03c4 100644
--- a/packages/runtime/runtime/src/cloud/metrics.js
+++ b/packages/runtime/runtime/src/cloud/metrics.js
@@ -52,7 +52,7 @@ export const putDurationMetrics = async (
         Value: duration,
       },
     ],
-    Namespace: 'RESOLVE_METRICS',
+    Namespace: 'ResolveJs',
   }

   if (coldStart) {
diff --git a/packages/runtime/runtime/src/cloud/monitoring.js b/packages/runtime/runtime/src/cloud/monitoring.js
index e7f3dcf23d..fa3e240f01 100644
--- a/packages/runtime/runtime/src/cloud/monitoring.js
+++ b/packages/runtime/runtime/src/cloud/monitoring.js
@@ -297,7 +297,7 @@ const monitoringPublish = async (log, monitoringData) => {
   ) {
     promises.push(
       putMetricData({
-        Namespace: 'RESOLVE_METRICS',
+        Namespace: 'ResolveJs',
         MetricData: monitoringData.metricData.slice(i, i + MAX_METRIC_COUNT),
       })
     )
diff --git a/packages/runtime/runtime/test/cloud/metrics.test.js b/packages/runtime/runtime/test/cloud/metrics.test.js
index 9ac368ec68..a0d210157f 100644
--- a/packages/runtime/runtime/test/cloud/metrics.test.js
+++ b/packages/runtime/runtime/test/cloud/metrics.test.js
@@ -68,7 +68,7 @@ describe('put duration metrics', () => {
           Value: 2000,
         },
       ],
-      Namespace: 'RESOLVE_METRICS',
+      Namespace: 'ResolveJs',
     })
     expect(console.info).toBeCalledWith(
       ['[REQUEST INFO]', 'route', '', 2000].join('\n')
@@ -103,7 +103,7 @@ describe('put duration metrics', () => {
           Value: 2000,
         },
       ],
-      Namespace: 'RESOLVE_METRICS',
+      Namespace: 'ResolveJs',
     })
    expect(console.info).toBeCalledWith(
      [
@@ -143,7 +143,7 @@ describe('put duration metrics', () => {
           Value: 2000,
         },
       ],
-      Namespace: 'RESOLVE_METRICS',
+      Namespace: 'ResolveJs',
     })
     expect(console.info).toBeCalledWith(
       [
@@ -183,7 +183,7 @@ describe('put duration metrics', () => {
           Value: 2000,
         },
       ],
-      Namespace: 'RESOLVE_METRICS',
+      Namespace: 'ResolveJs',
     })
     expect(console.info).toBeCalledWith(
       [
@@ -223,7 +223,7 @@ describe('put duration metrics', () => {
           Value: 2000,
         },
       ],
-      Namespace: 'RESOLVE_METRICS',
+      Namespace: 'ResolveJs',
     })
     expect(console.info).toBeCalledWith(
       [
@@ -279,7 +279,7 @@ describe('put duration metrics', () => {
           Value: 897000,
         },
       ],
-      Namespace: 'RESOLVE_METRICS',
+      Namespace: 'ResolveJs',
     })
   })
 })
diff --git a/packages/runtime/runtime/test/cloud/monitoring.test.js b/packages/runtime/runtime/test/cloud/monitoring.test.js
index 61f68b2760..769c01e406 100644
--- a/packages/runtime/runtime/test/cloud/monitoring.test.js
+++ b/packages/runtime/runtime/test/cloud/monitoring.test.js
@@ -37,7 +37,7 @@ describe('common', () => {
     expect(CloudWatch.putMetricData).toBeCalledTimes(1)
     expect(CloudWatch.putMetricData).toBeCalledWith({
-      Namespace: 'RESOLVE_METRICS',
+      Namespace: 'ResolveJs',
       MetricData: expect.any(Array),
     })
   })
@@ -1027,7 +1027,7 @@ describe('time and timeEnd', () => {
     expect(CloudWatch.putMetricData).toBeCalledTimes(1)
     expect(CloudWatch.putMetricData).toBeCalledWith({
-      Namespace: 'RESOLVE_METRICS',
+      Namespace: 'ResolveJs',
       MetricData: expect.any(Array),
     })
@@ -1267,7 +1267,7 @@ describe('duration', () => {
     expect(CloudWatch.putMetricData).toBeCalledTimes(1)
     expect(CloudWatch.putMetricData).toBeCalledWith({
-      Namespace: 'RESOLVE_METRICS',
+      Namespace: 'ResolveJs',
       MetricData: expect.any(Array),
     })
@@ -1297,7 +1297,7 @@ describe('duration', () => {
     expect(CloudWatch.putMetricData).toBeCalledTimes(1)
     expect(CloudWatch.putMetricData).toBeCalledWith({
-      Namespace: 'RESOLVE_METRICS',
+      Namespace: 'ResolveJs',
       MetricData: expect.any(Array),
     })
@@ -1653,7 +1653,7 @@ describe('rate', () => {
     expect(CloudWatch.putMetricData).toBeCalledTimes(1)
     expect(CloudWatch.putMetricData).toBeCalledWith({
-      Namespace: 'RESOLVE_METRICS',
+      Namespace: 'ResolveJs',
       MetricData: expect.any(Array),
     })
@@ -1682,7 +1682,7 @@ describe('rate', () => {
     expect(CloudWatch.putMetricData).toBeCalledTimes(1)
     expect(CloudWatch.putMetricData).toBeCalledWith({
-      Namespace: 'RESOLVE_METRICS',
+      Namespace: 'ResolveJs',
       MetricData: expect.any(Array),
     })

From 48494c37eb6275cc0bb8c2d940370762df8ce76a Mon Sep 17 00:00:00 2001
From: timbset
Date: Wed, 7 Jul 2021 19:24:50 +0300
Subject: [PATCH 03/18] Improve tests readability. Add commands tests

---
 functional-tests/api/monitoring.test.ts       | 420 ++++++++++--------
 .../common/aggregates/monitoring.commands.js  |   8 +-
 2 files changed, 231 insertions(+), 197 deletions(-)

diff --git a/functional-tests/api/monitoring.test.ts b/functional-tests/api/monitoring.test.ts
index 1e5c34acae..4a001396a7 100644
--- a/functional-tests/api/monitoring.test.ts
+++ b/functional-tests/api/monitoring.test.ts
@@ -6,9 +6,10 @@ import { customAlphabet } from 'nanoid'

 type BaseMetrics = {
   Errors: {
-    readModelProjection: {
-      Init: number
-      EventHandler: number
+    commandPart: number
+    command: {
+      failCommandA: number
+      failCommandB: number
     }
     readModelResolver: {
       resolverA: number
@@ -75,111 +76,89 @@ const getMetricData = async ({
   throw Error(`multiple metric ${MetricName} values received`)
 }

+const createDimensions = (list: string[]): Dimension[] =>
+  list.map((item) => {
+    const temp = item.split('=')
+
+    return {
+      Name: temp[0],
+      Value: temp[1],
+    }
+  })
+
 const collectBaseMetrics = async (): Promise<BaseMetrics> => {
-  const metrics = await Promise.all([
+  const [
+    commandPartMetrics,
+    failCommandAMetrics,
+    failCommandBMetrics,
+    resolverAMetrics,
+    resolverBMetrics,
+  ] = await Promise.all([
     getMetricData({
       MetricName: 'Errors',
       Stat: 'Sum',
       Unit: 'Count',
-      Dimensions: [
-        {
-          Name: 'DeploymentId',
-          Value: deploymentId,
-        },
-        {
-          Name: 'Part',
-          Value: 'ReadModelProjection',
-        },
-        {
-          Name: 'ReadModel',
-          Value: 'init-failed',
-        },
-        {
-          Name: 'EventType',
-          Value: 'Init',
-        },
-      ],
+      Dimensions: createDimensions([
+        `DeploymentId=${deploymentId}`,
+        'Part=Command',
+      ]),
     }),
     getMetricData({
       MetricName: 'Errors',
       Stat: 'Sum',
       Unit: 'Count',
-      Dimensions: [
-        {
-          Name: 'DeploymentId',
-          Value: deploymentId,
-        },
-        {
-          Name: 'Part',
-          Value: 'ReadModelProjection',
-        },
-        {
-          Name: 'ReadModel',
-          Value: 'monitoring',
-        },
-        {
-          Name: 'EventType',
-          Value: 'MONITORING_FAILED_HANDLER',
-        },
-      ],
+      Dimensions: createDimensions([
+        `DeploymentId=${deploymentId}`,
+        'Part=Command',
+        'AggregateName=monitoring-aggregate',
+        'Type=failCommandA',
+      ]),
     }),
     getMetricData({
       MetricName: 'Errors',
       Stat: 'Sum',
       Unit: 'Count',
-      Dimensions: [
-        {
-          Name: 'DeploymentId',
-          Value: deploymentId,
-        },
-        {
-          Name: 'Part',
-          Value: 'ReadModelResolver',
-        },
-        {
-          Name: 'ReadModel',
-          Value: 'monitoring',
-        },
-        {
-          Name: 'Resolver',
-          Value: 'resolverA',
-        },
-      ],
+      Dimensions: createDimensions([
+        `DeploymentId=${deploymentId}`,
+        'Part=Command',
+        'AggregateName=monitoring-aggregate',
+        'Type=failCommandB',
+      ]),
     }),
     getMetricData({
       MetricName: 'Errors',
       Stat: 'Sum',
       Unit: 'Count',
-      Dimensions: [
-        {
-          Name: 'DeploymentId',
-          Value: deploymentId,
-        },
-        {
-          Name: 'Part',
-          Value: 'ReadModelResolver',
-        },
-        {
-          Name: 'ReadModel',
-          Value: 'monitoring',
-        },
-        {
-          Name: 'Resolver',
-          Value: 'resolverB',
-        },
-      ],
+      Dimensions: createDimensions([
+        `DeploymentId=${deploymentId}`,
+        'Part=ReadModelResolver',
+        'ReadModel=monitoring',
+        'Resolver=resolverA',
+      ]),
+    }),
+    getMetricData({
+      MetricName: 'Errors',
+      Stat: 'Sum',
+      Unit: 'Count',
+      Dimensions: createDimensions([
+        `DeploymentId=${deploymentId}`,
+        'Part=ReadModelResolver',
+        'ReadModel=monitoring',
+        'Resolver=resolverB',
+      ]),
     }),
   ])

   return {
     Errors: {
-      readModelProjection: {
-        Init: metrics[0],
-        EventHandler: metrics[1],
+      commandPart: commandPartMetrics,
+      command: {
+        failCommandA: failCommandAMetrics,
+        failCommandB: failCommandBMetrics,
       },
       readModelResolver: {
-        resolverA: metrics[2],
-        resolverB: metrics[3],
+        resolverA: resolverAMetrics,
+        resolverB: resolverBMetrics,
       },
     },
   }
@@ -139,18 +189,22 @@ beforeAll(async () => {
   if (!isEqual(metric, value)) {
     if (attempt >= maxAttempts) {
+      const lastDimension =
+        metricData.Dimensions[metricData.Dimensions.length - 1]
+      const dimensionString = `${lastDimension.Name}=${lastDimension.Value}`
+
       throw Error(
-        `Metric data mismatch after ${attempt} attempts: expected ${value}, received last ${metric}`
+        [
+          `Metric data mismatch after ${attempt} attempts: `,
+          `expected ${value}, received last ${metric} `,
+          `(last dimension: ${dimensionString})`,
+        ].join('')
       )
     }
     await new Promise((resolve) => setTimeout(resolve, attemptPeriod))

     await awaitMetricValue(metricData, value, attempt + 1)
   }
 }

+describe('Commands', () => {
+  test('aggregate commands failed', async () => {
+    await expect(
+      client.command({
+        aggregateId: 'any',
+        aggregateName: 'monitoring-aggregate',
+        type: 'failCommandA',
+        payload: {},
+      })
+    ).rejects.toThrowError()
+
+    await expect(
+      client.command({
+        aggregateId: 'any',
+        aggregateName: 'monitoring-aggregate',
+        type: 'failCommandB',
+        payload: {},
+      })
+    ).rejects.toThrowError()
+
+    baseMetrics.Errors.command.failCommandA += 1
+    baseMetrics.Errors.command.failCommandB += 1
+    baseMetrics.Errors.commandPart += 2
+
+    await awaitMetricValue(
+      {
+        MetricName: 'Errors',
+        Stat: 'Sum',
+        Unit: 'Count',
+        Dimensions: createDimensions([
+          `DeploymentId=${deploymentId}`,
+          'Part=Command',
+          'AggregateName=monitoring-aggregate',
+          'Type=failCommandA',
+        ]),
+      },
+      baseMetrics.Errors.command.failCommandA
+    )
+
+    await awaitMetricValue(
+      {
+        MetricName: 'Errors',
+        Stat: 'Sum',
+        Unit: 'Count',
+        Dimensions: createDimensions([
+          `DeploymentId=${deploymentId}`,
+          'Part=Command',
+          'AggregateName=monitoring-aggregate',
+          'Type=failCommandB',
+        ]),
+      },
+      baseMetrics.Errors.command.failCommandB
+    )
+
+    await awaitMetricValue(
+      {
+        MetricName: 'Errors',
+        Stat: 'Sum',
+        Unit: 'Count',
+        Dimensions: createDimensions([
+          `DeploymentId=${deploymentId}`,
+          'Part=Command',
+          'AggregateName=monitoring-aggregate',
+        ]),
+      },
+      baseMetrics.Errors.command.failCommandA +
+        baseMetrics.Errors.command.failCommandB
+    )
+
+    await awaitMetricValue(
+      {
+        MetricName: 'Errors',
+        Stat: 'Sum',
+        Unit: 'Count',
+        Dimensions: createDimensions([
+          `DeploymentId=${deploymentId}`,
+          'Part=Command',
+        ]),
+      },
+      baseMetrics.Errors.commandPart
+    )
+  })
+})
+
+describe('Read Model Projection monitoring', () => {
   test('read model Init handler failed', async () => {
     await awaitMetricValue(
       {
         MetricName: 'Errors',
         Stat: 'Sum',
         Unit: 'Count',
-        Dimensions: [
-          {
-            Name: 'DeploymentId',
-            Value: deploymentId,
-          },
-          {
-            Name: 'Part',
-            Value: 'ReadModelProjection',
-          },
-          {
-            Name: 'ReadModel',
-            Value: 'init-failed',
-          },
-          {
-            Name: 'EventType',
-            Value: 'Init',
-          },
-        ],
+        Dimensions: createDimensions([
+          `DeploymentId=${deploymentId}`,
+          'Part=ReadModelProjection',
+          'ReadModel=init-failed',
+          'EventType=Init',
+        ]),
       },
-      baseMetrics.Errors.readModelProjection.Init
+      1
     )
   })
+
+  test('read model event handler failed', async () => {
+    await client.command({
+      aggregateId: 'any',
+      aggregateName: 'monitoring-aggregate',
+      type: 'failReadModelProjection',
+      payload: {},
+    })
+
+    await awaitMetricValue(
+      {
+        MetricName: 'Errors',
+        Stat: 'Sum',
+        Unit: 'Count',
+        Dimensions: createDimensions([
+          `DeploymentId=${deploymentId}`,
+          'Part=ReadModelProjection',
+          'ReadModel=monitoring',
+          'EventType=MONITORING_FAILED_HANDLER',
+        ]),
+      },
+      1
+    )
+  })
+})

+describe('Read Model Resolver monitoring', () => {
   test('read model resolverA failed', async () => {
     await expect(
       client.query({
         name: 'monitoring',
         resolver: 'resolverA',
         args: {},
       })
     ).rejects.toBeInstanceOf(Error)

     baseMetrics.Errors.readModelResolver.resolverA++

     await awaitMetricValue(
       {
         MetricName: 'Errors',
         Stat: 'Sum',
         Unit: 'Count',
-        Dimensions: [
-          {
-            Name: 'DeploymentId',
-            Value: deploymentId,
-          },
-          {
-            Name: 'Part',
-            Value: 'ReadModelResolver',
-          },
-          {
-            Name: 'ReadModel',
-            Value: 'monitoring',
-          },
-          {
-            Name: 'Resolver',
-            Value: 'resolverA',
-          },
-        ],
+        Dimensions: createDimensions([
+          `DeploymentId=${deploymentId}`,
+          'Part=ReadModelResolver',
+          'ReadModel=monitoring',
+          'Resolver=resolverA',
+        ]),
       },
       baseMetrics.Errors.readModelResolver.resolverA
     )

     await awaitMetricValue(
       {
         MetricName: 'Errors',
         Stat: 'Sum',
         Unit: 'Count',
-        Dimensions: [
-          {
-            Name: 'DeploymentId',
-            Value: deploymentId,
-          },
-          {
-            Name: 'Part',
-            Value: 'ReadModelResolver',
-          },
-          {
-            Name: 'ReadModel',
-            Value: 'monitoring',
-          },
-        ],
+        Dimensions: createDimensions([
+          `DeploymentId=${deploymentId}`,
+          'Part=ReadModelResolver',
+          'ReadModel=monitoring',
+        ]),
       },
       baseMetrics.Errors.readModelResolver.resolverB +
         baseMetrics.Errors.readModelResolver.resolverA
     )
   })

   test('read model resolverB failed', async () => {
     await expect(
       client.query({
         name: 'monitoring',
         resolver: 'resolverB',
         args: {},
       })
     ).rejects.toBeInstanceOf(Error)

     baseMetrics.Errors.readModelResolver.resolverB++

     await awaitMetricValue(
       {
         MetricName: 'Errors',
         Stat: 'Sum',
         Unit: 'Count',
-        Dimensions: [
-          {
-            Name: 'DeploymentId',
-            Value: deploymentId,
-          },
-          {
-            Name: 'Part',
-            Value: 'ReadModelResolver',
-          },
-          {
-            Name: 'ReadModel',
-            Value: 'monitoring',
-          },
-          {
-            Name: 'Resolver',
-            Value: 'resolverB',
-          },
-        ],
+        Dimensions: createDimensions([
+          `DeploymentId=${deploymentId}`,
+          'Part=ReadModelResolver',
+          'ReadModel=monitoring',
+          'Resolver=resolverB',
+        ]),
       },
       baseMetrics.Errors.readModelResolver.resolverB
     )

     await awaitMetricValue(
       {
         MetricName: 'Errors',
         Stat: 'Sum',
         Unit: 'Count',
-        Dimensions: [
-          {
-            Name: 'Part',
-            Value: 'ReadModelResolver',
-          },
-          {
-            Name: 'ReadModel',
-            Value: 'monitoring',
-          },
-        ],
+        Dimensions: createDimensions([
+          `DeploymentId=${deploymentId}`,
+          'Part=ReadModelResolver',
+          'ReadModel=monitoring',
+        ]),
       },
       baseMetrics.Errors.readModelResolver.resolverB +
         baseMetrics.Errors.readModelResolver.resolverA
     )
   })
-
-  test('read model event handler failed', async () => {
-    await client.command({
-      aggregateId: 'any',
-      aggregateName: 'monitoring-aggregate',
-      type: 'fail',
-      payload: {},
-    })
-
-    baseMetrics.Errors.readModelProjection.EventHandler++
-
-    await awaitMetricValue(
-      {
-        MetricName: 'Errors',
-        Stat: 'Sum',
-        Unit: 'Count',
-        Dimensions: [
-          {
-            Name: 'Part',
-            Value: 'ReadModelProjection',
-          },
-          {
-            Name: 'ReadModel',
-            Value: 'monitoring',
-          },
-          {
-            Name: 'EventType',
-            Value: 'MONITORING_FAILED_HANDLER',
-          },
-        ],
-      },
-      baseMetrics.Errors.readModelProjection.EventHandler
-    )
-  })
 })
diff --git a/functional-tests/app/common/aggregates/monitoring.commands.js b/functional-tests/app/common/aggregates/monitoring.commands.js
index e38cb1de6f..f10cabd1af 100644
--- a/functional-tests/app/common/aggregates/monitoring.commands.js
+++ b/functional-tests/app/common/aggregates/monitoring.commands.js
@@ -1,10 +1,16 @@
 import { MONITORING_FAILED_HANDLER } from '../event-types'

 const aggregate = {
-  fail: () => ({
+  failReadModelProjection: () => ({
     type: MONITORING_FAILED_HANDLER,
     payload: {},
   }),
+  failCommandA: () => {
+    throw new Error('Test aggregate: command A failed')
+  },
+  failCommandB: () => {
+    throw new Error('Test aggregate: command B failed')
+  },
 }

 export default aggregate

From 66b7805fad1d3abf689cfb0fbcf7cd68faf3bb21 Mon Sep 17 00:00:00 2001
From: timbset
Date: Wed, 7 Jul 2021 23:19:34 +0300
Subject: [PATCH 04/18] Fix command tests

---
 functional-tests/api/monitoring.test.ts       | 180 +++++++++++-------
 .../common/aggregates/monitoring.commands.js  |   7 +-
 2 files changed, 117 insertions(+), 70 deletions(-)

diff --git a/functional-tests/api/monitoring.test.ts b/functional-tests/api/monitoring.test.ts
index 4a001396a7..a899a43134 100644
--- a/functional-tests/api/monitoring.test.ts
+++ b/functional-tests/api/monitoring.test.ts
@@ -8,14 +8,27 @@ type BaseMetrics = {
   Errors: {
     commandPart: number
     command: {
-      failCommandA: number
-      failCommandB: number
+      failCommand: number
     }
     readModelResolver: {
       resolverA: number
       resolverB: number
     }
   }
+  Executions: {
+    commandPart: number
+    command: {
+      failCommand: number
+    }
+  }
+}
+
+interface CommandBaseMetrics {
+  partErrors: number
+  commandErrors: number
+  partExecutions: number
+  commandExecutions: number
+  executionDurationSamples: number
 }

 interface Dimension {
@@ -34,12 +47,15 @@ let client: Client
 let startTime: Date
 let endTime: Date
 let baseMetrics: BaseMetrics
+let commandBaseMetrics: CommandBaseMetrics

 const getMetricData = async ({
   MetricName,
   Stat,
-  Unit,
   Dimensions,
 }: {
   MetricName: string
   Stat: string
-  Unit: string
   Dimensions: Array<Dimension>
 }): Promise<number> => {
   const data = await cw.getMetricData({
     MetricDataQueries: [
       {
         Id: `q${nanoid()}`,
         MetricStat: {
           Metric: {
             Namespace: 'ResolveJs',
             MetricName,
             Dimensions,
           },
           Stat,
           Period: 31536000, // year
         },
       },
     ],
@@ -100,46 +116,31 @@ const collectBaseMetrics = async (): Promise<BaseMetrics> => {
   const [
     commandPartMetrics,
-    failCommandAMetrics,
-    failCommandBMetrics,
+    failCommandMetrics,
     resolverAMetrics,
     resolverBMetrics,
   ] = await Promise.all([
     getMetricData({
       MetricName: 'Errors',
       Stat: 'Sum',
-      Unit: 'Count',
-      Dimensions: createDimensions([
-        `DeploymentId=${deploymentId}`,
-        'Part=Command',
-      ]),
-    }),
-    getMetricData({
-      MetricName: 'Errors',
-      Stat: 'Sum',
-      Unit: 'Count',
       Dimensions: createDimensions([
         `DeploymentId=${deploymentId}`,
         'Part=Command',
-        'AggregateName=monitoring-aggregate',
-        'Type=failCommandA',
       ]),
     }),
     getMetricData({
       MetricName: 'Errors',
       Stat: 'Sum',
-      Unit: 'Count',
       Dimensions: createDimensions([
         `DeploymentId=${deploymentId}`,
         'Part=Command',
         'AggregateName=monitoring-aggregate',
-        'Type=failCommandB',
+        'Type=failCommand',
       ]),
     }),
     getMetricData({
       MetricName: 'Errors',
       Stat: 'Sum',
-      Unit: 'Count',
       Dimensions: createDimensions([
`DeploymentId=${deploymentId}`, 'Part=Command', - 'AggregateName=monitoring-aggregate', - 'Type=failCommandB', ]), }, - baseMetrics.Errors.command.failCommandB + commandBaseMetrics.partErrors ) await awaitMetricValue( { - MetricName: 'Errors', + MetricName: 'Executions', Stat: 'Sum', - Unit: 'Count', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=Command', 'AggregateName=monitoring-aggregate', + 'Type=failCommand', ]), }, - baseMetrics.Errors.command.failCommandA + - baseMetrics.Errors.command.failCommandB + commandBaseMetrics.commandExecutions ) await awaitMetricValue( { - MetricName: 'Errors', + MetricName: 'Executions', Stat: 'Sum', - Unit: 'Count', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=Command', - 'AggregateName=monitoring-aggregate', ]), }, - baseMetrics.Errors.command.failCommandA + - baseMetrics.Errors.command.failCommandB + commandBaseMetrics.partExecutions ) await awaitMetricValue( { - MetricName: 'Errors', - Stat: 'Sum', - Unit: 'Count', + MetricName: 'Duration', + Stat: 'SampleCount', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=Command', + 'AggregateName=monitoring-aggregate', + 'Type=failCommand', + 'Label=Execution', ]), }, - baseMetrics.Errors.commandPart + commandBaseMetrics.executionDurationSamples ) }) }) @@ -310,7 +366,6 @@ describe('Read Model Projection monitoring', () => { { MetricName: 'Errors', Stat: 'Sum', - Unit: 'Count', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ReadModelProjection', @@ -334,7 +389,6 @@ describe('Read Model Projection monitoring', () => { { MetricName: 'Errors', Stat: 'Sum', - Unit: 'Count', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ReadModelProjection', @@ -363,7 +417,6 @@ describe('Read Model Resolver monitoring', () => { { MetricName: 'Errors', Stat: 'Sum', - Unit: 'Count', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ReadModelResolver', @@ -378,7 +431,6 @@ describe('Read Model Resolver monitoring', () => { { MetricName: 'Errors', Stat: 'Sum', - Unit: 'Count', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ReadModelResolver', @@ -405,7 +457,6 @@ describe('Read Model Resolver monitoring', () => { { MetricName: 'Errors', Stat: 'Sum', - Unit: 'Count', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ReadModelResolver', @@ -420,7 +471,6 @@ describe('Read Model Resolver monitoring', () => { { MetricName: 'Errors', Stat: 'Sum', - Unit: 'Count', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ReadModelResolver', diff --git a/functional-tests/app/common/aggregates/monitoring.commands.js b/functional-tests/app/common/aggregates/monitoring.commands.js index f10cabd1af..90746e2370 100644 --- a/functional-tests/app/common/aggregates/monitoring.commands.js +++ b/functional-tests/app/common/aggregates/monitoring.commands.js @@ -5,11 +5,8 @@ const aggregate = { type: MONITORING_FAILED_HANDLER, payload: {}, }), - failCommandA: () => { - throw new Error('Test aggregate: command A failed') - }, - failCommandB: () => { - throw new Error('Test aggregate: command B failed') + failCommand: () => { + throw new Error('Test aggregate: command failed') }, } From 97db16266bfb17d6973cc433810bf185156822ed Mon Sep 17 00:00:00 2001 From: timbset Date: Wed, 7 Jul 2021 23:37:06 +0300 Subject: [PATCH 05/18] Improve read model resolver tests --- functional-tests/api/monitoring.test.ts | 144 ++++++++++++++---- .../read-models/monitoring.resolvers.js | 7 +- 2 files 
changed, 116 insertions(+), 35 deletions(-) diff --git a/functional-tests/api/monitoring.test.ts b/functional-tests/api/monitoring.test.ts index a899a43134..8602637585 100644 --- a/functional-tests/api/monitoring.test.ts +++ b/functional-tests/api/monitoring.test.ts @@ -11,7 +11,7 @@ type BaseMetrics = { failCommand: number } readModelResolver: { - resolverA: number + resolver: number resolverB: number } } @@ -31,6 +31,15 @@ interface CommandBaseMetrics { executionDurationSamples: number } +type ReadModelResolverBaseMetrics = { + partErrors: number + resolverErrors: number + resolverBErrors: number + partExecutions: number + resolverExecutions: number + executionDurationSamples: number +} + interface Dimension { Name: string Value: string @@ -48,6 +57,7 @@ let startTime: Date let endTime: Date let baseMetrics: BaseMetrics let commandBaseMetrics: CommandBaseMetrics +let readModelResolverBaseMetrics: ReadModelResolverBaseMetrics const getMetricData = async ({ MetricName, @@ -101,7 +111,7 @@ const collectBaseMetrics = async (): Promise => { const [ commandPartMetrics, failCommandMetrics, - resolverAMetrics, + resolverMetrics, resolverBMetrics, ] = await Promise.all([ getMetricData({ @@ -129,7 +139,7 @@ const collectBaseMetrics = async (): Promise => { `DeploymentId=${deploymentId}`, 'Part=ReadModelResolver', 'ReadModel=monitoring', - 'Resolver=resolverA', + 'Resolver=resolver', ]), }), getMetricData({ @@ -151,7 +161,7 @@ const collectBaseMetrics = async (): Promise => { failCommand: failCommandMetrics, }, readModelResolver: { - resolverA: resolverAMetrics, + resolver: resolverMetrics, resolverB: resolverBMetrics, }, }, @@ -230,6 +240,73 @@ const collectBaseCommandMetrics = async (): Promise => { } } +const collectReadModelResolverBaseMetrics = async (): Promise => { + const [ + partErrors, + resolverErrors, + partExecutions, + resolverExecutions, + executionDurationSamples, + ] = await Promise.all([ + getMetricData({ + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ReadModelResolver', + ]), + }), + getMetricData({ + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ReadModelResolver', + 'ReadModel=monitoring', + 'Resolver=failResolver', + ]), + }), + getMetricData({ + MetricName: 'Executions', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ReadModelResolver', + ]), + }), + getMetricData({ + MetricName: 'Executions', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ReadModelResolver', + 'ReadModel=monitoring', + 'Resolver=failResolver', + ]), + }), + getMetricData({ + MetricName: 'Duration', + Stat: 'SampleCount', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ReadModelResolver', + 'ReadModel=monitoring', + 'Resolver=failResolver', + 'Label=Execution', + ]), + }), + ]) + + return { + partErrors, + resolverErrors, + resolverBErrors: 0, + partExecutions, + resolverExecutions, + executionDurationSamples, + } +} + beforeAll(async () => { deploymentId = process.env.RESOLVE_TESTS_TARGET_DEPLOYMENT_ID || '' cw = new CloudWatch({}) @@ -402,16 +479,24 @@ describe('Read Model Projection monitoring', () => { }) describe('Read Model Resolver monitoring', () => { - test('read model resolverA failed', async () => { + beforeAll(async () => { + readModelResolverBaseMetrics = await collectReadModelResolverBaseMetrics() + }) + + test('read model resolver failed', async () => { 
await expect( client.query({ name: 'monitoring', - resolver: 'resolverA', + resolver: 'failResolver', args: {}, }) ).rejects.toBeInstanceOf(Error) - baseMetrics.Errors.readModelResolver.resolverA++ + readModelResolverBaseMetrics.resolverErrors++ + readModelResolverBaseMetrics.partErrors++ + readModelResolverBaseMetrics.resolverExecutions++ + readModelResolverBaseMetrics.partExecutions++ + readModelResolverBaseMetrics.executionDurationSamples++ await awaitMetricValue( { @@ -421,10 +506,10 @@ describe('Read Model Resolver monitoring', () => { `DeploymentId=${deploymentId}`, 'Part=ReadModelResolver', 'ReadModel=monitoring', - 'Resolver=resolverA', + 'Resolver=failResolver', ]), }, - baseMetrics.Errors.readModelResolver.resolverA + readModelResolverBaseMetrics.resolverErrors ) await awaitMetricValue( @@ -434,51 +519,50 @@ describe('Read Model Resolver monitoring', () => { Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ReadModelResolver', - 'ReadModel=monitoring', ]), }, - baseMetrics.Errors.readModelResolver.resolverB + - baseMetrics.Errors.readModelResolver.resolverA + readModelResolverBaseMetrics.partErrors ) - }) - - test('read model resolverB failed', async () => { - await expect( - client.query({ - name: 'monitoring', - resolver: 'resolverB', - args: {}, - }) - ).rejects.toBeInstanceOf(Error) - - baseMetrics.Errors.readModelResolver.resolverB++ await awaitMetricValue( { - MetricName: 'Errors', + MetricName: 'Executions', Stat: 'Sum', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ReadModelResolver', 'ReadModel=monitoring', - 'Resolver=resolverB', + 'Resolver=failResolver', ]), }, - baseMetrics.Errors.readModelResolver.resolverB + readModelResolverBaseMetrics.resolverExecutions ) await awaitMetricValue( { - MetricName: 'Errors', + MetricName: 'Executions', Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ReadModelResolver', + ]), + }, + readModelResolverBaseMetrics.partExecutions + ) + + await awaitMetricValue( + { + MetricName: 'Duration', + Stat: 'SampleCount', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ReadModelResolver', 'ReadModel=monitoring', + 'Resolver=failResolver', + 'Label=Execution', ]), }, - baseMetrics.Errors.readModelResolver.resolverB + - baseMetrics.Errors.readModelResolver.resolverA + readModelResolverBaseMetrics.executionDurationSamples ) }) }) diff --git a/functional-tests/app/common/read-models/monitoring.resolvers.js b/functional-tests/app/common/read-models/monitoring.resolvers.js index f9e367dffc..52ae81752e 100644 --- a/functional-tests/app/common/read-models/monitoring.resolvers.js +++ b/functional-tests/app/common/read-models/monitoring.resolvers.js @@ -1,9 +1,6 @@ const resolvers = { - resolverA: async () => { - throw Error('Test read model: resolverA failure') - }, - resolverB: async () => { - throw Error('Test read model: resolverB failure') + failResolver: async () => { + throw Error('Test read model: failResolver failure') }, } export default resolvers From cd714b751cce97ebf0b67bf0caaa3734023b23e8 Mon Sep 17 00:00:00 2001 From: timbset Date: Thu, 8 Jul 2021 00:39:10 +0300 Subject: [PATCH 06/18] Implement internal errors tests --- .eslintrc.js | 1 + .github/workflows/pr-dev.yml | 2 + functional-tests/api/monitoring.test.ts | 172 ++++++++++++++++++------ functional-tests/package.json | 1 + yarn.lock | 38 ++++++ 5 files changed, 176 insertions(+), 38 deletions(-) diff --git a/.eslintrc.js b/.eslintrc.js index 323425f1cb..d3c72485ba 100644 --- 
a/.eslintrc.js +++ b/.eslintrc.js @@ -468,6 +468,7 @@ module.exports = { 'undef', 'unfetch', 'unicode', + 'Uint8Array', 'unlink', 'unmarshall', 'unmocked', diff --git a/.github/workflows/pr-dev.yml b/.github/workflows/pr-dev.yml index f7913cc6aa..e20378abf5 100644 --- a/.github/workflows/pr-dev.yml +++ b/.github/workflows/pr-dev.yml @@ -372,6 +372,8 @@ jobs: AWS_SECRET_ACCESS_KEY: ${{ secrets.TEST_CLOUD_AWS_SECRET_ACCESS_KEY }} AWS_REGION: eu-central-1 RESOLVE_TESTS_TARGET_DEPLOYMENT_ID: ${{ steps.deploy.outputs.id }} + RESOLVE_TESTS_TARGET_VERSION: ${{ steps.publish.outputs.version }} + RESOLVE_TESTS_TARGET_STAGE: framework-test run: | cd functional-tests yarn run-test api --url=${{ steps.deploy.outputs.url }} diff --git a/functional-tests/api/monitoring.test.ts b/functional-tests/api/monitoring.test.ts index 8602637585..c431c09753 100644 --- a/functional-tests/api/monitoring.test.ts +++ b/functional-tests/api/monitoring.test.ts @@ -1,8 +1,10 @@ import { Client } from '@resolve-js/client' import { CloudWatch } from '@aws-sdk/client-cloudwatch' +import { Lambda } from '@aws-sdk/client-lambda' import { getClient } from '../utils/utils' import { isEqual } from 'lodash' import { customAlphabet } from 'nanoid' +import { parse as parseVersion } from 'semver' type BaseMetrics = { Errors: { @@ -34,12 +36,16 @@ interface CommandBaseMetrics { type ReadModelResolverBaseMetrics = { partErrors: number resolverErrors: number - resolverBErrors: number partExecutions: number resolverExecutions: number executionDurationSamples: number } +interface InternalBaseMetrics { + partErrors: number + globalErrors: number +} + interface Dimension { Name: string Value: string @@ -52,12 +58,14 @@ const attemptPeriod = 2000 // eslint-disable-next-line spellcheck/spell-checker let deploymentId: string let cw: CloudWatch +let lambda: Lambda let client: Client let startTime: Date let endTime: Date let baseMetrics: BaseMetrics let commandBaseMetrics: CommandBaseMetrics let readModelResolverBaseMetrics: ReadModelResolverBaseMetrics +let internalBaseMetrics: InternalBaseMetrics const getMetricData = async ({ MetricName, @@ -174,12 +182,12 @@ const collectBaseMetrics = async (): Promise => { } } -const collectBaseCommandMetrics = async (): Promise => { +const collectReadModelResolverBaseMetrics = async (): Promise => { const [ partErrors, - commandErrors, + resolverErrors, partExecutions, - commandExecutions, + resolverExecutions, executionDurationSamples, ] = await Promise.all([ getMetricData({ @@ -187,7 +195,7 @@ const collectBaseCommandMetrics = async (): Promise => { Stat: 'Sum', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, - 'Part=Command', + 'Part=ReadModelResolver', ]), }), getMetricData({ @@ -195,9 +203,9 @@ const collectBaseCommandMetrics = async (): Promise => { Stat: 'Sum', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, - 'Part=Command', - 'AggregateName=monitoring-aggregate', - 'Type=failCommand', + 'Part=ReadModelResolver', + 'ReadModel=monitoring', + 'Resolver=failResolver', ]), }), getMetricData({ @@ -205,7 +213,7 @@ const collectBaseCommandMetrics = async (): Promise => { Stat: 'Sum', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, - 'Part=Command', + 'Part=ReadModelResolver', ]), }), getMetricData({ @@ -213,9 +221,9 @@ const collectBaseCommandMetrics = async (): Promise => { Stat: 'Sum', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, - 'Part=Command', - 'AggregateName=monitoring-aggregate', - 'Type=failCommand', + 'Part=ReadModelResolver', + 
'ReadModel=monitoring', + 'Resolver=failResolver', ]), }), getMetricData({ @@ -223,9 +231,9 @@ const collectBaseCommandMetrics = async (): Promise => { Stat: 'SampleCount', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, - 'Part=Command', - 'AggregateName=monitoring-aggregate', - 'Type=failCommand', + 'Part=ReadModelResolver', + 'ReadModel=monitoring', + 'Resolver=failResolver', 'Label=Execution', ]), }), @@ -233,19 +241,19 @@ const collectBaseCommandMetrics = async (): Promise => { return { partErrors, - commandErrors, + resolverErrors, partExecutions, - commandExecutions, + resolverExecutions, executionDurationSamples, } } -const collectReadModelResolverBaseMetrics = async (): Promise => { +const collectCommandBaseMetrics = async (): Promise => { const [ partErrors, - resolverErrors, + commandErrors, partExecutions, - resolverExecutions, + commandExecutions, executionDurationSamples, ] = await Promise.all([ getMetricData({ @@ -253,7 +261,7 @@ const collectReadModelResolverBaseMetrics = async (): Promise => { + const [partErrors, globalErrors] = await Promise.all([ + getMetricData({ + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=Internal', + ]), + }), + getMetricData({ + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions(['Part=Internal']), + }), + ]) + + return { + partErrors, + globalErrors, + } +} + beforeAll(async () => { deploymentId = process.env.RESOLVE_TESTS_TARGET_DEPLOYMENT_ID || '' cw = new CloudWatch({}) + lambda = new Lambda({}) client = getClient() endTime = new Date(Date.now() + 3600000) // next hour startTime = new Date(Date.now() - 3600000 * 24) // previous day @@ -347,9 +378,27 @@ const awaitMetricValue = async ( } } -describe('Commands', () => { +const getFunctionName = () => { + const version = process.env.RESOLVE_TESTS_TARGET_VERSION + const parsedVersion = parseVersion(version) + + if (parsedVersion == null) { + throw new Error(`Parse version "${version}" failed`) + } + + return [ + 'app', + deploymentId, + process.env.RESOLVE_TESTS_TARGET_STAGE, + parsedVersion.major, + parsedVersion.minor, + 'x', + ].join('-') +} + +describe('Commands metrics', () => { beforeAll(async () => { - commandBaseMetrics = await collectBaseCommandMetrics() + commandBaseMetrics = await collectCommandBaseMetrics() }) test('aggregate command failed', async () => { @@ -437,7 +486,7 @@ describe('Commands', () => { }) }) -describe('Read Model Projection monitoring', () => { +describe('Read Model Projection metrics', () => { test('read model Init handler failed', async () => { await awaitMetricValue( { @@ -478,7 +527,7 @@ describe('Read Model Projection monitoring', () => { }) }) -describe('Read Model Resolver monitoring', () => { +describe('Read Model Resolver metrics', () => { beforeAll(async () => { readModelResolverBaseMetrics = await collectReadModelResolverBaseMetrics() }) @@ -566,3 +615,50 @@ describe('Read Model Resolver monitoring', () => { ) }) }) + +describe('Internal metrics', () => { + beforeAll(async () => { + internalBaseMetrics = await collectInternalBaseMetrics() + }) + + test('collects errors thrown in lambda worker', async () => { + const json = JSON.stringify({ + key: 'monitoring-test', + }) + + const payload = new Uint8Array(json.length) + + for (let i = 0; i < json.length; i++) { + payload[i] = json.charCodeAt(i) + } + + await lambda.invoke({ + FunctionName: getFunctionName(), + Payload: payload, + }) + + internalBaseMetrics.partErrors++ + internalBaseMetrics.globalErrors++ + + 
await awaitMetricValue( + { + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=Internal', + ]), + }, + internalBaseMetrics.partErrors + ) + + await awaitMetricValue( + { + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions(['Part=Internal']), + }, + internalBaseMetrics.globalErrors + ) + }) +}) diff --git a/functional-tests/package.json b/functional-tests/package.json index 1ea24a7f48..46858f20f5 100644 --- a/functional-tests/package.json +++ b/functional-tests/package.json @@ -29,6 +29,7 @@ }, "devDependencies": { "@aws-sdk/client-cloudwatch": "3.11.0", + "@aws-sdk/client-lambda": "3.11.0", "@types/fs-extra": "8.1.0", "@types/isomorphic-fetch": "0.0.35", "@types/nanoid": "2.1.0", diff --git a/yarn.lock b/yarn.lock index afdf85d3ed..b590afd6ba 100644 --- a/yarn.lock +++ b/yarn.lock @@ -282,6 +282,44 @@ fast-xml-parser "3.19.0" tslib "^2.0.0" +"@aws-sdk/client-lambda@3.11.0": + version "3.11.0" + resolved "https://registry.yarnpkg.com/@aws-sdk/client-lambda/-/client-lambda-3.11.0.tgz#8121e631ee073f9cea60f80e0f6553c0afdf772e" + integrity sha512-uXYR7TpLfDGfwhsm5/cA91GeykxDlU3y9fDu9pKdaxl3+UnF5jqletSz/EItnpVHVStoaZik4ChCeS7WQBzX6A== + dependencies: + "@aws-crypto/sha256-browser" "^1.0.0" + "@aws-crypto/sha256-js" "^1.0.0" + "@aws-sdk/config-resolver" "3.10.0" + "@aws-sdk/credential-provider-node" "3.11.0" + "@aws-sdk/fetch-http-handler" "3.10.0" + "@aws-sdk/hash-node" "3.10.0" + "@aws-sdk/invalid-dependency" "3.10.0" + "@aws-sdk/middleware-content-length" "3.10.0" + "@aws-sdk/middleware-host-header" "3.10.0" + "@aws-sdk/middleware-logger" "3.10.0" + "@aws-sdk/middleware-retry" "3.10.0" + "@aws-sdk/middleware-serde" "3.10.0" + "@aws-sdk/middleware-signing" "3.10.0" + "@aws-sdk/middleware-stack" "3.10.0" + "@aws-sdk/middleware-user-agent" "3.10.0" + "@aws-sdk/node-config-provider" "3.10.0" + "@aws-sdk/node-http-handler" "3.10.0" + "@aws-sdk/protocol-http" "3.10.0" + "@aws-sdk/smithy-client" "3.10.0" + "@aws-sdk/types" "3.10.0" + "@aws-sdk/url-parser" "3.10.0" + "@aws-sdk/url-parser-native" "3.10.0" + "@aws-sdk/util-base64-browser" "3.10.0" + "@aws-sdk/util-base64-node" "3.10.0" + "@aws-sdk/util-body-length-browser" "3.10.0" + "@aws-sdk/util-body-length-node" "3.10.0" + "@aws-sdk/util-user-agent-browser" "3.10.0" + "@aws-sdk/util-user-agent-node" "3.10.0" + "@aws-sdk/util-utf8-browser" "3.10.0" + "@aws-sdk/util-utf8-node" "3.10.0" + "@aws-sdk/util-waiter" "3.10.0" + tslib "^2.0.0" + "@aws-sdk/client-sso@3.11.0": version "3.11.0" resolved "https://registry.yarnpkg.com/@aws-sdk/client-sso/-/client-sso-3.11.0.tgz#1586e72438c017a81a0ad2ed3facc027aa364cea" From 58f0e51e663f4569294e80e847df05dccd60b3e5 Mon Sep 17 00:00:00 2001 From: timbset Date: Thu, 8 Jul 2021 01:08:03 +0300 Subject: [PATCH 07/18] Implement api handler tests --- functional-tests/api/monitoring.test.ts | 160 +++++++++++++++++- .../app/common/api-handlers/fail-api.js | 5 + functional-tests/app/config.app.js | 5 + .../runtime/src/cloud/wrap-api-handler.js | 5 +- 4 files changed, 173 insertions(+), 2 deletions(-) create mode 100644 functional-tests/app/common/api-handlers/fail-api.js diff --git a/functional-tests/api/monitoring.test.ts b/functional-tests/api/monitoring.test.ts index c431c09753..2d771e7366 100644 --- a/functional-tests/api/monitoring.test.ts +++ b/functional-tests/api/monitoring.test.ts @@ -1,7 +1,7 @@ import { Client } from '@resolve-js/client' import { CloudWatch } from '@aws-sdk/client-cloudwatch' import { Lambda } from 
'@aws-sdk/client-lambda' -import { getClient } from '../utils/utils' +import { getClient, getTargetURL } from '../utils/utils' import { isEqual } from 'lodash' import { customAlphabet } from 'nanoid' import { parse as parseVersion } from 'semver' @@ -41,6 +41,14 @@ type ReadModelResolverBaseMetrics = { executionDurationSamples: number } +type ApiHandlerBaseMetrics = { + partErrors: number + apiHandlerErrors: number + partExecutions: number + apiHandlerExecutions: number + executionDurationSamples: number +} + interface InternalBaseMetrics { partErrors: number globalErrors: number @@ -65,6 +73,7 @@ let endTime: Date let baseMetrics: BaseMetrics let commandBaseMetrics: CommandBaseMetrics let readModelResolverBaseMetrics: ReadModelResolverBaseMetrics +let apiHandlerBaseMetrics: ApiHandlerBaseMetrics let internalBaseMetrics: InternalBaseMetrics const getMetricData = async ({ @@ -314,6 +323,72 @@ const collectCommandBaseMetrics = async (): Promise => { } } +const collectApiHandlerBaseMetrics = async (): Promise => { + const [ + partErrors, + apiHandlerErrors, + partExecutions, + apiHandlerExecutions, + executionDurationSamples, + ] = await Promise.all([ + getMetricData({ + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ApiHandler', + ]), + }), + getMetricData({ + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ApiHandler', + 'Path=/api/fail-api', + 'Method=GET', + ]), + }), + getMetricData({ + MetricName: 'Executions', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ApiHandler', + ]), + }), + getMetricData({ + MetricName: 'Executions', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ApiHandler', + 'Path=/api/fail-api', + 'Method=GET', + ]), + }), + getMetricData({ + MetricName: 'Duration', + Stat: 'SampleCount', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ApiHandler', + 'Path=/api/fail-api', + 'Method=GET', + 'Label=Execution', + ]), + }), + ]) + + return { + partErrors, + apiHandlerErrors, + partExecutions, + apiHandlerExecutions, + executionDurationSamples, + } +} + const collectInternalBaseMetrics = async (): Promise => { const [partErrors, globalErrors] = await Promise.all([ getMetricData({ @@ -616,6 +691,89 @@ describe('Read Model Resolver metrics', () => { }) }) +describe('Api Handler metrics', () => { + beforeAll(async () => { + apiHandlerBaseMetrics = await collectApiHandlerBaseMetrics() + }) + + test('api handler failed', async () => { + await fetch(`${getTargetURL()}/api/fail-api`) + + apiHandlerBaseMetrics.apiHandlerErrors++ + apiHandlerBaseMetrics.partErrors++ + apiHandlerBaseMetrics.apiHandlerExecutions++ + apiHandlerBaseMetrics.partExecutions++ + apiHandlerBaseMetrics.executionDurationSamples++ + + await awaitMetricValue( + { + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ApiHandler', + 'Path=/api/fail-api', + 'Method=GET', + ]), + }, + apiHandlerBaseMetrics.apiHandlerErrors + ) + + await awaitMetricValue( + { + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ApiHandler', + ]), + }, + apiHandlerBaseMetrics.partErrors + ) + + await awaitMetricValue( + { + MetricName: 'Executions', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ApiHandler', + 'Path=/api/fail-api', + 
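+          // Path and Method correspond to the monitoring groups that
+          // wrap-api-handler attaches to every API handler invocation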
'Method=GET', + ]), + }, + apiHandlerBaseMetrics.apiHandlerExecutions + ) + + await awaitMetricValue( + { + MetricName: 'Executions', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ApiHandler', + ]), + }, + apiHandlerBaseMetrics.partExecutions + ) + + await awaitMetricValue( + { + MetricName: 'Duration', + Stat: 'SampleCount', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ApiHandler', + 'Path=/api/fail-api', + 'Method=GET', + 'Label=Execution' + ]), + }, + apiHandlerBaseMetrics.executionDurationSamples + ) + }) +}) + describe('Internal metrics', () => { beforeAll(async () => { internalBaseMetrics = await collectInternalBaseMetrics() diff --git a/functional-tests/app/common/api-handlers/fail-api.js b/functional-tests/app/common/api-handlers/fail-api.js new file mode 100644 index 0000000000..923316b8cf --- /dev/null +++ b/functional-tests/app/common/api-handlers/fail-api.js @@ -0,0 +1,5 @@ +const failApi = () => { + throw new Error('Test API: handler failed') +} + +export default failApi diff --git a/functional-tests/app/config.app.js b/functional-tests/app/config.app.js index 92538065be..ca61ce9e91 100644 --- a/functional-tests/app/config.app.js +++ b/functional-tests/app/config.app.js @@ -123,6 +123,11 @@ const appConfig = { version: '@resolve-js/runtime/lib/common/utils/interop-options.js', }, apiHandlers: [ + { + handler: 'common/api-handlers/fail-api.js', + path: '/api/fail-api', + method: 'GET', + }, { handler: '@resolve-js/runtime/lib/local/query-is-ready-handler.js', path: '/api/query-is-ready', diff --git a/packages/runtime/runtime/src/cloud/wrap-api-handler.js b/packages/runtime/runtime/src/cloud/wrap-api-handler.js index 5f606888e7..6973623e95 100644 --- a/packages/runtime/runtime/src/cloud/wrap-api-handler.js +++ b/packages/runtime/runtime/src/cloud/wrap-api-handler.js @@ -337,7 +337,10 @@ const wrapApiHandler = (handler, getCustomParameters, monitoring) => async ( req = await createRequest(lambdaEvent, customParameters) if (monitoring != null) { - pathMonitoring = monitoring.group({ Path: req.path }) + pathMonitoring = monitoring + .group({ Path: req.path }) + .group({ Method: req.method }) + pathMonitoring.time('Execution', startTimestamp) } From ccad1ac06309443be01729c3f3aeeaf0fbd6595e Mon Sep 17 00:00:00 2001 From: timbset Date: Thu, 8 Jul 2021 01:21:10 +0300 Subject: [PATCH 08/18] Delete redundant metrics --- functional-tests/api/monitoring.test.ts | 88 ------------------------- 1 file changed, 88 deletions(-) diff --git a/functional-tests/api/monitoring.test.ts b/functional-tests/api/monitoring.test.ts index 2d771e7366..f0a77f5158 100644 --- a/functional-tests/api/monitoring.test.ts +++ b/functional-tests/api/monitoring.test.ts @@ -6,25 +6,6 @@ import { isEqual } from 'lodash' import { customAlphabet } from 'nanoid' import { parse as parseVersion } from 'semver' -type BaseMetrics = { - Errors: { - commandPart: number - command: { - failCommand: number - } - readModelResolver: { - resolver: number - resolverB: number - } - } - Executions: { - commandPart: number - command: { - failCommand: number - } - } -} - interface CommandBaseMetrics { partErrors: number commandErrors: number @@ -70,7 +51,6 @@ let lambda: Lambda let client: Client let startTime: Date let endTime: Date -let baseMetrics: BaseMetrics let commandBaseMetrics: CommandBaseMetrics let readModelResolverBaseMetrics: ReadModelResolverBaseMetrics let apiHandlerBaseMetrics: ApiHandlerBaseMetrics @@ -124,73 +104,6 @@ const createDimensions = 
(list: string[]): Dimension[] => } }) -const collectBaseMetrics = async (): Promise => { - const [ - commandPartMetrics, - failCommandMetrics, - resolverMetrics, - resolverBMetrics, - ] = await Promise.all([ - getMetricData({ - MetricName: 'Errors', - Stat: 'Sum', - Dimensions: createDimensions([ - `DeploymentId=${deploymentId}`, - 'Part=Command', - ]), - }), - getMetricData({ - MetricName: 'Errors', - Stat: 'Sum', - Dimensions: createDimensions([ - `DeploymentId=${deploymentId}`, - 'Part=Command', - 'AggregateName=monitoring-aggregate', - 'Type=failCommand', - ]), - }), - getMetricData({ - MetricName: 'Errors', - Stat: 'Sum', - Dimensions: createDimensions([ - `DeploymentId=${deploymentId}`, - 'Part=ReadModelResolver', - 'ReadModel=monitoring', - 'Resolver=resolver', - ]), - }), - getMetricData({ - MetricName: 'Errors', - Stat: 'Sum', - Dimensions: createDimensions([ - `DeploymentId=${deploymentId}`, - 'Part=ReadModelResolver', - 'ReadModel=monitoring', - 'Resolver=resolverB', - ]), - }), - ]) - - return { - Errors: { - commandPart: commandPartMetrics, - command: { - failCommand: failCommandMetrics, - }, - readModelResolver: { - resolver: resolverMetrics, - resolverB: resolverBMetrics, - }, - }, - Executions: { - commandPart: 0, - command: { - failCommand: 0, - }, - }, - } -} - const collectReadModelResolverBaseMetrics = async (): Promise => { const [ partErrors, @@ -419,7 +332,6 @@ beforeAll(async () => { client = getClient() endTime = new Date(Date.now() + 3600000) // next hour startTime = new Date(Date.now() - 3600000 * 24) // previous day - baseMetrics = await collectBaseMetrics() }) const awaitMetricValue = async ( From 83590030868f5dc34563b1630cc33c344eff2d9a Mon Sep 17 00:00:00 2001 From: timbset Date: Thu, 8 Jul 2021 18:40:41 +0300 Subject: [PATCH 09/18] Add view model resolver tests --- functional-tests/api/monitoring.test.ts | 156 +++++++++++++++++- .../view-models/monitoring.projection.js | 5 + .../common/view-models/monitoring.resolver.js | 5 + functional-tests/app/config.app.js | 5 + functional-tests/utils/utils.ts | 11 ++ 5 files changed, 179 insertions(+), 3 deletions(-) create mode 100644 functional-tests/app/common/view-models/monitoring.projection.js create mode 100644 functional-tests/app/common/view-models/monitoring.resolver.js diff --git a/functional-tests/api/monitoring.test.ts b/functional-tests/api/monitoring.test.ts index f0a77f5158..8394978de6 100644 --- a/functional-tests/api/monitoring.test.ts +++ b/functional-tests/api/monitoring.test.ts @@ -14,7 +14,7 @@ interface CommandBaseMetrics { executionDurationSamples: number } -type ReadModelResolverBaseMetrics = { +type ResolverBaseMetrics = { partErrors: number resolverErrors: number partExecutions: number @@ -52,7 +52,8 @@ let client: Client let startTime: Date let endTime: Date let commandBaseMetrics: CommandBaseMetrics -let readModelResolverBaseMetrics: ReadModelResolverBaseMetrics +let readModelResolverBaseMetrics: ResolverBaseMetrics +let viewModelResolverBaseMetrics: ResolverBaseMetrics let apiHandlerBaseMetrics: ApiHandlerBaseMetrics let internalBaseMetrics: InternalBaseMetrics @@ -104,7 +105,7 @@ const createDimensions = (list: string[]): Dimension[] => } }) -const collectReadModelResolverBaseMetrics = async (): Promise => { +const collectReadModelResolverBaseMetrics = async (): Promise => { const [ partErrors, resolverErrors, @@ -170,6 +171,69 @@ const collectReadModelResolverBaseMetrics = async (): Promise => { + const [ + partErrors, + resolverErrors, + partExecutions, + resolverExecutions, + 
executionDurationSamples, + ] = await Promise.all([ + getMetricData({ + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelResolver', + ]), + }), + getMetricData({ + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelResolver', + 'ViewModel=monitoring-view-model', + ]), + }), + getMetricData({ + MetricName: 'Executions', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelResolver', + ]), + }), + getMetricData({ + MetricName: 'Executions', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelResolver', + 'ViewModel=monitoring-view-model', + ]), + }), + getMetricData({ + MetricName: 'Duration', + Stat: 'SampleCount', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelResolver', + 'ViewModel=monitoring-view-model', + 'Label=Execution', + ]), + }), + ]) + + return { + partErrors, + resolverErrors, + partExecutions, + resolverExecutions, + executionDurationSamples, + } +} + const collectCommandBaseMetrics = async (): Promise => { const [ partErrors, @@ -603,6 +667,92 @@ describe('Read Model Resolver metrics', () => { }) }) +describe('View Model resolver metrics', () => { + beforeAll(async () => { + viewModelResolverBaseMetrics = await collectViewModelResolverBaseMetrics() + }) + + test('view model resolver failed', async () => { + await expect( + client.query({ + name: 'monitoring-view-model', + aggregateIds: ['test-aggregate'], + args: {}, + }) + ).rejects.toBeInstanceOf(Error) + + viewModelResolverBaseMetrics.resolverErrors++ + viewModelResolverBaseMetrics.partErrors++ + viewModelResolverBaseMetrics.resolverExecutions++ + viewModelResolverBaseMetrics.partExecutions++ + viewModelResolverBaseMetrics.executionDurationSamples++ + + await awaitMetricValue( + { + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelResolver', + 'ViewModel=monitoring-view-model', + ]), + }, + viewModelResolverBaseMetrics.resolverErrors + ) + + await awaitMetricValue( + { + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelResolver', + ]), + }, + viewModelResolverBaseMetrics.partErrors + ) + + await awaitMetricValue( + { + MetricName: 'Executions', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelResolver', + 'ViewModel=monitoring-view-model', + ]), + }, + viewModelResolverBaseMetrics.resolverExecutions + ) + + await awaitMetricValue( + { + MetricName: 'Executions', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelResolver', + ]), + }, + viewModelResolverBaseMetrics.partExecutions + ) + + await awaitMetricValue( + { + MetricName: 'Duration', + Stat: 'SampleCount', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelResolver', + 'ViewModel=monitoring-view-model', + 'Label=Execution', + ]), + }, + viewModelResolverBaseMetrics.executionDurationSamples + ) + }) +}) + describe('Api Handler metrics', () => { beforeAll(async () => { apiHandlerBaseMetrics = await collectApiHandlerBaseMetrics() diff --git a/functional-tests/app/common/view-models/monitoring.projection.js b/functional-tests/app/common/view-models/monitoring.projection.js new file mode 100644 index 0000000000..3a4aae0e78 --- 
/dev/null +++ b/functional-tests/app/common/view-models/monitoring.projection.js @@ -0,0 +1,5 @@ +const projection = { + Init: () => null, +} + +export default projection diff --git a/functional-tests/app/common/view-models/monitoring.resolver.js b/functional-tests/app/common/view-models/monitoring.resolver.js new file mode 100644 index 0000000000..eaa3118f92 --- /dev/null +++ b/functional-tests/app/common/view-models/monitoring.resolver.js @@ -0,0 +1,5 @@ +const resolver = () => { + throw new Error('Test error: view model resolver failed') +} + +export default resolver diff --git a/functional-tests/app/config.app.js b/functional-tests/app/config.app.js index ca61ce9e91..0a2868f6e5 100644 --- a/functional-tests/app/config.app.js +++ b/functional-tests/app/config.app.js @@ -106,6 +106,11 @@ const appConfig = { projection: 'common/view-models/custom-aggregate-ids.projection.js', resolver: 'common/view-models/custom-aggregate-ids.resolver.js', }, + { + name: 'monitoring-view-model', + projection: 'common/view-models/monitoring.projection.js', + resolver: 'common/view-models/monitoring.resolver.js', + }, ], sagas: [ { diff --git a/functional-tests/utils/utils.ts b/functional-tests/utils/utils.ts index 3387a37f59..69d3f45da1 100644 --- a/functional-tests/utils/utils.ts +++ b/functional-tests/utils/utils.ts @@ -1,5 +1,8 @@ import { getClient as getClientInternal, Context } from '@resolve-js/client' +import monitoringResolver from '../app/common/view-models/monitoring.resolver' +import monitoringProjection from '../app/common/view-models/monitoring.projection' + export const getTargetURL = () => process.env.RESOLVE_TESTS_TARGET_URL || 'http://0.0.0.0:3000' @@ -7,6 +10,14 @@ const buildContext = (contextOverrides: any): Context => ({ origin: getTargetURL(), rootPath: '', staticPath: 'static', + viewModels: [ + { + name: 'monitoring-view-model', + resolver: monitoringResolver, + projection: monitoringProjection, + deserializeState: (state) => JSON.parse(state), + }, + ], ...contextOverrides, }) From 078cf7b9b9d9cea2fe3dc42d1b751c9b71dbdbcb Mon Sep 17 00:00:00 2001 From: timbset Date: Fri, 9 Jul 2021 13:43:53 +0300 Subject: [PATCH 10/18] Add view model projection tests --- functional-tests/api/monitoring.test.ts | 240 ++++++++++++++++-- .../common/aggregates/monitoring.commands.js | 9 +- functional-tests/app/common/event-types.js | 1 + .../view-models/init-failed.projection.js | 7 + .../view-models/monitoring.projection.js | 5 + functional-tests/app/config.app.js | 4 + functional-tests/utils/utils.ts | 8 + 7 files changed, 254 insertions(+), 20 deletions(-) create mode 100644 functional-tests/app/common/view-models/init-failed.projection.js diff --git a/functional-tests/api/monitoring.test.ts b/functional-tests/api/monitoring.test.ts index 8394978de6..6005aadda4 100644 --- a/functional-tests/api/monitoring.test.ts +++ b/functional-tests/api/monitoring.test.ts @@ -22,6 +22,18 @@ type ResolverBaseMetrics = { executionDurationSamples: number } +type ViewModelBaseMetrics = { + resolverPartErrors: number + resolverErrors: number + resolverInitErrors: number + projectionPartErrors: number + projectionInitErrors: number + projectionErrors: number + resolverPartExecutions: number + resolverExecutions: number + executionDurationSamples: number +} + type ApiHandlerBaseMetrics = { partErrors: number apiHandlerErrors: number @@ -53,7 +65,7 @@ let startTime: Date let endTime: Date let commandBaseMetrics: CommandBaseMetrics let readModelResolverBaseMetrics: ResolverBaseMetrics -let viewModelResolverBaseMetrics: 
ResolverBaseMetrics +let viewModelBaseMetrics: ViewModelBaseMetrics let apiHandlerBaseMetrics: ApiHandlerBaseMetrics let internalBaseMetrics: InternalBaseMetrics @@ -171,13 +183,17 @@ const collectReadModelResolverBaseMetrics = async (): Promise => { +const collectViewModelBaseMetrics = async (): Promise => { const [ - partErrors, + resolverPartErrors, resolverErrors, - partExecutions, + resolverInitErrors, + resolverPartExecutions, resolverExecutions, executionDurationSamples, + projectionPartErrors, + projectionErrors, + projectionInitErrors, ] = await Promise.all([ getMetricData({ MetricName: 'Errors', @@ -196,6 +212,15 @@ const collectViewModelResolverBaseMetrics = async (): Promise { lambda = new Lambda({}) client = getClient() endTime = new Date(Date.now() + 3600000) // next hour - startTime = new Date(Date.now() - 3600000 * 24) // previous day + startTime = new Date(Date.now() - 3600000 * 24 * 7) // previous day }) const awaitMetricValue = async ( @@ -575,6 +630,34 @@ describe('Read Model Projection metrics', () => { }, 1 ) + + await awaitMetricValue( + { + MetricName: 'Duration', + Stat: 'SampleCount', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ReadModelProjection', + 'ReadModel=monitoring', + 'Label=EventApply', + ]), + }, + 1 + ) + + await awaitMetricValue( + { + MetricName: 'Duration', + Stat: 'SampleCount', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ReadModelProjection', + 'ReadModel=monitoring', + 'Label=EventProjectionApply', + ]), + }, + 1 + ) }) }) @@ -667,9 +750,9 @@ describe('Read Model Resolver metrics', () => { }) }) -describe('View Model resolver metrics', () => { +describe('View Model metrics', () => { beforeAll(async () => { - viewModelResolverBaseMetrics = await collectViewModelResolverBaseMetrics() + viewModelBaseMetrics = await collectViewModelBaseMetrics() }) test('view model resolver failed', async () => { @@ -681,11 +764,11 @@ describe('View Model resolver metrics', () => { }) ).rejects.toBeInstanceOf(Error) - viewModelResolverBaseMetrics.resolverErrors++ - viewModelResolverBaseMetrics.partErrors++ - viewModelResolverBaseMetrics.resolverExecutions++ - viewModelResolverBaseMetrics.partExecutions++ - viewModelResolverBaseMetrics.executionDurationSamples++ + viewModelBaseMetrics.resolverErrors++ + viewModelBaseMetrics.resolverPartErrors++ + viewModelBaseMetrics.resolverExecutions++ + viewModelBaseMetrics.resolverPartExecutions++ + viewModelBaseMetrics.executionDurationSamples++ await awaitMetricValue( { @@ -697,7 +780,7 @@ describe('View Model resolver metrics', () => { 'ViewModel=monitoring-view-model', ]), }, - viewModelResolverBaseMetrics.resolverErrors + viewModelBaseMetrics.resolverErrors ) await awaitMetricValue( @@ -709,7 +792,7 @@ describe('View Model resolver metrics', () => { 'Part=ViewModelResolver', ]), }, - viewModelResolverBaseMetrics.partErrors + viewModelBaseMetrics.resolverPartErrors ) await awaitMetricValue( @@ -722,7 +805,7 @@ describe('View Model resolver metrics', () => { 'ViewModel=monitoring-view-model', ]), }, - viewModelResolverBaseMetrics.resolverExecutions + viewModelBaseMetrics.resolverExecutions ) await awaitMetricValue( @@ -734,7 +817,7 @@ describe('View Model resolver metrics', () => { 'Part=ViewModelResolver', ]), }, - viewModelResolverBaseMetrics.partExecutions + viewModelBaseMetrics.resolverPartExecutions ) await awaitMetricValue( @@ -748,7 +831,126 @@ describe('View Model resolver metrics', () => { 'Label=Execution', ]), }, - 
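+      // a Duration sample with Label=Execution is recorded for every resolver
+      // run, including the failing query issued above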
viewModelResolverBaseMetrics.executionDurationSamples + viewModelBaseMetrics.executionDurationSamples + ) + }) + + test('view model Init handler failed', async () => { + await expect( + client.query({ + name: 'init-failed-view-model', + aggregateIds: ['test-aggregate'], + args: {}, + }) + ).rejects.toThrowError() + + viewModelBaseMetrics.resolverInitErrors++ + viewModelBaseMetrics.resolverPartErrors++ + viewModelBaseMetrics.resolverPartExecutions++ + viewModelBaseMetrics.executionDurationSamples++ + viewModelBaseMetrics.projectionPartErrors++ + viewModelBaseMetrics.projectionInitErrors++ + + await awaitMetricValue( + { + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelResolver', + 'ViewModel=init-failed-view-model', + ]), + }, + viewModelBaseMetrics.resolverInitErrors + ) + + await awaitMetricValue( + { + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelProjection', + 'ViewModel=init-failed-view-model', + 'EventType=Init', + ]), + }, + viewModelBaseMetrics.projectionInitErrors + ) + + await awaitMetricValue( + { + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelProjection', + ]), + }, + viewModelBaseMetrics.projectionPartErrors + ) + }) + + test('view model event handler failed', async () => { + await client.command({ + aggregateId: 'fail-aggregate', + aggregateName: 'monitoring-aggregate', + type: 'failReadModelProjection', + payload: {}, + }) + + await expect( + client.query({ + name: 'monitoring-view-model', + aggregateIds: ['fail-aggregate'], + args: {}, + }) + ).rejects.toBeInstanceOf(Error) + + viewModelBaseMetrics.resolverErrors++ + viewModelBaseMetrics.resolverPartErrors++ + viewModelBaseMetrics.resolverPartExecutions++ + viewModelBaseMetrics.executionDurationSamples++ + viewModelBaseMetrics.projectionPartErrors++ + viewModelBaseMetrics.projectionErrors++ + + await awaitMetricValue( + { + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelResolver', + 'ViewModel=monitoring-view-model', + ]), + }, + viewModelBaseMetrics.resolverErrors + ) + + await awaitMetricValue( + { + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelProjection', + 'ViewModel=monitoring-view-model', + 'EventType=MONITORING_VIEW_MODEL_FAILED', + ]), + }, + viewModelBaseMetrics.projectionErrors + ) + + await awaitMetricValue( + { + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelProjection', + ]), + }, + viewModelBaseMetrics.projectionPartErrors ) }) }) diff --git a/functional-tests/app/common/aggregates/monitoring.commands.js b/functional-tests/app/common/aggregates/monitoring.commands.js index 90746e2370..40b331626a 100644 --- a/functional-tests/app/common/aggregates/monitoring.commands.js +++ b/functional-tests/app/common/aggregates/monitoring.commands.js @@ -1,10 +1,17 @@ -import { MONITORING_FAILED_HANDLER } from '../event-types' +import { + MONITORING_FAILED_HANDLER, + MONITORING_VIEW_MODEL_FAILED, +} from '../event-types' const aggregate = { failReadModelProjection: () => ({ type: MONITORING_FAILED_HANDLER, payload: {}, }), + failViewModelProjection: () => ({ + type: MONITORING_VIEW_MODEL_FAILED, + payload: {}, + }), failCommand: () => { throw new Error('Test aggregate: command 
failed') }, diff --git a/functional-tests/app/common/event-types.js b/functional-tests/app/common/event-types.js index 9520c680ac..26cb23b24d 100644 --- a/functional-tests/app/common/event-types.js +++ b/functional-tests/app/common/event-types.js @@ -8,3 +8,4 @@ export const TEST_SCENARIO_RETRY_ON_ERROR_UNBLOCKED = export const TEST_SCENARIO_RETRY_ON_ERROR_COMPLETED = 'TEST_SCENARIO_RETRY_ON_ERROR_COMPLETED' export const MONITORING_FAILED_HANDLER = 'MONITORING_FAILED_HANDLER' +export const MONITORING_VIEW_MODEL_FAILED = 'MONITORING_VIEW_MODEL_FAILED' diff --git a/functional-tests/app/common/view-models/init-failed.projection.js b/functional-tests/app/common/view-models/init-failed.projection.js new file mode 100644 index 0000000000..7c905996dd --- /dev/null +++ b/functional-tests/app/common/view-models/init-failed.projection.js @@ -0,0 +1,7 @@ +const initFailed = { + Init: () => { + throw new Error('Test error: init failed') + }, +} + +export default initFailed diff --git a/functional-tests/app/common/view-models/monitoring.projection.js b/functional-tests/app/common/view-models/monitoring.projection.js index 3a4aae0e78..6bae652e3e 100644 --- a/functional-tests/app/common/view-models/monitoring.projection.js +++ b/functional-tests/app/common/view-models/monitoring.projection.js @@ -1,5 +1,10 @@ +import { MONITORING_VIEW_MODEL_FAILED } from '../event-types' + const projection = { Init: () => null, + [MONITORING_VIEW_MODEL_FAILED]: () => { + throw new Error('Test error: view model projection failed') + }, } export default projection diff --git a/functional-tests/app/config.app.js b/functional-tests/app/config.app.js index 0a2868f6e5..2df2156701 100644 --- a/functional-tests/app/config.app.js +++ b/functional-tests/app/config.app.js @@ -111,6 +111,10 @@ const appConfig = { projection: 'common/view-models/monitoring.projection.js', resolver: 'common/view-models/monitoring.resolver.js', }, + { + name: 'init-failed-view-model', + projection: 'common/view-models/init-failed.projection.js', + }, ], sagas: [ { diff --git a/functional-tests/utils/utils.ts b/functional-tests/utils/utils.ts index 69d3f45da1..bb70c78413 100644 --- a/functional-tests/utils/utils.ts +++ b/functional-tests/utils/utils.ts @@ -3,6 +3,8 @@ import { getClient as getClientInternal, Context } from '@resolve-js/client' import monitoringResolver from '../app/common/view-models/monitoring.resolver' import monitoringProjection from '../app/common/view-models/monitoring.projection' +import initFailedProjection from '../app/common/view-models/init-failed.projection' + export const getTargetURL = () => process.env.RESOLVE_TESTS_TARGET_URL || 'http://0.0.0.0:3000' @@ -17,6 +19,12 @@ const buildContext = (contextOverrides: any): Context => ({ projection: monitoringProjection, deserializeState: (state) => JSON.parse(state), }, + { + name: 'init-failed-view-model', + projection: initFailedProjection, + resolver: () => void 0, + deserializeState: (state) => JSON.parse(state), + }, ], ...contextOverrides, }) From 30734e801be02ac8b8e8ecc24ea3ed27707386a5 Mon Sep 17 00:00:00 2001 From: timbset Date: Fri, 9 Jul 2021 13:51:30 +0300 Subject: [PATCH 11/18] Add yarn.lock change after install check --- .github/workflows/pr-dev.yml | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pr-dev.yml b/.github/workflows/pr-dev.yml index e20378abf5..4c66af6bc3 100644 --- a/.github/workflows/pr-dev.yml +++ b/.github/workflows/pr-dev.yml @@ -80,7 +80,17 @@ jobs: key: ${{ runner.os }}-build-${{ github.run_id }} - 
name: Install - run: yarn install --frozen-lockfile + run: yarn install + + - name: Check if yarn.lock changed + id: git-diff + uses: technote-space/get-diff-action@v4 + with: + FILES: yarn.lock + + - name: Fail job if yarn.lock changed\ + if: steps.git-diff.output.diff + run: exit 1 - name: Validate yarn.lock run: yarn validate-lock-file From b56f465bd6b6dcdd7a7d1fd63759818ef81d3d8c Mon Sep 17 00:00:00 2001 From: timbset Date: Fri, 9 Jul 2021 13:52:06 +0300 Subject: [PATCH 12/18] Delete one package from yarn.lock --- yarn.lock | 7 ------- 1 file changed, 7 deletions(-) diff --git a/yarn.lock b/yarn.lock index b590afd6ba..c7030a4372 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,13 +2,6 @@ # yarn lockfile v1 -"@algolia/autocomplete-core@1.0.0-alpha.44": - version "1.0.0-alpha.44" - resolved "https://registry.yarnpkg.com/@algolia/autocomplete-core/-/autocomplete-core-1.0.0-alpha.44.tgz#e626dba45f5f3950d6beb0ab055395ef0f7e8bb2" - integrity sha512-2iMXthldMIDXtlbg9omRKLgg1bLo2ZzINAEqwhNjUeyj1ceEyL1ck6FY0VnJpf2LsjmNthHCz2BuFk+nYUeDNA== - dependencies: - "@algolia/autocomplete-shared" "1.0.0-alpha.44" - "@algolia/autocomplete-preset-algolia@1.0.0-alpha.44": version "1.0.0-alpha.44" resolved "https://registry.yarnpkg.com/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.0.0-alpha.44.tgz#0ea0b255d0be10fbe262e281472dd6e4619b62ba" From aec0765495fd6eda23e02bead6a64c332c132979 Mon Sep 17 00:00:00 2001 From: timbset Date: Fri, 9 Jul 2021 14:15:23 +0300 Subject: [PATCH 13/18] Improve yarn.lock check --- .github/workflows/pr-dev.yml | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/pr-dev.yml b/.github/workflows/pr-dev.yml index 4c66af6bc3..7967811dc5 100644 --- a/.github/workflows/pr-dev.yml +++ b/.github/workflows/pr-dev.yml @@ -80,21 +80,21 @@ jobs: key: ${{ runner.os }}-build-${{ github.run_id }} - name: Install - run: yarn install + run: yarn install --ignore-scripts - name: Check if yarn.lock changed - id: git-diff - uses: technote-space/get-diff-action@v4 - with: - FILES: yarn.lock + run: echo "git_diff=$(git diff --name-only yarn.lock)" >> $GITHUB_ENV - - name: Fail job if yarn.lock changed\ - if: steps.git-diff.output.diff + - name: Fail job if yarn.lock changed + if: env.git_diff run: exit 1 - name: Validate yarn.lock run: yarn validate-lock-file + - name: Build + run: yarn prepare + - name: Run Prettier run: yarn prettier:check From da9f628fec490e9a85f268ded4e335f146840cca Mon Sep 17 00:00:00 2001 From: timbset Date: Fri, 9 Jul 2021 14:21:08 +0300 Subject: [PATCH 14/18] Revert "Delete one package from yarn.lock" This reverts commit b56f465b --- yarn.lock | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/yarn.lock b/yarn.lock index c7030a4372..b590afd6ba 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2,6 +2,13 @@ # yarn lockfile v1 +"@algolia/autocomplete-core@1.0.0-alpha.44": + version "1.0.0-alpha.44" + resolved "https://registry.yarnpkg.com/@algolia/autocomplete-core/-/autocomplete-core-1.0.0-alpha.44.tgz#e626dba45f5f3950d6beb0ab055395ef0f7e8bb2" + integrity sha512-2iMXthldMIDXtlbg9omRKLgg1bLo2ZzINAEqwhNjUeyj1ceEyL1ck6FY0VnJpf2LsjmNthHCz2BuFk+nYUeDNA== + dependencies: + "@algolia/autocomplete-shared" "1.0.0-alpha.44" + "@algolia/autocomplete-preset-algolia@1.0.0-alpha.44": version "1.0.0-alpha.44" resolved "https://registry.yarnpkg.com/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.0.0-alpha.44.tgz#0ea0b255d0be10fbe262e281472dd6e4619b62ba" From 54f3f2a45306169aef1aeae9b021cf150f96f657 Mon Sep 17 
00:00:00 2001
From: timbset
Date: Fri, 9 Jul 2021 14:53:50 +0300
Subject: [PATCH 15/18] Run prettier

---
 .prettierignore                         | 2 ++
 functional-tests/api/monitoring.test.ts | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/.prettierignore b/.prettierignore
index b07dd215ee..e5a6cb6306 100644
--- a/.prettierignore
+++ b/.prettierignore
@@ -4,8 +4,10 @@ packages/**/dist
 examples/**/dist
 examples/**/dist-replica
 examples/**/lib
+templates/**/dist
 examples/shopping-list-advanced/native/resolve/
 packages/core/zeromq/optional
 **/*.d.ts
 packages/core/react-hooks/src/index.ts
 website/**
+functional-tests/app/**/dist
diff --git a/functional-tests/api/monitoring.test.ts b/functional-tests/api/monitoring.test.ts
index 6005aadda4..f9b44bcd98 100644
--- a/functional-tests/api/monitoring.test.ts
+++ b/functional-tests/api/monitoring.test.ts
@@ -1030,7 +1030,7 @@ describe('Api Handler metrics', () => {
          'Part=ApiHandler',
          'Path=/api/fail-api',
          'Method=GET',
-          'Label=Execution'
+          'Label=Execution',
        ]),
      },
      apiHandlerBaseMetrics.executionDurationSamples

From 73b58204fa0727cd8550bedc88999fd5b9e5059f Mon Sep 17 00:00:00 2001
From: timbset
Date: Fri, 9 Jul 2021 16:46:14 +0300
Subject: [PATCH 16/18] Fix lint

---
 .eslintrc.js                                                | 2 +-
 examples/ts/hacker-news/client/components/Comment.tsx       | 2 +-
 .../adapters/readmodel-adapters/readmodel-base/src/index.ts | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/.eslintrc.js b/.eslintrc.js
index d3c72485ba..5506a169fe 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -468,7 +468,7 @@ module.exports = {
     'undef',
     'unfetch',
     'unicode',
-    'Uint8Array',
+    'uint',
     'unlink',
     'unmarshall',
     'unmocked',
diff --git a/examples/ts/hacker-news/client/components/Comment.tsx b/examples/ts/hacker-news/client/components/Comment.tsx
index b7d8674616..ee07ff1fe9 100644
--- a/examples/ts/hacker-news/client/components/Comment.tsx
+++ b/examples/ts/hacker-news/client/components/Comment.tsx
@@ -1,4 +1,4 @@
-import React, { useState, useCallback } from 'react'
+import React, { useState } from 'react'
 import sanitizer from 'sanitizer'
 import styled from 'styled-components'

diff --git a/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/index.ts b/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/index.ts
index c82796ad88..7d5558d73c 100644
--- a/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/index.ts
+++ b/packages/runtime/adapters/readmodel-adapters/readmodel-base/src/index.ts
@@ -1,3 +1,3 @@
-export { default as default } from './create-adapter'
+export { default } from './create-adapter'
 export { default as splitNestedPath } from './split-nested-path'
 export * from './types'

From 06bff8ea9cf8277b349fdaba7a38a9d2cae8c8ed Mon Sep 17 00:00:00 2001
From: timbset
Date: Fri, 9 Jul 2021 17:01:39 +0300
Subject: [PATCH 17/18] Fix CI

---
 .github/workflows/pr-dev.yml | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/.github/workflows/pr-dev.yml b/.github/workflows/pr-dev.yml
index 7967811dc5..2f00d72b1f 100644
--- a/.github/workflows/pr-dev.yml
+++ b/.github/workflows/pr-dev.yml
@@ -80,7 +80,7 @@ jobs:
         key: ${{ runner.os }}-build-${{ github.run_id }}

       - name: Install
-        run: yarn install --ignore-scripts
+        run: yarn install

       - name: Check if yarn.lock changed
         run: echo "git_diff=$(git diff --name-only yarn.lock)" >> $GITHUB_ENV
@@ -92,9 +92,6 @@ jobs:
       - name: Validate yarn.lock
         run: yarn validate-lock-file

-      - name: Build
-        run: yarn prepare
-
       - name: Run Prettier
         run: yarn prettier:check

From
5a3f4da561198c72357e56d614b162f8b5680b5a Mon Sep 17 00:00:00 2001 From: timbset Date: Mon, 12 Jul 2021 23:17:48 +0300 Subject: [PATCH 18/18] Fix view models monitoring functional tests --- functional-tests/api/monitoring.test.ts | 178 ++++++++++-------- .../common/aggregates/monitoring.commands.js | 5 + functional-tests/app/common/event-types.js | 1 + .../read-models/monitoring.projection.js | 10 +- .../view-models/resolver-failed.projection.js | 5 + ...esolver.js => resolver-failed.resolver.js} | 0 functional-tests/app/config.app.js | 5 + functional-tests/utils/utils.ts | 11 +- .../get-view-models-interop-builder.ts | 25 ++- 9 files changed, 144 insertions(+), 96 deletions(-) create mode 100644 functional-tests/app/common/view-models/resolver-failed.projection.js rename functional-tests/app/common/view-models/{monitoring.resolver.js => resolver-failed.resolver.js} (100%) diff --git a/functional-tests/api/monitoring.test.ts b/functional-tests/api/monitoring.test.ts index f9b44bcd98..5fcce6e2ae 100644 --- a/functional-tests/api/monitoring.test.ts +++ b/functional-tests/api/monitoring.test.ts @@ -22,16 +22,21 @@ type ResolverBaseMetrics = { executionDurationSamples: number } -type ViewModelBaseMetrics = { - resolverPartErrors: number +type SingleViewModelBaseMetrics = { resolverErrors: number - resolverInitErrors: number - projectionPartErrors: number - projectionInitErrors: number + resolverExecutions: number + resolverExecutionDurationSamples: number projectionErrors: number +} + +type ViewModelBaseMetrics = { + resolverPartErrors: number resolverPartExecutions: number - resolverExecutions: number - executionDurationSamples: number + projectionPartErrors: number + + monitoring: SingleViewModelBaseMetrics + initFailed: SingleViewModelBaseMetrics + resolverFailed: SingleViewModelBaseMetrics } type ApiHandlerBaseMetrics = { @@ -183,17 +188,15 @@ const collectReadModelResolverBaseMetrics = async (): Promise => { +const collectSingViewModelBaseMetrics = async ( + name: string, + skipProjectionErrors = false +): Promise => { const [ - resolverPartErrors, resolverErrors, - resolverInitErrors, - resolverPartExecutions, resolverExecutions, - executionDurationSamples, - projectionPartErrors, + resolverExecutionDurationSamples, projectionErrors, - projectionInitErrors, ] = await Promise.all([ getMetricData({ MetricName: 'Errors', @@ -201,24 +204,7 @@ const collectViewModelBaseMetrics = async (): Promise => { Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ViewModelResolver', - ]), - }), - getMetricData({ - MetricName: 'Errors', - Stat: 'Sum', - Dimensions: createDimensions([ - `DeploymentId=${deploymentId}`, - 'Part=ViewModelResolver', - 'ViewModel=monitoring-view-model', - ]), - }), - getMetricData({ - MetricName: 'Errors', - Stat: 'Sum', - Dimensions: createDimensions([ - `DeploymentId=${deploymentId}`, - 'Part=ViewModelResolver', - 'ViewModel=init-failed-view-model', + `ViewModel=${name}`, ]), }), getMetricData({ @@ -227,15 +213,7 @@ const collectViewModelBaseMetrics = async (): Promise => { Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ViewModelResolver', - ]), - }), - getMetricData({ - MetricName: 'Executions', - Stat: 'Sum', - Dimensions: createDimensions([ - `DeploymentId=${deploymentId}`, - 'Part=ViewModelResolver', - 'ViewModel=monitoring-view-model', + `ViewModel=${name}`, ]), }), getMetricData({ @@ -244,25 +222,54 @@ const collectViewModelBaseMetrics = async (): Promise => { Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 
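+          // these dimensions scope the query to a single view model's
+          // resolver within the current deployment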
'Part=ViewModelResolver', - 'ViewModel=monitoring-view-model', + `ViewModel=${name}`, 'Label=Execution', ]), }), + !skipProjectionErrors + ? getMetricData({ + MetricName: 'Errors', + Stat: 'Sum', + Dimensions: createDimensions([ + `DeploymentId=${deploymentId}`, + 'Part=ViewModelProjection', + `ViewModel=${name}`, + ]), + }) + : 0, + ]) + + return { + resolverErrors, + resolverExecutions, + resolverExecutionDurationSamples, + projectionErrors, + } +} + +const collectViewModelBaseMetrics = async (): Promise => { + const [ + resolverPartErrors, + resolverPartExecutions, + projectionPartErrors, + initFailed, + resolverFailed, + monitoring, + ] = await Promise.all([ getMetricData({ MetricName: 'Errors', Stat: 'Sum', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, - 'Part=ViewModelProjection', + 'Part=ViewModelResolver', ]), }), getMetricData({ - MetricName: 'Errors', + MetricName: 'Executions', Stat: 'Sum', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, - 'Part=ViewModelProjection', - 'ViewModel=monitoring-view-model', + 'Part=ViewModelResolver', ]), }), getMetricData({ @@ -271,21 +278,20 @@ const collectViewModelBaseMetrics = async (): Promise => { Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ViewModelProjection', - 'ViewModel=init-failed-view-model', ]), }), + collectSingViewModelBaseMetrics('init-failed-view-model'), + collectSingViewModelBaseMetrics('resolver-failed-view-model', true), + collectSingViewModelBaseMetrics('monitoring-view-model'), ]) return { resolverPartErrors, - resolverErrors, - resolverInitErrors, resolverPartExecutions, - resolverExecutions, - executionDurationSamples, projectionPartErrors, - projectionErrors, - projectionInitErrors, + initFailed, + monitoring, + resolverFailed, } } @@ -609,7 +615,14 @@ describe('Read Model Projection metrics', () => { ) }) - test('read model event handler failed', async () => { + test('read model event handler', async () => { + await client.command({ + aggregateId: 'any', + aggregateName: 'monitoring-aggregate', + type: 'executeReadModelProjection', + payload: {}, + }) + await client.command({ aggregateId: 'any', aggregateName: 'monitoring-aggregate', @@ -619,13 +632,13 @@ describe('Read Model Projection metrics', () => { await awaitMetricValue( { - MetricName: 'Errors', - Stat: 'Sum', + MetricName: 'Duration', + Stat: 'SampleCount', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ReadModelProjection', 'ReadModel=monitoring', - 'EventType=MONITORING_FAILED_HANDLER', + 'Label=EventApply', ]), }, 1 @@ -639,7 +652,7 @@ describe('Read Model Projection metrics', () => { `DeploymentId=${deploymentId}`, 'Part=ReadModelProjection', 'ReadModel=monitoring', - 'Label=EventApply', + 'Label=EventProjectionApply', ]), }, 1 @@ -647,13 +660,13 @@ describe('Read Model Projection metrics', () => { await awaitMetricValue( { - MetricName: 'Duration', - Stat: 'SampleCount', + MetricName: 'Errors', + Stat: 'Sum', Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ReadModelProjection', 'ReadModel=monitoring', - 'Label=EventProjectionApply', + 'EventType=MONITORING_FAILED_HANDLER', ]), }, 1 @@ -758,17 +771,17 @@ describe('View Model metrics', () => { test('view model resolver failed', async () => { await expect( client.query({ - name: 'monitoring-view-model', + name: 'resolver-failed-view-model', aggregateIds: ['test-aggregate'], args: {}, }) ).rejects.toBeInstanceOf(Error) - viewModelBaseMetrics.resolverErrors++ viewModelBaseMetrics.resolverPartErrors++ - 
viewModelBaseMetrics.resolverExecutions++ viewModelBaseMetrics.resolverPartExecutions++ - viewModelBaseMetrics.executionDurationSamples++ + viewModelBaseMetrics.resolverFailed.resolverErrors++ + viewModelBaseMetrics.resolverFailed.resolverExecutions++ + viewModelBaseMetrics.resolverFailed.resolverExecutionDurationSamples++ await awaitMetricValue( { @@ -777,10 +790,10 @@ describe('View Model metrics', () => { Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ViewModelResolver', - 'ViewModel=monitoring-view-model', + 'ViewModel=resolver-failed-view-model', ]), }, - viewModelBaseMetrics.resolverErrors + viewModelBaseMetrics.resolverFailed.resolverErrors ) await awaitMetricValue( @@ -802,10 +815,10 @@ describe('View Model metrics', () => { Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ViewModelResolver', - 'ViewModel=monitoring-view-model', + 'ViewModel=resolver-failed-view-model', ]), }, - viewModelBaseMetrics.resolverExecutions + viewModelBaseMetrics.resolverFailed.resolverExecutions ) await awaitMetricValue( @@ -827,11 +840,11 @@ describe('View Model metrics', () => { Dimensions: createDimensions([ `DeploymentId=${deploymentId}`, 'Part=ViewModelResolver', - 'ViewModel=monitoring-view-model', + 'ViewModel=resolver-failed-view-model', 'Label=Execution', ]), }, - viewModelBaseMetrics.executionDurationSamples + viewModelBaseMetrics.resolverFailed.resolverExecutionDurationSamples ) }) @@ -844,12 +857,12 @@ describe('View Model metrics', () => { }) ).rejects.toThrowError() - viewModelBaseMetrics.resolverInitErrors++ viewModelBaseMetrics.resolverPartErrors++ viewModelBaseMetrics.resolverPartExecutions++ - viewModelBaseMetrics.executionDurationSamples++ viewModelBaseMetrics.projectionPartErrors++ - viewModelBaseMetrics.projectionInitErrors++ + viewModelBaseMetrics.initFailed.resolverErrors++ + viewModelBaseMetrics.initFailed.resolverExecutions++ + viewModelBaseMetrics.initFailed.projectionErrors++ await awaitMetricValue( { @@ -861,7 +874,7 @@ describe('View Model metrics', () => { 'ViewModel=init-failed-view-model', ]), }, - viewModelBaseMetrics.resolverInitErrors + viewModelBaseMetrics.initFailed.resolverErrors ) await awaitMetricValue( @@ -875,7 +888,7 @@ describe('View Model metrics', () => { 'EventType=Init', ]), }, - viewModelBaseMetrics.projectionInitErrors + viewModelBaseMetrics.initFailed.projectionErrors ) await awaitMetricValue( @@ -895,7 +908,7 @@ describe('View Model metrics', () => { await client.command({ aggregateId: 'fail-aggregate', aggregateName: 'monitoring-aggregate', - type: 'failReadModelProjection', + type: 'failViewModelProjection', payload: {}, }) @@ -907,12 +920,13 @@ describe('View Model metrics', () => { }) ).rejects.toBeInstanceOf(Error) - viewModelBaseMetrics.resolverErrors++ viewModelBaseMetrics.resolverPartErrors++ viewModelBaseMetrics.resolverPartExecutions++ - viewModelBaseMetrics.executionDurationSamples++ viewModelBaseMetrics.projectionPartErrors++ - viewModelBaseMetrics.projectionErrors++ + viewModelBaseMetrics.monitoring.resolverErrors++ + viewModelBaseMetrics.monitoring.resolverExecutions++ + viewModelBaseMetrics.monitoring.projectionErrors++ + viewModelBaseMetrics.monitoring.resolverExecutionDurationSamples++ await awaitMetricValue( { @@ -924,7 +938,7 @@ describe('View Model metrics', () => { 'ViewModel=monitoring-view-model', ]), }, - viewModelBaseMetrics.resolverErrors + viewModelBaseMetrics.monitoring.resolverErrors ) await awaitMetricValue( @@ -938,7 +952,7 @@ describe('View Model metrics', () => { 
'EventType=MONITORING_VIEW_MODEL_FAILED', ]), }, - viewModelBaseMetrics.projectionErrors + viewModelBaseMetrics.monitoring.projectionErrors ) await awaitMetricValue( diff --git a/functional-tests/app/common/aggregates/monitoring.commands.js b/functional-tests/app/common/aggregates/monitoring.commands.js index 40b331626a..74b2e60b57 100644 --- a/functional-tests/app/common/aggregates/monitoring.commands.js +++ b/functional-tests/app/common/aggregates/monitoring.commands.js @@ -1,9 +1,14 @@ import { + MONITORING_EXECUTED_HANDLER, MONITORING_FAILED_HANDLER, MONITORING_VIEW_MODEL_FAILED, } from '../event-types' const aggregate = { + executeReadModelProjection: () => ({ + type: MONITORING_EXECUTED_HANDLER, + payload: {}, + }), failReadModelProjection: () => ({ type: MONITORING_FAILED_HANDLER, payload: {}, diff --git a/functional-tests/app/common/event-types.js b/functional-tests/app/common/event-types.js index 26cb23b24d..d1ae8b2d21 100644 --- a/functional-tests/app/common/event-types.js +++ b/functional-tests/app/common/event-types.js @@ -7,5 +7,6 @@ export const TEST_SCENARIO_RETRY_ON_ERROR_UNBLOCKED = 'TEST_SCENARIO_RETRY_ON_ERROR_UNBLOCKED' export const TEST_SCENARIO_RETRY_ON_ERROR_COMPLETED = 'TEST_SCENARIO_RETRY_ON_ERROR_COMPLETED' +export const MONITORING_EXECUTED_HANDLER = 'MONITORING_EXECUTED_HANDLER' export const MONITORING_FAILED_HANDLER = 'MONITORING_FAILED_HANDLER' export const MONITORING_VIEW_MODEL_FAILED = 'MONITORING_VIEW_MODEL_FAILED' diff --git a/functional-tests/app/common/read-models/monitoring.projection.js b/functional-tests/app/common/read-models/monitoring.projection.js index 99cc6060f9..3d9e4eb826 100644 --- a/functional-tests/app/common/read-models/monitoring.projection.js +++ b/functional-tests/app/common/read-models/monitoring.projection.js @@ -1,9 +1,13 @@ -import { MONITORING_FAILED_HANDLER } from '../event-types' +import { + MONITORING_EXECUTED_HANDLER, + MONITORING_FAILED_HANDLER, +} from '../event-types' -const aggregate = { +const readModel = { + [MONITORING_EXECUTED_HANDLER]: async () => void 0, [MONITORING_FAILED_HANDLER]: async () => { throw Error('Test read model: event handler failed') }, } -export default aggregate +export default readModel diff --git a/functional-tests/app/common/view-models/resolver-failed.projection.js b/functional-tests/app/common/view-models/resolver-failed.projection.js new file mode 100644 index 0000000000..3a4aae0e78 --- /dev/null +++ b/functional-tests/app/common/view-models/resolver-failed.projection.js @@ -0,0 +1,5 @@ +const projection = { + Init: () => null, +} + +export default projection diff --git a/functional-tests/app/common/view-models/monitoring.resolver.js b/functional-tests/app/common/view-models/resolver-failed.resolver.js similarity index 100% rename from functional-tests/app/common/view-models/monitoring.resolver.js rename to functional-tests/app/common/view-models/resolver-failed.resolver.js diff --git a/functional-tests/app/config.app.js b/functional-tests/app/config.app.js index 2df2156701..17e279101b 100644 --- a/functional-tests/app/config.app.js +++ b/functional-tests/app/config.app.js @@ -115,6 +115,11 @@ const appConfig = { name: 'init-failed-view-model', projection: 'common/view-models/init-failed.projection.js', }, + { + name: 'resolver-failed-view-model', + projection: 'common/view-models/resolver-failed.projection.js', + resolver: 'common/view-models/resolver-failed.resolver.js', + }, ], sagas: [ { diff --git a/functional-tests/utils/utils.ts b/functional-tests/utils/utils.ts index 
bb70c78413..d154d8fd07 100644 --- a/functional-tests/utils/utils.ts +++ b/functional-tests/utils/utils.ts @@ -1,10 +1,13 @@ import { getClient as getClientInternal, Context } from '@resolve-js/client' -import monitoringResolver from '../app/common/view-models/monitoring.resolver' +import monitoringResolver from '../app/common/view-models/resolver-failed.resolver' import monitoringProjection from '../app/common/view-models/monitoring.projection' import initFailedProjection from '../app/common/view-models/init-failed.projection' +import resolverFailedProjection from '../app/common/view-models/resolver-failed.projection' +import resolverFailedResolver from '../app/common/view-models/resolver-failed.resolver' + export const getTargetURL = () => process.env.RESOLVE_TESTS_TARGET_URL || 'http://0.0.0.0:3000' @@ -25,6 +28,12 @@ const buildContext = (contextOverrides: any): Context => ({ resolver: () => void 0, deserializeState: (state) => JSON.parse(state), }, + { + name: 'resolver-failed-view-model', + projection: resolverFailedProjection, + resolver: resolverFailedResolver, + deserializeState: (state) => JSON.parse(state), + }, ], ...contextOverrides, }) diff --git a/packages/core/core/src/view-model/get-view-models-interop-builder.ts b/packages/core/core/src/view-model/get-view-models-interop-builder.ts index 7776571963..c1fc25f988 100644 --- a/packages/core/core/src/view-model/get-view-models-interop-builder.ts +++ b/packages/core/core/src/view-model/get-view-models-interop-builder.ts @@ -35,6 +35,13 @@ const buildViewModel = async ( const { eventstore, secretsManager, monitoring } = runtime const { jwt } = context + const viewModelMonitoring = + monitoring != null + ? monitoring + .group({ Part: 'ViewModelProjection' }) + .group({ ViewModel: name }) + : null + const aggregateIds = Array().concat(rawIds) const log = getLog(`build-view-model:${name}`) @@ -68,7 +75,13 @@ const buildViewModel = async ( if (cursor == null && typeof projection.Init === 'function') { log.debug(`initializing view model from scratch`) - state = projection.Init() + + try { + state = projection.Init() + } catch (error) { + viewModelMonitoring?.group({ EventType: 'Init' }).error(error) + throw error + } } let eventCount = 0 @@ -111,15 +124,7 @@ const buildViewModel = async ( } catch (error) { subSegment.addError(error) log.error(error.message) - - if (monitoring != null) { - const monitoringGroup = monitoring - .group({ Part: 'ViewModelProjection' }) - .group({ ViewModel: name }) - .group({ EventType: event.type }) - - monitoringGroup.error(error) - } + viewModelMonitoring?.group({ EventType: event.type }).error(error) throw error } finally { subSegment.close()