diff --git a/.eslintrc.yml b/.eslintrc.yml index fd12b9363..87dca16cf 100644 --- a/.eslintrc.yml +++ b/.eslintrc.yml @@ -1,6 +1,7 @@ env: browser: true es2021: true + jest: true extends: standard parserOptions: ecmaVersion: latest @@ -30,4 +31,4 @@ overrides: - files: ["*.mjs"] parserOptions: ecmaVersion: latest - sourceType: module \ No newline at end of file + sourceType: module diff --git a/.github/workflows/autogen/.eslintrc.yml b/.github/workflows/autogen/.eslintrc.yml new file mode 100644 index 000000000..4bad4ebe4 --- /dev/null +++ b/.github/workflows/autogen/.eslintrc.yml @@ -0,0 +1,14 @@ +env: + browser: true + es2021: true +extends: standard +parserOptions: + ecmaVersion: latest + sourceType: module +overrides: + - files: ["*.mjs"] + parserOptions: + ecmaVersion: latest + sourceType: module +rules: + "no-template-curly-in-string": "off" diff --git a/.github/workflows/autogen/README.md b/.github/workflows/autogen/README.md new file mode 100644 index 000000000..c75fe5a1b --- /dev/null +++ b/.github/workflows/autogen/README.md @@ -0,0 +1,42 @@ +# Solo autogen tool + +## Description + +The Solo autogen tool adds e2e test cases that need to run independently as their own jobs into the GitHub workflows and into the Solo package.json. + +## Usage + +From the Solo root directory: +```bash +cd .github/workflows/autogen +npm install +npm run autogen +``` + +Use git to detect file changes and validate that they are correct. + +The templates need to be maintained: either make changes directly to the templates and then run the tool, or make changes in both the workflow YAML files and the templates. Should the templates fall out of sync, update the templates so that the git diff matches more closely the next time autogen runs. +```bash +template.flow-build-application.yaml +template.flow-pull-request-checks.yaml +template.zxc-code-analysis.yaml +template.zxc-env-vars.yaml +``` +For new e2e test jobs, update `/.github/workflows/templates/config.yaml`, adding a new item to the tests object with `name` and `jestPostfix` attributes. + +NOTE: IntelliJ copy/paste will alter the escape sequences; you might have to type them in manually, clone a line, or use an external text editor. + +e.g.: +```yaml + - name: Mirror Node + jestPostfix: --testRegex=\".*\\/e2e\\/commands\\/mirror_node\\.test\\.mjs\" + +``` + +## Development + +To run the lint fix: +```bash +cd .github/workflows/autogen +eslint --fix . +``` diff --git a/.github/workflows/autogen/autogen.mjs b/.github/workflows/autogen/autogen.mjs new file mode 100755 index 000000000..a16d7818c --- /dev/null +++ b/.github/workflows/autogen/autogen.mjs @@ -0,0 +1,20 @@ +#!/usr/bin/env node +/** + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +import * as fnm from './src/index.mjs' + +fnm.main(process.argv)
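A sketch of what the tool emits, for orientation: given the `Mirror Node` entry in the README example above, `updatePackageJson` in `src/index.mjs` (added below) kebab-cases the test name into a script key and appends `jestPostfix` verbatim, so the generated Solo `package.json` script entry would look roughly like this:

```json
"test-e2e-mirror-node": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Mirror Node Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-mirror-node.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-mirror-node' --testRegex=\".*\\/e2e\\/commands\\/mirror_node\\.test\\.mjs\"",
```

The matching workflow job (`e2e-mirror-node-tests`) is generated from the same config entry by `generateTestJobs`, as seen in the `flow-*` workflow diffs further below.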
diff --git a/.github/workflows/autogen/package-lock.json b/.github/workflows/autogen/package-lock.json new file mode 100644 index 000000000..7d5d3e2a2 --- /dev/null +++ b/.github/workflows/autogen/package-lock.json @@ -0,0 +1,1077 @@ +{ + "name": "add-e2e-test", + "version": "0.0.1", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "add-e2e-test", + "version": "0.0.1", + "license": "Apache2.0", + "os": [ + "darwin", + "linux", + "win32" + ], + "dependencies": { + "change-case": "^5.4.4", + "js-yaml": "^4.1.0" + }, + "bin": { + "autogen": "autogen.mjs" + }, + "devDependencies": { + "eslint": "^9.10.0" + }, + "engines": { + "node": ">=20.14.0", + "npm": ">=9.8.1" + } + }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.4.0", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz", + "integrity": "sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==", + "dev": true, + "dependencies": { + "eslint-visitor-keys": "^3.3.0" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.11.0", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.0.tgz", + "integrity": "sha512-G/M/tIiMrTAxEWRfLfQJMmGNX28IxBg4PBz8XqQhqUHLFI6TL2htpIB1iQCj144V5ee/JaKyT9/WZ0MGZWfA7A==", + "dev": true, + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/config-array": { + "version": "0.18.0", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.18.0.tgz", + "integrity": "sha512-fTxvnS1sRMu3+JjXwJG0j/i4RT9u4qJ+lqS/yCGap4lH4zZGzQ7tu+xZqQmcMZq5OBZDL4QRxQzRjkWcGt8IVw==", + "dev": true, + "dependencies": { + "@eslint/object-schema": "^2.1.4", + "debug": "^4.3.1", + "minimatch": "^3.1.2" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/eslintrc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.1.0.tgz", + "integrity": "sha512-4Bfj15dVJdoy3RfZmmo86RK1Fwzn6SstsvK9JS+BaVKqC6QQQQyXekNaC+g+LKNgkQ+2VhGAzm6hO40AhMR3zQ==", + "dev": true, + "dependencies": { + "ajv": "^6.12.4", + "debug": "^4.3.2", + "espree": "^10.0.1", + "globals": "^14.0.0", + "ignore": "^5.2.0", + "import-fresh": "^3.2.1", + "js-yaml": "^4.1.0", + "minimatch": "^3.1.2", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint/js": { + "version": "9.10.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.10.0.tgz", + "integrity": "sha512-fuXtbiP5GWIn8Fz+LWoOMVf/Jxm+aajZYkhi6CuEm4SxymFM+eUWzbO9qXT+L0iCkL5+KGYMCSGxo686H19S1g==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, +
"node_modules/@eslint/object-schema": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.4.tgz", + "integrity": "sha512-BsWiH1yFGjXXS2yvrf5LyuoSIIbPrGUWob917o+BTKuZ7qJdxX8aJLRxs1fS9n6r7vESrq1OUqb68dANcFXuQQ==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@eslint/plugin-kit": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.1.0.tgz", + "integrity": "sha512-autAXT203ixhqei9xt+qkYOvY8l6LAFIdT2UXc/RPNeUVfqRF1BV94GTJyVPFKT8nFM6MyVJhjLj9E8JWvf5zQ==", + "dev": true, + "dependencies": { + "levn": "^0.4.1" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/retry": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.0.tgz", + "integrity": "sha512-d2CGZR2o7fS6sWB7DG/3a95bGKQyHMACZ5aW8qGkkqQpUoZV6C0X7Pc7l4ZNMZkfNBf4VWNe9E1jRsf0G146Ew==", + "dev": true, + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/acorn": { + "version": "8.12.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz", + "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==", + "dev": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": 
"sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/change-case": { + "version": "5.4.4", + "resolved": "https://registry.npmjs.org/change-case/-/change-case-5.4.4.tgz", + "integrity": "sha512-HRQyTk2/YPEkt9TnUPbOpr64Uw3KOicFWPVBb+xiHvd6eBx/qPr9xqfBFDT8P2vWsvvz4jbEkfDe71W3VyNu2w==" + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": 
"sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/debug": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", + "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", + "dev": true, + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "9.10.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.10.0.tgz", + "integrity": "sha512-Y4D0IgtBZfOcOUAIQTSXBKoNGfY0REGqHJG6+Q81vNippW5YlKjHFj4soMxamKK1NXHUWuBZTLdU3Km+L/pcHw==", + "dev": true, + "dependencies": { + "@eslint-community/eslint-utils": "^4.2.0", + "@eslint-community/regexpp": "^4.11.0", + "@eslint/config-array": "^0.18.0", + "@eslint/eslintrc": "^3.1.0", + "@eslint/js": "9.10.0", + "@eslint/plugin-kit": "^0.1.0", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.3.0", + "@nodelib/fs.walk": "^1.2.8", + "ajv": "^6.12.4", + "chalk": "^4.0.0", + "cross-spawn": "^7.0.2", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^8.0.2", + "eslint-visitor-keys": "^4.0.0", + "espree": "^10.1.0", + "esquery": "^1.5.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "is-path-inside": "^3.0.3", + "json-stable-stringify-without-jsonify": "^1.0.1", + "lodash.merge": "^4.6.2", + "minimatch": "^3.1.2", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3", + "strip-ansi": "^6.0.1", + "text-table": "^0.2.0" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "node_modules/eslint-scope": { + "version": "8.0.2", + "resolved": 
"https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.0.2.tgz", + "integrity": "sha512-6E4xmrTw5wtxnLA5wYL3WDfhZ/1bUBGOXV0zQvVRDOtrR8D0p6W7fs3JweNYhwRYeGvd/1CKX2se0/2s7Q/nJA==", + "dev": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.0.0.tgz", + "integrity": "sha512-OtIRv/2GyiF6o/d8K7MYKKbXrOUBIK6SfkIRM4Z0dY3w+LiQ0vy3F57m0Z71bjbyeiWFiHJ8brqnmE6H6/jEuw==", + "dev": true, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/espree": { + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-10.1.0.tgz", + "integrity": "sha512-M1M6CpiE6ffoigIOWYO9UDP8TMUw9kqb21tf+08IgDYjCsOvCuDt4jQcZmoYxx+w7zlKw9/N0KXfto+I8/FrXA==", + "dev": true, + "dependencies": { + "acorn": "^8.12.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^4.0.0" + }, + "engines": { + "node": "^18.18.0 || ^20.9.0 || >=21.1.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz", + "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==", + "dev": true, + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true + }, + "node_modules/fastq": { + "version": "1.17.1", + "resolved": 
"https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.1.tgz", + "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", + "dev": true + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/globals": { + "version": "14.0.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-14.0.0.tgz", + "integrity": "sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/import-fresh": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.0.tgz", + "integrity": "sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==", + "dev": true, + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": 
"https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-path-inside": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", + "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": 
"sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", + "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", + "dev": true + }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true + }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": 
"sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + 
"resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", + "dev": true + }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + } + } +} diff --git a/.github/workflows/autogen/package.json b/.github/workflows/autogen/package.json new file mode 100644 index 000000000..00b1ae76a --- /dev/null +++ b/.github/workflows/autogen/package.json @@ -0,0 +1,32 @@ +{ + "name": "add-e2e-test", + "version": "0.0.1", + "description": "uses templates to add e2e tests into the workflows and package.json", + "main": "src/index.mjs", + "type": "module", + "bin": { + "autogen": "autogen.mjs" + }, + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1", + "autogen": "NODE_OPTIONS=--experimental-vm-modules node --no-deprecation autogen.mjs" + }, + "author": "Swirlds Labs", + "license": "Apache2.0", + "os": [ + "darwin", + "linux", + "win32" + ], + "engines": { + "node": ">=20.14.0", + "npm": ">=9.8.1" + }, + "devDependencies": { + "eslint": "^9.10.0" + }, + "dependencies": { + "change-case": 
"^5.4.4", + "js-yaml": "^4.1.0" + } +} diff --git a/.github/workflows/autogen/src/index.mjs b/.github/workflows/autogen/src/index.mjs new file mode 100644 index 000000000..8f2ebcd42 --- /dev/null +++ b/.github/workflows/autogen/src/index.mjs @@ -0,0 +1,339 @@ +/** + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the ""License""); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an ""AS IS"" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +'use strict' +import * as yaml from 'js-yaml' +import * as fs from 'node:fs' +import * as path from 'node:path' +import { fileURLToPath } from 'url' +import * as changeCase from 'change-case' + +export const AUTOGENERATE_E2E_TEST_JOBS = '# {AUTOGENERATE-E2E-TEST-JOBS}' +export const AUTOGENERATE_E2E_TEST_JOBS_2 = '# {AUTOGENERATE-E2E-TEST-JOBS-2}' +export const AUTOGENERATE_NEEDS = '# {AUTOGENERATE-NEEDS}' +export const AUTOGENERATE_WITH_SUBDIR = '# {AUTOGENERATE-WITH-SUBDIR}' +export const AUTOGENERATE_WITH_COVERAGE_REPORT = '# {AUTOGENERATE-WITH-COVERAGE-REPORT}' +export const AUTOGENERATE_JOB_OUTPUTS_SUB_DIRS = '# {AUTOGENERATE-JOB-OUTPUTS-SUB-DIRS}' +export const AUTOGENERATE_JOB_OUTPUTS_COVERAGE_REPORTS = '# {AUTOGENERATE-JOB-OUTPUTS-COVERAGE-REPORTS}' +export const AUTOGENERATE_WORKFLOW_OUTPUTS_SUB_DIRS = '# {AUTOGENERATE-WORKFLOW-OUTPUTS-SUB-DIRS}' +export const AUTOGENERATE_WORKFLOW_OUTPUTS_COVERAGE_REPORTS = '# {AUTOGENERATE-WORKFLOW-OUTPUTS-COVERAGE-REPORTS}' +export const AUTOGENERATE_INPUTS_SUB_DIRS = '# {AUTOGENERATE-INPUTS-SUB-DIRS}' +export const AUTOGENERATE_INPUTS_COVERAGE_REPORTS = '# {AUTOGENERATE-INPUTS-COVERAGE-REPORTS}' +export const AUTOGENERATE_DOWNLOAD_JOBS = '# {AUTOGENERATE-DOWNLOAD-JOBS}' + +/** + * @typedef {Object} Test + * @property {string} name + * @property {string} jestPostfix + */ + +/** + * @typedef {Object} Config + * @property {string} downloadArtifactAction + * @property {string} downloadArtifactActionComment + * @property {Test[]} tests + */ + +export function main () { + console.log('Begin autogen...') + + const __filename = fileURLToPath(import.meta.url) // get the resolved path to the file + const __dirname = path.dirname(__filename) // get the name of the directory + const outputDir = path.dirname(path.dirname(__dirname)) + const templateDir = path.join(outputDir, 'templates') + const configFile = path.join(templateDir, 'config.yaml') + const configData = fs.readFileSync(configFile, 'utf8') + const config = /** @type {Config} **/ yaml.load(configData) + + // generate the workflows with changes + buildWorkflows(outputDir, templateDir, config) + + // update the Solo package.json with changes + updatePackageJson(outputDir, config) + + console.log('...end autogen') +} + +/** + * Updates the Solo package.json by auto-generating the e2e test scripts based on + * the values in the config + * @param {string} outputDir + * @param {Config} config + */ +function updatePackageJson (outputDir, config) { + const packageJsonDir = path.dirname(path.dirname(outputDir)) + const packageJsonFile = path.join(packageJsonDir, 'package.json') + const inputData = fs.readFileSync(packageJsonFile, 
'utf8') + const inputLines = inputData.split('\n') + const outputLines = [] + const generatedLines = [] + const firstMarker = '"test-e2e-all":' + const secondMarker = '"merge-clean":' + let skipNext = false + + inputLines.forEach(line => { + if (line.includes(firstMarker)) { + outputLines.push(line) + skipNext = true + const spacePrefix = line.substring(0, line.indexOf('"test-e2e')) + + config.tests.forEach(test => { + const formalNounName = test.name + const kebabCase = changeCase.kebabCase(formalNounName) + generatedLines.push(`${spacePrefix}"test-e2e-${kebabCase}": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E ${formalNounName} Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-${kebabCase}.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-${kebabCase}' ${test.jestPostfix}",`) + }) + + outputLines.push(...generatedLines) + } else if (line.includes(secondMarker)) { + outputLines.push(line) + skipNext = false + } else if (skipNext) { + // do nothing, we generate these lines after we see the firstMarker + } else { + outputLines.push(line) + } + }) + console.log(`outputFile: ${packageJsonFile}`) + fs.writeFileSync(packageJsonFile, outputLines.join('\n')) +} + +/** + * Autogenerate the GitHub workflows files with the entries needed to add the + * E2E test jobs + * @param {string} outputDir + * @param {string} templateDir + * @param {Config} config + */ +function buildWorkflows (outputDir, templateDir, config) { + const templates = [] + fs.readdirSync(templateDir).forEach(file => { + if (file.substring(0, 'template'.length) === 'template') { + templates.push(file) + } + }) + + templates.forEach(template => { + const templateFile = path.join(templateDir, template) + const templateData = fs.readFileSync(templateFile, 'utf8') + const templateLines = templateData.split('\n') + const outputFile = path.join(outputDir, template.substring('template.'.length)) + const outputLines = [] + console.log(`outputFile: ${outputFile}`) + + templateLines.forEach(line => { + const trimmedLine = line.trim() + + switch (trimmedLine) { + case AUTOGENERATE_E2E_TEST_JOBS: + case AUTOGENERATE_E2E_TEST_JOBS_2: + case AUTOGENERATE_WORKFLOW_OUTPUTS_SUB_DIRS: + case AUTOGENERATE_WORKFLOW_OUTPUTS_COVERAGE_REPORTS: + case AUTOGENERATE_INPUTS_SUB_DIRS: + case AUTOGENERATE_INPUTS_COVERAGE_REPORTS: + autogenerateYaml(line, config, outputLines, trimmedLine) + break + case AUTOGENERATE_NEEDS: + case AUTOGENERATE_WITH_SUBDIR: + case AUTOGENERATE_WITH_COVERAGE_REPORT: + case AUTOGENERATE_JOB_OUTPUTS_SUB_DIRS: + case AUTOGENERATE_JOB_OUTPUTS_COVERAGE_REPORTS: + autogenerateLine(line, config, outputLines, trimmedLine) + break + case AUTOGENERATE_DOWNLOAD_JOBS: + autogenerateLine(line, config, outputLines, trimmedLine) + outputLines.pop() // remove the extra new line character + break + default: + outputLines.push(line) + } + }) + + fs.writeFileSync(outputFile, outputLines.join('\n')) + }) +} + +/** + * Generates the YAML for the provided templateKey + * @param {string} line + * @param {Config} config + * @param {string[]} outputLines + * @param {string} templateKey + */ +export function autogenerateYaml (line, config, outputLines, templateKey) { + const spacePrefix = line.substring(0, + line.indexOf(templateKey)) + let suppressEmptyLines = false + + config.tests.forEach(test => { + const outputYaml = {} + + switch (templateKey) { + case AUTOGENERATE_E2E_TEST_JOBS: + case AUTOGENERATE_E2E_TEST_JOBS_2: + generateTestJobs(test, templateKey, outputYaml) + break + default: + 
generateOutputs(test, templateKey, outputYaml) + suppressEmptyLines = true + } + + const yamlLines = yaml.dump(outputYaml, { lineWidth: -1, quotingType: '"' }).split('\n') + + yamlLines.forEach(function (line) { + line = line.replaceAll('¡', '"') + if (/^\s*$/.test(line)) { + if (!suppressEmptyLines) { + outputLines.push(line) + } + } else { + outputLines.push(`${spacePrefix}${line}`) + } + }) + }) + + if (!suppressEmptyLines) { + outputLines.pop() // remove the extra new line character + } +} + +/** + * Generates the output lines for the provided templateKey + * @param {Test} test + * @param {string} templateKey + * @param {Object} outputYaml + */ +export function generateOutputs (test, templateKey, outputYaml) { + const formalNounName = test.name + const kebabCase = changeCase.kebabCase(formalNounName) + const snakeCase = changeCase.snakeCase(formalNounName) + let outputKey + const outputValue = {} + + switch (templateKey) { + case AUTOGENERATE_WORKFLOW_OUTPUTS_SUB_DIRS: + outputKey = `e2e-${kebabCase}-test-subdir` + outputValue.description = `¡E2E ${formalNounName} Test Subdirectory¡` + outputValue.value = '${{ jobs.env-vars.outputs.e2e_' + snakeCase + '_test_subdir }}' + break + case AUTOGENERATE_WORKFLOW_OUTPUTS_COVERAGE_REPORTS: + outputKey = `e2e-${kebabCase}-coverage-report` + outputValue.description = `¡E2E ${formalNounName} Tests Coverage Report¡` + outputValue.value = '${{ jobs.env-vars.outputs.e2e_' + snakeCase + + '_coverage_report }}' + break + case AUTOGENERATE_INPUTS_SUB_DIRS: + outputKey = `e2e-${kebabCase}-test-subdir` + outputValue.description = `¡E2E ${formalNounName} Test Subdirectory:¡` + outputValue.type = 'string' + outputValue.required = false + outputValue.default = `¡e2e-${kebabCase}¡` + break + case AUTOGENERATE_INPUTS_COVERAGE_REPORTS: + outputKey = `e2e-${kebabCase}-coverage-report` + outputValue.description = `¡E2E ${formalNounName} Coverage Report:¡` + outputValue.type = 'string' + outputValue.required = false + outputValue.default = `¡E2E ${formalNounName} Tests Coverage Report¡` + } + + outputYaml[outputKey] = outputValue +} + +/** + * Generates the test jobs for the provided templateKey + * @param {Test} test + * @param {string} templateKey + * @param {Object} outputYaml + */ +export function generateTestJobs (test, templateKey, outputYaml) { + const formalNounName = test.name + const kebabCase = changeCase.kebabCase(formalNounName) + const testJobKey = `e2e-${kebabCase}-tests` + const testJobValue = {} + testJobValue.name = 'E2E Tests' + + if (templateKey === AUTOGENERATE_E2E_TEST_JOBS) { + testJobValue.if = '${{ github.event_name == \'push\' || github.event.inputs.enable-e2e-tests == \'true\' }}' + } else { + testJobValue.if = '${{ !cancelled() && always() }}' + } + + testJobValue.uses = './.github/workflows/zxc-e2e-test.yaml' + testJobValue.needs = ['env-vars', 'code-style'] + testJobValue.with = { + 'custom-job-label': formalNounName, + 'npm-test-script': 'test-${{ needs.env-vars.outputs.e2e-' + + kebabCase + '-test-subdir }}', + 'coverage-subdirectory': '${{ needs.env-vars.outputs.e2e-' + + kebabCase + '-test-subdir }}', + 'coverage-report-name': '${{ needs.env-vars.outputs.e2e-' + + kebabCase + '-coverage-report }}' + } + + outputYaml[testJobKey] = testJobValue +} + +/** + * Generates the output line for the provided templateKey + * @param {string} line + * @param {Config} config + * @param {string[]} outputLines + * @param {string} templateKey + */ +export function autogenerateLine (line, config, outputLines, templateKey) { + const spacePrefix = 
line.substring(0, + line.indexOf(templateKey)) + + config.tests.forEach(test => { + const formalNounName = test.name + const kebabCase = changeCase.kebabCase(formalNounName) + const snakeCase = changeCase.snakeCase(formalNounName) + let namePart + let namePart2 + + switch (templateKey) { + case AUTOGENERATE_WITH_SUBDIR: + namePart = `e2e-${kebabCase}-test` + outputLines.push(spacePrefix + namePart + '-subdir: ${{ needs.env-vars.outputs.' + namePart + '-subdir }}') + break + case AUTOGENERATE_WITH_COVERAGE_REPORT: + namePart = `e2e-${kebabCase}` + outputLines.push(spacePrefix + namePart + '-coverage-report: ${{ needs.env-vars.outputs.' + namePart + '-coverage-report }}') + break + case AUTOGENERATE_NEEDS: + namePart = `e2e-${kebabCase}-tests` + outputLines.push(`${spacePrefix}- ${namePart}`) + break + case AUTOGENERATE_JOB_OUTPUTS_SUB_DIRS: + namePart = `e2e_${snakeCase}_test_subdir` + namePart2 = `e2e-${kebabCase}` + outputLines.push(`${spacePrefix}${namePart}: ${namePart2}`) + break + case AUTOGENERATE_JOB_OUTPUTS_COVERAGE_REPORTS: + namePart = `e2e_${snakeCase}_coverage_report` + outputLines.push(`${spacePrefix}${namePart}: "E2E ${formalNounName} Tests Coverage Report"`) + break + case AUTOGENERATE_DOWNLOAD_JOBS: + outputLines.push(`${spacePrefix}- name: Download E2E ${formalNounName} Coverage Report`) + outputLines.push(`${spacePrefix} uses: ${config.downloadArtifactAction} # ${config.downloadArtifactActionComment}`) + outputLines.push(spacePrefix + ' if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && inputs.enable-e2e-coverage-report && !cancelled() && !failure() }}') + outputLines.push(`${spacePrefix} with:`) + outputLines.push(spacePrefix + ' name: ${{ inputs.e2e-' + kebabCase + '-coverage-report }}') + outputLines.push(spacePrefix + ' path: \'coverage/${{ inputs.e2e-' + kebabCase + '-test-subdir }}\'') + outputLines.push('') + } + }) +} diff --git a/.github/workflows/flow-build-application.yaml b/.github/workflows/flow-build-application.yaml index 3e1bc2386..cad5ba428 100644 --- a/.github/workflows/flow-build-application.yaml +++ b/.github/workflows/flow-build-application.yaml @@ -64,7 +64,7 @@ jobs: with: custom-job-label: Standard - e2e-tests: + e2e-standard-tests: name: E2E Tests if: ${{ github.event_name == 'push' || github.event.inputs.enable-e2e-tests == 'true' }} uses: ./.github/workflows/zxc-e2e-test.yaml @@ -73,9 +73,9 @@ jobs: - code-style with: custom-job-label: Standard - npm-test-script: test-${{ needs.env-vars.outputs.e2e-test-subdir }} - coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-test-subdir }} - coverage-report-name: ${{ needs.env-vars.outputs.e2e-coverage-report }} + npm-test-script: test-${{ needs.env-vars.outputs.e2e-standard-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-standard-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-standard-coverage-report }} e2e-mirror-node-tests: name: E2E Tests @@ -84,14 +84,13 @@ jobs: needs: - env-vars - code-style - - e2e-tests with: custom-job-label: Mirror Node npm-test-script: test-${{ needs.env-vars.outputs.e2e-mirror-node-test-subdir }} coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-mirror-node-test-subdir }} coverage-report-name: ${{ needs.env-vars.outputs.e2e-mirror-node-coverage-report }} - e2e-node-pem-stop-add-tests: + e2e-node-pem-stop-tests: name: E2E Tests if: ${{ github.event_name == 'push' || github.event.inputs.enable-e2e-tests == 'true' }} uses: ./.github/workflows/zxc-e2e-test.yaml @@ -99,12 +98,12 @@ jobs: - env-vars - 
code-style with: - custom-job-label: Node PEM Stop Add - npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-pem-stop-add-test-subdir }} - coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-pem-stop-add-test-subdir }} - coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-pem-stop-add-coverage-report }} + custom-job-label: Node PEM Stop + npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-pem-stop-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-pem-stop-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-pem-stop-coverage-report }} - e2e-node-pfx-kill-add-tests: + e2e-node-pem-kill-tests: name: E2E Tests if: ${{ github.event_name == 'push' || github.event.inputs.enable-e2e-tests == 'true' }} uses: ./.github/workflows/zxc-e2e-test.yaml @@ -112,13 +111,14 @@ jobs: - env-vars - code-style with: - custom-job-label: Node PFX Kill Add - npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-test-subdir }} - coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-test-subdir }} - coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-coverage-report }} + custom-job-label: Node PEM Kill + npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-pem-kill-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-pem-kill-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-pem-kill-coverage-report }} e2e-node-local-build-tests: name: E2E Tests + if: ${{ github.event_name == 'push' || github.event.inputs.enable-e2e-tests == 'true' }} uses: ./.github/workflows/zxc-e2e-test.yaml needs: - env-vars @@ -129,6 +129,58 @@ jobs: coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-local-build-test-subdir }} coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-local-build-coverage-report }} + e2e-node-add-tests: + name: E2E Tests + if: ${{ github.event_name == 'push' || github.event.inputs.enable-e2e-tests == 'true' }} + uses: ./.github/workflows/zxc-e2e-test.yaml + needs: + - env-vars + - code-style + with: + custom-job-label: Node Add + npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-add-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-add-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-add-coverage-report }} + + e2e-node-add-separate-commands-tests: + name: E2E Tests + if: ${{ github.event_name == 'push' || github.event.inputs.enable-e2e-tests == 'true' }} + uses: ./.github/workflows/zxc-e2e-test.yaml + needs: + - env-vars + - code-style + with: + custom-job-label: Node Add - Separate commands + npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-add-separate-commands-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-add-separate-commands-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-add-separate-commands-coverage-report }} + + e2e-node-update-tests: + name: E2E Tests + if: ${{ github.event_name == 'push' || github.event.inputs.enable-e2e-tests == 'true' }} + uses: ./.github/workflows/zxc-e2e-test.yaml + needs: + - env-vars + - code-style + with: + custom-job-label: Node Update + npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-update-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-update-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-update-coverage-report }} + + e2e-node-delete-tests: + name: E2E Tests + if: ${{ github.event_name == 'push' || 
github.event.inputs.enable-e2e-tests == 'true' }} + uses: ./.github/workflows/zxc-e2e-test.yaml + needs: + - env-vars + - code-style + with: + custom-job-label: Node Delete + npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-delete-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-delete-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-delete-coverage-report }} + e2e-relay-tests: name: E2E Tests if: ${{ github.event_name == 'push' || github.event.inputs.enable-e2e-tests == 'true' }} @@ -136,7 +188,6 @@ jobs: needs: - env-vars - code-style - - e2e-mirror-node-tests with: custom-job-label: Relay npm-test-script: test-${{ needs.env-vars.outputs.e2e-relay-test-subdir }} @@ -149,11 +200,15 @@ jobs: needs: - env-vars - unit-tests - - e2e-tests + - e2e-standard-tests - e2e-mirror-node-tests - - e2e-node-pem-stop-add-tests - - e2e-node-pfx-kill-add-tests + - e2e-node-pem-stop-tests + - e2e-node-pem-kill-tests - e2e-node-local-build-tests + - e2e-node-add-tests + - e2e-node-add-separate-commands-tests + - e2e-node-update-tests + - e2e-node-delete-tests - e2e-relay-tests if: ${{ (github.event_name == 'push' || github.event.inputs.enable-unit-tests == 'true' || github.event.inputs.enable-e2e-tests == 'true') && !failure() && !cancelled() }} with: @@ -162,17 +217,25 @@ jobs: enable-codecov-analysis: true enable-codacy-coverage: true enable-e2e-coverage-report: ${{ github.event_name == 'push' || github.event.inputs.enable-e2e-tests == 'true' }} - e2e-test-subdir: ${{ needs.env-vars.outputs.e2e-test-subdir }} + e2e-standard-test-subdir: ${{ needs.env-vars.outputs.e2e-standard-test-subdir }} e2e-mirror-node-test-subdir: ${{ needs.env-vars.outputs.e2e-mirror-node-test-subdir }} - e2e-node-pem-stop-add-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pem-stop-add-test-subdir }} - e2e-node-pfx-kill-add-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-test-subdir }} + e2e-node-pem-stop-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pem-stop-test-subdir }} + e2e-node-pem-kill-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pem-kill-test-subdir }} e2e-node-local-build-test-subdir: ${{ needs.env-vars.outputs.e2e-node-local-build-test-subdir }} + e2e-node-add-test-subdir: ${{ needs.env-vars.outputs.e2e-node-add-test-subdir }} + e2e-node-add-separate-commands-test-subdir: ${{ needs.env-vars.outputs.e2e-node-add-separate-commands-test-subdir }} + e2e-node-update-test-subdir: ${{ needs.env-vars.outputs.e2e-node-update-test-subdir }} + e2e-node-delete-test-subdir: ${{ needs.env-vars.outputs.e2e-node-delete-test-subdir }} e2e-relay-test-subdir: ${{ needs.env-vars.outputs.e2e-relay-test-subdir }} - e2e-coverage-report: ${{ needs.env-vars.outputs.e2e-coverage-report }} + e2e-standard-coverage-report: ${{ needs.env-vars.outputs.e2e-standard-coverage-report }} e2e-mirror-node-coverage-report: ${{ needs.env-vars.outputs.e2e-mirror-node-coverage-report }} - e2e-node-pem-stop-add-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pem-stop-add-coverage-report }} - e2e-node-pfx-kill-add-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-coverage-report }} + e2e-node-pem-stop-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pem-stop-coverage-report }} + e2e-node-pem-kill-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pem-kill-coverage-report }} e2e-node-local-build-coverage-report: ${{ needs.env-vars.outputs.e2e-node-local-build-coverage-report }} + e2e-node-add-coverage-report: ${{ 
needs.env-vars.outputs.e2e-node-add-coverage-report }} + e2e-node-add-separate-commands-coverage-report: ${{ needs.env-vars.outputs.e2e-node-add-separate-commands-coverage-report }} + e2e-node-update-coverage-report: ${{ needs.env-vars.outputs.e2e-node-update-coverage-report }} + e2e-node-delete-coverage-report: ${{ needs.env-vars.outputs.e2e-node-delete-coverage-report }} e2e-relay-coverage-report: ${{ needs.env-vars.outputs.e2e-relay-coverage-report }} secrets: snyk-token: ${{ secrets.SNYK_TOKEN }} diff --git a/.github/workflows/flow-deploy-release-artifact.yaml b/.github/workflows/flow-deploy-release-artifact.yaml index 62987e1da..ba9f15d34 100644 --- a/.github/workflows/flow-deploy-release-artifact.yaml +++ b/.github/workflows/flow-deploy-release-artifact.yaml @@ -40,12 +40,12 @@ permissions: jobs: prepare-release: name: Release / Prepare - runs-on: [self-hosted, Linux, medium, ephemeral] + runs-on: solo-linux-medium outputs: version: ${{ steps.tag.outputs.version }} steps: - name: Checkout Code - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: fetch-depth: 0 @@ -73,25 +73,43 @@ jobs: GIT_AUTHOR_EMAIL: ${{ secrets.GIT_USER_EMAIL }} GIT_COMMITTER_NAME: ${{ secrets.GIT_USER_NAME }} GIT_COMMITTER_EMAIL: ${{ secrets.GIT_USER_EMAIL }} - run: npx semantic-release --dry-run + run: | + npx semantic-release --dry-run + ls -al + cat VERSION - name: Extract Version id: tag run: | + cat VERSION [[ "${{ github.event.inputs.dry-run-enabled }}" == true && ! -f VERSION ]] && echo -n "0.0.0-latest" > VERSION - echo "version=$(cat VERSION | tr -d '[:space:]')" >> ${GITHUB_OUTPUT} + echo "version=$(cat VERSION | tr -d '[:space:]')" | tee -a ${GITHUB_OUTPUT} + + update-readme: + name: "Update README.md" + uses: ./.github/workflows/flow-update-readme.yaml + needs: + - prepare-release + with: + commit-changes: true + version: ${{ needs.prepare-release.outputs.version }} + secrets: + GH_ACCESS_TOKEN: ${{ secrets.GH_ACCESS_TOKEN }} + GH_ACCESS_GPG_KEY: ${{ secrets.GH_ACCESS_GPG_KEY }} + GH_ACCESS_PASSPHRASE: ${{ secrets.GH_ACCESS_PASSPHRASE }} create-github-release: name: Github / Release - runs-on: [self-hosted, Linux, medium, ephemeral] + runs-on: solo-linux-medium needs: - - prepare-release + - update-readme steps: - name: Checkout Code - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: token: ${{ secrets.GH_ACCESS_TOKEN }} fetch-depth: 0 + ref: ${{ needs.update-readme.outputs.commit-hash }} - name: Install GnuPG Tools run: | @@ -126,7 +144,7 @@ jobs: attempt_delay: 5000 - name: Setup JFrog CLI - uses: jfrog/setup-jfrog-cli@7c95feb32008765e1b4e626b078dfd897c4340ad # v4.1.2 + uses: jfrog/setup-jfrog-cli@9fe0f98bd45b19e6e931d457f4e98f8f84461fb5 # v4.4.1 env: JF_URL: ${{ vars.JF_URL }} JF_ACCESS_TOKEN: ${{ secrets.JF_ACCESS_TOKEN }} @@ -162,6 +180,7 @@ jobs: - name: Publish Semantic Release env: + commit: ${{ needs.update-readme.outputs.commit-hash }} NPM_TOKEN: ${{ secrets.NPM_TOKEN }} GITHUB_TOKEN: ${{ secrets.GH_ACCESS_TOKEN }} GIT_AUTHOR_NAME: ${{ secrets.GIT_USER_NAME }} diff --git a/.github/workflows/flow-hugo-publish.yaml b/.github/workflows/flow-hugo-publish.yaml index d38d5f80a..6d41fbbbe 100644 --- a/.github/workflows/flow-hugo-publish.yaml +++ b/.github/workflows/flow-hugo-publish.yaml @@ -52,7 +52,7 @@ defaults: jobs: # Build job build: - runs-on: [self-hosted, Linux, medium, ephemeral] + 
runs-on: solo-linux-medium steps: - name: Setup Hugo @@ -61,7 +61,7 @@ jobs: hugo-version: '0.124.1' - name: Checkout Code - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: fetch-depth: 0 @@ -136,7 +136,7 @@ jobs: environment: name: github-pages url: ${{ steps.deployment.outputs.page_url }} - runs-on: [self-hosted, Linux, medium, ephemeral] + runs-on: solo-linux-medium needs: build steps: - name: Deploy to GitHub Pages diff --git a/.github/workflows/flow-pull-request-checks.yaml b/.github/workflows/flow-pull-request-checks.yaml index dc9ef43f0..9c4dd1edb 100644 --- a/.github/workflows/flow-pull-request-checks.yaml +++ b/.github/workflows/flow-pull-request-checks.yaml @@ -52,17 +52,28 @@ jobs: with: custom-job-label: Standard - e2e-tests: + update-readme: + name: "Update README.md" + uses: ./.github/workflows/flow-update-readme.yaml + with: + commit-changes: false + secrets: + GH_ACCESS_TOKEN: ${{ secrets.GH_ACCESS_TOKEN }} + GH_ACCESS_GPG_KEY: ${{ secrets.GH_ACCESS_GPG_KEY }} + GH_ACCESS_PASSPHRASE: ${{ secrets.GH_ACCESS_PASSPHRASE }} + + e2e-standard-tests: name: E2E Tests + if: ${{ !cancelled() && always() }} uses: ./.github/workflows/zxc-e2e-test.yaml needs: - env-vars - code-style with: custom-job-label: Standard - npm-test-script: test-${{ needs.env-vars.outputs.e2e-test-subdir }} - coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-test-subdir }} - coverage-report-name: ${{ needs.env-vars.outputs.e2e-coverage-report }} + npm-test-script: test-${{ needs.env-vars.outputs.e2e-standard-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-standard-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-standard-coverage-report }} e2e-mirror-node-tests: name: E2E Tests @@ -71,39 +82,41 @@ jobs: needs: - env-vars - code-style - - e2e-tests with: custom-job-label: Mirror Node npm-test-script: test-${{ needs.env-vars.outputs.e2e-mirror-node-test-subdir }} coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-mirror-node-test-subdir }} coverage-report-name: ${{ needs.env-vars.outputs.e2e-mirror-node-coverage-report }} - e2e-node-pem-stop-add-tests: + e2e-node-pem-stop-tests: name: E2E Tests + if: ${{ !cancelled() && always() }} uses: ./.github/workflows/zxc-e2e-test.yaml needs: - env-vars - code-style with: - custom-job-label: Node PEM Stop Add - npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-pem-stop-add-test-subdir }} - coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-pem-stop-add-test-subdir }} - coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-pem-stop-add-coverage-report }} + custom-job-label: Node PEM Stop + npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-pem-stop-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-pem-stop-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-pem-stop-coverage-report }} - e2e-node-pfx-kill-add-tests: + e2e-node-pem-kill-tests: name: E2E Tests + if: ${{ !cancelled() && always() }} uses: ./.github/workflows/zxc-e2e-test.yaml needs: - env-vars - code-style with: - custom-job-label: Node PFX Kill Add - npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-test-subdir }} - coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-test-subdir }} - coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-coverage-report }} + custom-job-label: Node PEM Kill + npm-test-script: test-${{ 
needs.env-vars.outputs.e2e-node-pem-kill-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-pem-kill-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-pem-kill-coverage-report }} e2e-node-local-build-tests: name: E2E Tests + if: ${{ !cancelled() && always() }} uses: ./.github/workflows/zxc-e2e-test.yaml needs: - env-vars @@ -114,6 +127,58 @@ jobs: coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-local-build-test-subdir }} coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-local-build-coverage-report }} + e2e-node-add-tests: + name: E2E Tests + if: ${{ !cancelled() && always() }} + uses: ./.github/workflows/zxc-e2e-test.yaml + needs: + - env-vars + - code-style + with: + custom-job-label: Node Add + npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-add-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-add-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-add-coverage-report }} + + e2e-node-add-separate-commands-tests: + name: E2E Tests + if: ${{ !cancelled() && always() }} + uses: ./.github/workflows/zxc-e2e-test.yaml + needs: + - env-vars + - code-style + with: + custom-job-label: Node Add - Separate commands + npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-add-separate-commands-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-add-separate-commands-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-add-separate-commands-coverage-report }} + + e2e-node-update-tests: + name: E2E Tests + if: ${{ !cancelled() && always() }} + uses: ./.github/workflows/zxc-e2e-test.yaml + needs: + - env-vars + - code-style + with: + custom-job-label: Node Update + npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-update-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-update-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-update-coverage-report }} + + e2e-node-delete-tests: + name: E2E Tests + if: ${{ !cancelled() && always() }} + uses: ./.github/workflows/zxc-e2e-test.yaml + needs: + - env-vars + - code-style + with: + custom-job-label: Node Delete + npm-test-script: test-${{ needs.env-vars.outputs.e2e-node-delete-test-subdir }} + coverage-subdirectory: ${{ needs.env-vars.outputs.e2e-node-delete-test-subdir }} + coverage-report-name: ${{ needs.env-vars.outputs.e2e-node-delete-coverage-report }} + e2e-relay-tests: name: E2E Tests if: ${{ !cancelled() && always() }} @@ -121,7 +186,6 @@ jobs: needs: - env-vars - code-style - - e2e-mirror-node-tests with: custom-job-label: Relay npm-test-script: test-${{ needs.env-vars.outputs.e2e-relay-test-subdir }} @@ -134,28 +198,40 @@ jobs: needs: - env-vars - unit-tests - - e2e-tests + - e2e-standard-tests - e2e-mirror-node-tests - - e2e-node-pem-stop-add-tests - - e2e-node-pfx-kill-add-tests + - e2e-node-pem-stop-tests + - e2e-node-pem-kill-tests - e2e-node-local-build-tests + - e2e-node-add-tests + - e2e-node-add-separate-commands-tests + - e2e-node-update-tests + - e2e-node-delete-tests - e2e-relay-tests if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }} with: custom-job-label: Standard enable-codecov-analysis: true enable-e2e-coverage-report: true - e2e-test-subdir: ${{ needs.env-vars.outputs.e2e-test-subdir }} + e2e-standard-test-subdir: ${{ needs.env-vars.outputs.e2e-standard-test-subdir }} e2e-mirror-node-test-subdir: ${{ 
needs.env-vars.outputs.e2e-mirror-node-test-subdir }} - e2e-node-pem-stop-add-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pem-stop-add-test-subdir }} - e2e-node-pfx-kill-add-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-test-subdir }} + e2e-node-pem-stop-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pem-stop-test-subdir }} + e2e-node-pem-kill-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pem-kill-test-subdir }} e2e-node-local-build-test-subdir: ${{ needs.env-vars.outputs.e2e-node-local-build-test-subdir }} + e2e-node-add-test-subdir: ${{ needs.env-vars.outputs.e2e-node-add-test-subdir }} + e2e-node-add-separate-commands-test-subdir: ${{ needs.env-vars.outputs.e2e-node-add-separate-commands-test-subdir }} + e2e-node-update-test-subdir: ${{ needs.env-vars.outputs.e2e-node-update-test-subdir }} + e2e-node-delete-test-subdir: ${{ needs.env-vars.outputs.e2e-node-delete-test-subdir }} e2e-relay-test-subdir: ${{ needs.env-vars.outputs.e2e-relay-test-subdir }} - e2e-coverage-report: ${{ needs.env-vars.outputs.e2e-coverage-report }} + e2e-standard-coverage-report: ${{ needs.env-vars.outputs.e2e-standard-coverage-report }} e2e-mirror-node-coverage-report: ${{ needs.env-vars.outputs.e2e-mirror-node-coverage-report }} - e2e-node-pem-stop-add-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pem-stop-add-coverage-report }} - e2e-node-pfx-kill-add-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-coverage-report }} + e2e-node-pem-stop-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pem-stop-coverage-report }} + e2e-node-pem-kill-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pem-kill-coverage-report }} e2e-node-local-build-coverage-report: ${{ needs.env-vars.outputs.e2e-node-local-build-coverage-report }} + e2e-node-add-coverage-report: ${{ needs.env-vars.outputs.e2e-node-add-coverage-report }} + e2e-node-add-separate-commands-coverage-report: ${{ needs.env-vars.outputs.e2e-node-add-separate-commands-coverage-report }} + e2e-node-update-coverage-report: ${{ needs.env-vars.outputs.e2e-node-update-coverage-report }} + e2e-node-delete-coverage-report: ${{ needs.env-vars.outputs.e2e-node-delete-coverage-report }} e2e-relay-coverage-report: ${{ needs.env-vars.outputs.e2e-relay-coverage-report }} secrets: codecov-token: ${{ secrets.CODECOV_TOKEN }} @@ -166,57 +242,40 @@ jobs: needs: - env-vars - unit-tests - - e2e-tests + - e2e-standard-tests - e2e-mirror-node-tests - - e2e-node-pem-stop-add-tests - - e2e-node-pfx-kill-add-tests + - e2e-node-pem-stop-tests + - e2e-node-pem-kill-tests - e2e-node-local-build-tests + - e2e-node-add-tests + - e2e-node-add-separate-commands-tests + - e2e-node-update-tests + - e2e-node-delete-tests - e2e-relay-tests if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }} with: custom-job-label: Coverage enable-codacy-coverage: true enable-e2e-coverage-report: true - e2e-test-subdir: ${{ needs.env-vars.outputs.e2e-test-subdir }} + e2e-standard-test-subdir: ${{ needs.env-vars.outputs.e2e-standard-test-subdir }} e2e-mirror-node-test-subdir: ${{ needs.env-vars.outputs.e2e-mirror-node-test-subdir }} - e2e-node-pem-stop-add-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pem-stop-add-test-subdir }} - e2e-node-pfx-kill-add-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-test-subdir }} + e2e-node-pem-stop-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pem-stop-test-subdir }} + e2e-node-pem-kill-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pem-kill-test-subdir }} 
e2e-node-local-build-test-subdir: ${{ needs.env-vars.outputs.e2e-node-local-build-test-subdir }} + e2e-node-add-test-subdir: ${{ needs.env-vars.outputs.e2e-node-add-test-subdir }} + e2e-node-add-separate-commands-test-subdir: ${{ needs.env-vars.outputs.e2e-node-add-separate-commands-test-subdir }} + e2e-node-update-test-subdir: ${{ needs.env-vars.outputs.e2e-node-update-test-subdir }} + e2e-node-delete-test-subdir: ${{ needs.env-vars.outputs.e2e-node-delete-test-subdir }} e2e-relay-test-subdir: ${{ needs.env-vars.outputs.e2e-relay-test-subdir }} - e2e-coverage-report: ${{ needs.env-vars.outputs.e2e-coverage-report }} + e2e-standard-coverage-report: ${{ needs.env-vars.outputs.e2e-standard-coverage-report }} e2e-mirror-node-coverage-report: ${{ needs.env-vars.outputs.e2e-mirror-node-coverage-report }} - e2e-node-pem-stop-add-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pem-stop-add-coverage-report }} - e2e-node-pfx-kill-add-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-coverage-report }} + e2e-node-pem-stop-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pem-stop-coverage-report }} + e2e-node-pem-kill-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pem-kill-coverage-report }} e2e-node-local-build-coverage-report: ${{ needs.env-vars.outputs.e2e-node-local-build-coverage-report }} + e2e-node-add-coverage-report: ${{ needs.env-vars.outputs.e2e-node-add-coverage-report }} + e2e-node-add-separate-commands-coverage-report: ${{ needs.env-vars.outputs.e2e-node-add-separate-commands-coverage-report }} + e2e-node-update-coverage-report: ${{ needs.env-vars.outputs.e2e-node-update-coverage-report }} + e2e-node-delete-coverage-report: ${{ needs.env-vars.outputs.e2e-node-delete-coverage-report }} e2e-relay-coverage-report: ${{ needs.env-vars.outputs.e2e-relay-coverage-report }} secrets: codacy-project-token: ${{ secrets.CODACY_PROJECT_TOKEN }} - -# snyk: -# name: Snyk Scan -# uses: ./.github/workflows/zxc-code-analysis.yaml -# needs: -# - env-vars -# - unit-tests -# - e2e-tests -# - e2e-mirror-node-tests -# - e2e-node-pem-stop-add-tests -# - e2e-node-pfx-kill-add-tests -# - e2e-relay-tests -# if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name && github.actor != 'dependabot[bot]' }} -# with: -# custom-job-label: Standard -# enable-snyk-scan: true -# e2e-test-subdir: ${{ needs.env-vars.outputs.e2e-test-subdir }} -# e2e-mirror-node-test-subdir: ${{ needs.env-vars.outputs.e2e-mirror-node-test-subdir }} -# e2e-node-pem-stop-add-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pem-stop-add-test-subdir }} -# e2e-node-pfx-kill-add-test-subdir: ${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-test-subdir }} -# e2e-relay-test-subdir: ${{ needs.env-vars.outputs.e2e-relay-test-subdir }} -# e2e-coverage-report: ${{ needs.env-vars.outputs.e2e-coverage-report }} -# e2e-mirror-node-coverage-report: ${{ needs.env-vars.outputs.e2e-mirror-node-coverage-report }} -# e2e-node-pem-stop-add-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pem-stop-add-coverage-report }} -# e2e-node-pfx-kill-add-coverage-report: ${{ needs.env-vars.outputs.e2e-node-pfx-kill-add-coverage-report }} -# e2e-node-local-build-coverage-report: ${{ needs.env-vars.outputs.e2e-node-local-build-coverage-report }} -# e2e-relay-coverage-report: ${{ needs.env-vars.outputs.e2e-relay-coverage-report }} -# secrets: -# snyk-token: ${{ secrets.SNYK_TOKEN }} diff --git a/.github/workflows/flow-pull-request-formatting.yaml 
b/.github/workflows/flow-pull-request-formatting.yaml index 987e1b42b..e8e1e8a37 100644 --- a/.github/workflows/flow-pull-request-formatting.yaml +++ b/.github/workflows/flow-pull-request-formatting.yaml @@ -39,9 +39,9 @@ concurrency: jobs: title-check: name: Title Check - runs-on: [self-hosted, Linux, medium, ephemeral] + runs-on: solo-linux-medium steps: - name: Check PR Title - uses: step-security/conventional-pr-title-action@0eae74515f5a79f8773fa04142dd746df76666ac # v1.0.0 + uses: step-security/conventional-pr-title-action@19fb561b33015fd2184055a05ce5a3bcf2ba3f54 # v3.2.0 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/flow-update-readme.yaml b/.github/workflows/flow-update-readme.yaml index 685ecabdf..c861ead51 100644 --- a/.github/workflows/flow-update-readme.yaml +++ b/.github/workflows/flow-update-readme.yaml @@ -16,29 +16,56 @@ name: Update README.md on: - workflow_dispatch: + workflow_call: + secrets: + GH_ACCESS_TOKEN: + description: "GitHub Access Token" + required: true + GH_ACCESS_GPG_KEY: + description: "GitHub Access GPG Key" + required: true + GH_ACCESS_PASSPHRASE: + description: "GitHub Access Passphrase" + required: true + inputs: + commit-changes: + description: "Commit updated README.md" + type: boolean + required: false + default: false + version: + description: "The version to set the app to during deploy/release" + type: string + required: false + outputs: + commit-hash: + description: "The commit hash for the updated README.md commit" + value: ${{ jobs.update.outputs.commit_hash }} push: - branches: - - main paths: - '**/*.mjs' - '**/*.js' - '**/package*.json' - '**/README.md.template' + defaults: run: shell: bash + permissions: contents: read + jobs: update: - runs-on: [self-hosted, Linux, medium, ephemeral] + outputs: + commit_hash: ${{ steps.commit-readme.outputs.commit_hash }} + runs-on: solo-linux-medium steps: - name: Checkout Code for Push - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: fetch-depth: 0 - token: ${{secrets.GH_ACCESS_TOKEN}} + token: ${{ secrets.GH_ACCESS_TOKEN }} - name: Setup Node with Retry uses: Wandalen/wretry.action@6feedb7dedadeb826de0f45ff482b53b379a7844 # v3.5.0 @@ -85,7 +112,12 @@ jobs: - name: Update README.md run: | set -x + npm install npm install -g @hashgraph/solo + npm link + + echo "VERSION=${{ inputs.version }}" + [[ -n "${{ inputs.version }}" ]] && npm version ${{ inputs.version }} -f --no-git-tag-version which solo export SOLO_CLUSTER_NAME=solo @@ -96,10 +128,10 @@ jobs: export KIND_CREATE_CLUSTER_OUTPUT=$( kind create cluster -n "${SOLO_CLUSTER_NAME}" 2>&1 | tee test.log ) - export SOLO_INIT_OUTPUT=$( solo init -t v0.42.5 -i node0,node1,node2 -n "${SOLO_NAMESPACE}" -s \ - "${SOLO_CLUSTER_SETUP_NAMESPACE}" --key-format pfx | tee test.log ) + export SOLO_INIT_OUTPUT=$( solo init -i node1,node2,node3 -n "${SOLO_NAMESPACE}" -s \ + "${SOLO_CLUSTER_SETUP_NAMESPACE}" | tee test.log ) - export SOLO_NODE_KEYS_OUTPUT=$( solo node keys --gossip-keys --tls-keys --key-format pfx | tee test.log ) + export SOLO_NODE_KEY_PEM_OUTPUT=$( solo node keys --gossip-keys --tls-keys | tee test.log ) export SOLO_CLUSTER_SETUP_OUTPUT=$( solo cluster setup | tee test.log ) @@ -111,19 +143,14 @@ jobs: export SOLO_MIRROR_NODE_DEPLOY_OUTPUT=$( solo mirror-node deploy | tee test.log ) - export SOLO_RELAY_DEPLAY_OUTPUT=$( solo relay deploy -i node0,node1 | tee test.log ) - - export SOLO_INIT_047_OUTPUT=$( solo init -t 
v0.48.0-alpha.0 -i node0,node1,node2 -n "${SOLO_NAMESPACE}" \
- -s "${SOLO_CLUSTER_SETUP_NAMESPACE}" --key-format pem | tee test.log )
+ export SOLO_RELAY_DEPLAY_OUTPUT=$( solo relay deploy -i node1,node2 | tee test.log )
- export SOLO_NODE_KEY_PEM_OUTPUT=$( solo node keys --gossip-keys --tls-keys --key-format pem | tee test.log )
- export SOLO_RELAY_DEPLOY_OUTPUT=$( solo relay deploy | tee test.log )
 echo "Generate README.md"
- envsubst '$KIND_CREATE_CLUSTER_OUTPUT,$SOLO_INIT_OUTPUT,$SOLO_NODE_KEYS_OUTPUT, $SOLO_CLUSTER_SETUP_OUTPUT, \
+ envsubst '$KIND_CREATE_CLUSTER_OUTPUT,$SOLO_INIT_OUTPUT,$SOLO_NODE_KEY_PEM_OUTPUT,$SOLO_CLUSTER_SETUP_OUTPUT, \
 $SOLO_NETWORK_DEPLOY_OUTPUT,$SOLO_NODE_SETUP_OUTPUT,$SOLO_NODE_START_OUTPUT,$SOLO_MIRROR_NODE_DEPLOY_OUTPUT,\
- $SOLO_RELAY_DEPLAY_OUTPUT,$SOLO_INIT_047_OUTPUT,$SOLO_NODE_KEY_PEM_OUTPUT,$SOLO_RELAY_DEPLOY_OUTPUT'\
+ $SOLO_RELAY_DEPLAY_OUTPUT,$SOLO_RELAY_DEPLOY_OUTPUT'\
 < README.md.template > README.md
 echo "Remove color codes and lines showing intermediate progress"
@@ -166,6 +193,8 @@
 git_commit_gpgsign: true
 - name: Commit README.md Changes
+ id: commit-readme
+ if: ${{ github.event.inputs.dry-run-enabled != 'true' && !cancelled() && !failure() && inputs.commit-changes }}
 uses: stefanzweifel/git-auto-commit-action@8621497c8c39c72f3e2a999a26b4ca1b5058a842 # v5.0.1
 with:
 commit_message: "auto update README.md [skip ci]"
diff --git a/.github/workflows/script/relay_smoke_test.sh b/.github/workflows/script/relay_smoke_test.sh
new file mode 100755
index 000000000..ed26e245d
--- /dev/null
+++ b/.github/workflows/script/relay_smoke_test.sh
@@ -0,0 +1,82 @@
+#!/bin/bash
+set -eo pipefail
+
+echo "Starting test network with a single node"
+
+./test/e2e/setup-e2e.sh
+solo network deploy
+solo node keys --gossip-keys --tls-keys -i node1
+solo node setup -i node1
+solo node start -i node1
+solo mirror-node deploy
+solo relay deploy -i node1
+kubectl port-forward svc/relay-node1-hedera-json-rpc-relay -n solo-e2e 7546:7546 &
+kubectl port-forward svc/haproxy-node1-svc -n solo-e2e 50211:50211 &
+kubectl port-forward svc/fullstack-deployment-hedera-explorer -n solo-e2e 8080:80 &
+
+echo "Clone hedera local node"
+
+cd ..
+
+if [ -d "hedera-local-node" ]; then
+  echo "Directory hedera-local-node exists."
+else
+  echo "Directory hedera-local-node does not exist."
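+  # Reuse an existing checkout across runs; clone only when the directory is missing.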
+  git clone https://github.com/hashgraph/hedera-local-node --branch release-2.29.0
+fi
+
+cd hedera-local-node
+npm install
+
+echo "Generate ECDSA keys, extract from output and save to key.txt"
+npm run generate-accounts 3 > key.log
+sed -n 's/.* - \(0x[0-9a-f]*\) - \(0x[0-9a-f]*\) - .*/\1 \2/p' key.log > key.txt
+
+echo "Only keep the private key, the second column of each line in key.txt"
+awk '{print "\"" $2 "\","}' key.txt > private_key_with_quote.txt
+awk '{print "" $2 ","}' key.txt > private_key_without_quote.txt
+
+echo "Remove the trailing comma from the last line before adding the keys to the config files"
+sed '$ s/.$//' private_key_with_quote.txt > private_key_with_quote_final.txt
+sed '$ s/.$//' private_key_without_quote.txt > private_key_without_quote_final.txt
+
+LOCAL_NODE_KEYS=$(cat private_key_with_quote_final.txt)
+CONTRACT_TEST_KEYS=$(cat private_key_without_quote_final.txt)
+
+echo "Add new keys to hardhat.config.js"
+git checkout test/smoke/hardhat.config.js
+awk '/accounts: \[/ {print; getline; getline; next} 1' test/smoke/hardhat.config.js > test/smoke/hardhat.config.js.tmp
+awk -v new_keys="$LOCAL_NODE_KEYS" '/\],/ {print new_keys; print; next} 1' test/smoke/hardhat.config.js.tmp > test/smoke/hardhat.config.js || true
+cat test/smoke/hardhat.config.js
+
+#echo "Run smoke test"
+#cd test/smoke
+#npm install
+#npx hardhat test
+
+cd ..
+
+if [ -d "hedera-smart-contracts" ]; then
+  echo "Directory hedera-smart-contracts exists."
+else
+  echo "Directory hedera-smart-contracts does not exist."
+  git clone https://github.com/hashgraph/hedera-smart-contracts --branch only-erc20-tests
+fi
+cd hedera-smart-contracts
+
+npm install
+npx hardhat compile
+
+echo "Build .env file"
+
+echo "PRIVATE_KEYS=\"$CONTRACT_TEST_KEYS\"" > .env
+echo "RETRY_DELAY=5000 # ms" >> .env
+echo "MAX_RETRY=5" >> .env
+cat .env
+
+echo "Start background transaction"
+cd ../hedera-local-node; watch npm run generate-accounts 3 > background.log & cd -
+
+npm list
+echo "Run contract test"
+npm run hh:test
diff --git a/.github/workflows/templates/config.yaml b/.github/workflows/templates/config.yaml
new file mode 100644
index 000000000..f987ce09f
--- /dev/null
+++ b/.github/workflows/templates/config.yaml
@@ -0,0 +1,25 @@
+downloadArtifactAction: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16
+downloadArtifactActionComment: v4.1.8
+tests:
+  # name: capitalized noun form
+  # testFilePrefix: node-update # for node-update.test.mjs
+  - name: Standard
+    jestPostfix: --testPathIgnorePatterns=\".*/unit/.*\" --testPathIgnorePatterns=\".*/e2e/commands/mirror_node.*\" --testPathIgnorePatterns=\".*/e2e/commands/node.*\" --testPathIgnorePatterns=\".*/e2e/commands/separate_node.*\" --testPathIgnorePatterns=\".*/e2e/commands/relay.*\"
+  - name: Mirror Node
+    jestPostfix: --testRegex=\".*\\/e2e\\/commands\\/mirror_node\\.test\\.mjs\"
+  - name: Node PEM Stop
+    jestPostfix: --testRegex=\".*\\/e2e\\/commands\\/node_pem_stop\\.test\\.mjs\"
+  - name: Node PEM Kill
+    jestPostfix: --testRegex=\".*\\/e2e\\/commands\\/node_pem_kill\\.test\\.mjs\"
+  - name: Node Local Build
+    jestPostfix: --testRegex=\".*\\/e2e\\/commands\\/node_local.*\\.test\\.mjs\"
+  - name: Node Add
+    jestPostfix: --testRegex=\".*\\/e2e\\/commands\\/node_add.*\\.test\\.mjs\"
+  - name: Node Add - Separate commands
+    jestPostfix: --testRegex=\".*\\/e2e\\/commands\\/separate_node_add.*\\.test\\.mjs\"
+  - name: Node Update
+    jestPostfix: --testRegex=\".*\\/e2e\\/commands\\/node_update.*\\.test\\.mjs\"
+  - name: Node Delete
+    jestPostfix:
--testRegex=\".*\\/e2e\\/commands\\/node_delete.*\\.test\\.mjs\" + - name: Relay + jestPostfix: --testRegex=\".*\\/e2e\\/commands\\/relay\\.test\\.mjs\" diff --git a/.github/workflows/templates/template.flow-build-application.yaml b/.github/workflows/templates/template.flow-build-application.yaml new file mode 100644 index 000000000..51c76dee3 --- /dev/null +++ b/.github/workflows/templates/template.flow-build-application.yaml @@ -0,0 +1,88 @@ +## +# Copyright (C) 2022-2023 Hedera Hashgraph, LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +## + +name: "Build Application" +on: + workflow_dispatch: + inputs: + enable-unit-tests: + description: "Unit Testing Enabled" + type: boolean + required: false + default: true + enable-e2e-tests: + description: "E2E Testing Enabled" + type: boolean + required: false + default: false + enable-snyk-scan: + description: "Snyk Scan Enabled" + type: boolean + required: false + default: false + push: + branches: + - main + - 'release/*' + +defaults: + run: + shell: bash + +jobs: + env-vars: + name: Set Environment Variables + uses: ./.github/workflows/zxc-env-vars.yaml + with: + custom-job-label: Set Environment Variables + + code-style: + name: Code Style + uses: ./.github/workflows/zxc-code-style.yaml + with: + custom-job-label: Standard + + unit-tests: + name: Unit Tests + uses: ./.github/workflows/zxc-unit-test.yaml + if: ${{ github.event_name == 'push' || github.event.inputs.enable-unit-tests == 'true' }} + needs: + - code-style + with: + custom-job-label: Standard + + # {AUTOGENERATE-E2E-TEST-JOBS} + + analyze: + name: Analyze + uses: ./.github/workflows/zxc-code-analysis.yaml + needs: + - env-vars + - unit-tests + # {AUTOGENERATE-NEEDS} + if: ${{ (github.event_name == 'push' || github.event.inputs.enable-unit-tests == 'true' || github.event.inputs.enable-e2e-tests == 'true') && !failure() && !cancelled() }} + with: + custom-job-label: Source Code + #enable-snyk-scan: ${{ github.event_name == 'push' || github.event.inputs.enable-snyk-scan == 'true' }} + enable-codecov-analysis: true + enable-codacy-coverage: true + enable-e2e-coverage-report: ${{ github.event_name == 'push' || github.event.inputs.enable-e2e-tests == 'true' }} + # {AUTOGENERATE-WITH-SUBDIR} + # {AUTOGENERATE-WITH-COVERAGE-REPORT} + secrets: + snyk-token: ${{ secrets.SNYK_TOKEN }} + codecov-token: ${{ secrets.CODECOV_TOKEN }} + codacy-project-token: ${{ secrets.CODACY_PROJECT_TOKEN }} diff --git a/.github/workflows/templates/template.flow-pull-request-checks.yaml b/.github/workflows/templates/template.flow-pull-request-checks.yaml new file mode 100644 index 000000000..0c4cb25be --- /dev/null +++ b/.github/workflows/templates/template.flow-pull-request-checks.yaml @@ -0,0 +1,89 @@ +## +# Copyright (C) 2023-2024 Hedera Hashgraph, LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +## + +name: "PR Checks" +on: + workflow_dispatch: + pull_request: + types: + - opened + - reopened + - synchronize + +defaults: + run: + shell: bash + +concurrency: + group: pr-checks-${{ github.workflow }}-${{ github.head_ref || github.run_id }} + cancel-in-progress: true + +jobs: + env-vars: + name: Set Environment Variables + uses: ./.github/workflows/zxc-env-vars.yaml + with: + custom-job-label: Set Environment Variables + + code-style: + name: Code Style + uses: ./.github/workflows/zxc-code-style.yaml + with: + custom-job-label: Standard + + unit-tests: + name: Unit Tests + uses: ./.github/workflows/zxc-unit-test.yaml + needs: + - code-style + with: + custom-job-label: Standard + + # {AUTOGENERATE-E2E-TEST-JOBS-2} + + codecov: + name: CodeCov + uses: ./.github/workflows/zxc-code-analysis.yaml + needs: + - env-vars + - unit-tests + # {AUTOGENERATE-NEEDS} + if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }} + with: + custom-job-label: Standard + enable-codecov-analysis: true + enable-e2e-coverage-report: true + # {AUTOGENERATE-WITH-SUBDIR} + # {AUTOGENERATE-WITH-COVERAGE-REPORT} + secrets: + codecov-token: ${{ secrets.CODECOV_TOKEN }} + + codacy-coverage: + name: Codacy + uses: ./.github/workflows/zxc-code-analysis.yaml + needs: + - env-vars + - unit-tests + # {AUTOGENERATE-NEEDS} + if: ${{ github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name }} + with: + custom-job-label: Coverage + enable-codacy-coverage: true + enable-e2e-coverage-report: true + # {AUTOGENERATE-WITH-SUBDIR} + # {AUTOGENERATE-WITH-COVERAGE-REPORT} + secrets: + codacy-project-token: ${{ secrets.CODACY_PROJECT_TOKEN }} diff --git a/.github/workflows/templates/template.zxc-code-analysis.yaml b/.github/workflows/templates/template.zxc-code-analysis.yaml new file mode 100644 index 000000000..7f7a0a762 --- /dev/null +++ b/.github/workflows/templates/template.zxc-code-analysis.yaml @@ -0,0 +1,185 @@ +## +# Copyright (C) 2023-2024 Hedera Hashgraph, LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +## + +name: "ZXC: Code Analysis" +# The purpose of this reusable workflow is to perform static code analysis and code coverage reporting. +# This reusable component is called by the following workflows: +# - .github/workflows/flow-pull-request-checks.yaml +# - .github/workflows/flow-build-application.yaml +# +# This workflow is only run if the pull request is coming from the original repository and not a fork. 
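+#
+# The {AUTOGENERATE-*} markers in this template are placeholders: running the autogen
+# tool expands them, from the entries in .github/workflows/templates/config.yaml, into
+# the concrete per-test inputs and download steps seen in zxc-code-analysis.yaml.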
+ +on: + workflow_call: + inputs: + enable-codecov-analysis: + description: "CodeCov Analysis Enabled" + type: boolean + required: false + default: false + enable-codacy-coverage: + description: "Codacy Coverage Enabled" + type: boolean + required: false + default: false + enable-e2e-coverage-report: + description: "E2E Coverage Report Enabled" + type: boolean + required: false + default: false + enable-snyk-scan: + description: "Snyk Scan Enabled" + type: boolean + required: false + default: false + node-version: + description: "NodeJS Version:" + type: string + required: false + default: "20.14.0" + custom-job-label: + description: "Custom Job Label:" + type: string + required: false + default: "Analyze" + # {AUTOGENERATE-INPUTS-SUB-DIRS} + # {AUTOGENERATE-INPUTS-COVERAGE-REPORTS} + secrets: + snyk-token: + description: "The Snyk access token is used by Snyk to analyze the code for vulnerabilities " + required: false + codecov-token: + description: "The CodeCov access token is used by CodeCov.io to analyze the code coverage " + required: false + codacy-project-token: + description: "The Codacy project token used to report code coverage." + required: false + +defaults: + run: + shell: bash + +permissions: + contents: read + actions: read + pull-requests: write + checks: write + statuses: write + +jobs: + analyze: + name: ${{ inputs.custom-job-label || 'Analyze' }} + runs-on: solo-linux-medium + steps: + - name: Checkout Code + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + with: + ref: ${{ github.event.workflow_run.head_branch }} + fetch-depth: ${{ inputs.enable-sonar-analysis && '0' || '' }} + + - name: Setup Node with Retry + uses: Wandalen/wretry.action@6feedb7dedadeb826de0f45ff482b53b379a7844 # v3.5.0 + with: + action: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2 + with: | + node-version: ${{ inputs.node-version }} + cache: npm + attempt_limit: 3 + attempt_delay: 5000 + + - name: Download Unit Test Coverage Report + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && !cancelled() && !failure() }} + with: + name: Unit Test Coverage Report + path: 'coverage/unit' + + # {AUTOGENERATE-DOWNLOAD-JOBS} + + - name: Publish To Codecov + uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # v4.5.0 + if: ${{ inputs.enable-codecov-analysis && !cancelled() && !failure() }} + env: + CODECOV_TOKEN: ${{ secrets.codecov-token }} + with: + verbose: true + directory: 'coverage' + + - name: Publish to Codacy + env: + CODACY_PROJECT_TOKEN: ${{ secrets.codacy-project-token }} + if: ${{ inputs.enable-codacy-coverage && !cancelled() && !failure() }} + run: bash <(curl -Ls https://coverage.codacy.com/get.sh) report -l Javascript $(find . 
-name 'lcov.info' -printf '-r %p ') + + - name: Setup Snyk + env: + SNYK_TOKEN: ${{ secrets.snyk-token }} + if: ${{ inputs.enable-snyk-scan && !cancelled() && !failure() }} + run: npm install -g snyk snyk-to-html @wcj/html-to-markdown-cli + + - name: Snyk Scan + id: snyk + env: + SNYK_TOKEN: ${{ secrets.snyk-token }} + if: ${{ inputs.enable-snyk-scan && !cancelled() && !failure() }} + run: snyk test --org=release-engineering-N6EoZVZn3jw4qNuVkiG5Qs --all-projects --severity-threshold=high --json-file-output=snyk-test.json + + - name: Snyk Code + id: snyk-code + env: + SNYK_TOKEN: ${{ secrets.snyk-token }} + if: ${{ inputs.enable-snyk-scan && !cancelled() && !failure() }} + run: snyk code test --org=release-engineering-N6EoZVZn3jw4qNuVkiG5Qs --severity-threshold=high --json-file-output=snyk-code.json + + - name: Publish Snyk Results + if: ${{ inputs.enable-snyk-scan && !cancelled() && !failure() }} + run: | + if [[ -f "snyk-test.json" && -n "$(cat snyk-test.json | tr -d '[:space:]')" ]]; then + snyk-to-html -i snyk-test.json -o snyk-test.html --summary + html-to-markdown snyk-test.html -o snyk + cat snyk/snyk-test.html.md >> $GITHUB_STEP_SUMMARY + fi + + - name: Publish Snyk Code Results + if: ${{ inputs.enable-snyk-scan && !cancelled() && !failure() }} + run: | + if [[ -f "snyk-code.json" && -n "$(cat snyk-code.json | tr -d '[:space:]')" ]]; then + snyk-to-html -i snyk-code.json -o snyk-code.html --summary + html-to-markdown snyk-code.html -o snyk + cat snyk/snyk-code.html.md >> $GITHUB_STEP_SUMMARY + fi + + - name: Check Snyk Files + if: ${{ always() }} + run: | + echo "::group::Snyk File List" + ls -lah snyk* || true + echo "::endgroup::" + echo "::group::Snyk Test Contents" + cat snyk-test.json || true + echo "::endgroup::" + echo "::group::Snyk Code Contents" + cat snyk-code.json || true + echo "::endgroup::" + + - name: Publish Snyk Reports + uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 + if: ${{ inputs.enable-snyk-scan && !cancelled() && !failure() }} + with: + name: Snyk Reports + path: | + snyk-*.html + snyk-*.json diff --git a/.github/workflows/templates/template.zxc-env-vars.yaml b/.github/workflows/templates/template.zxc-env-vars.yaml new file mode 100644 index 000000000..d2b3b27c8 --- /dev/null +++ b/.github/workflows/templates/template.zxc-env-vars.yaml @@ -0,0 +1,44 @@ +## +# Copyright (C) 2023-2024 Hedera Hashgraph, LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +## + +name: "ZXC: Environment Variables" +# The purpose of this reusable workflow is to provide environment variables for use in re-usable workflows. 
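+#
+# The {AUTOGENERATE-*} markers in this template are likewise expanded by the autogen
+# tool into the per-test subdirectory and coverage-report outputs that zxc-env-vars.yaml exposes.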
+ +on: + workflow_call: + inputs: + custom-job-label: + description: "Custom Job Label:" + type: string + required: false + default: "Set Environment Variables" + outputs: + # {AUTOGENERATE-WORKFLOW-OUTPUTS-SUB-DIRS} + # {AUTOGENERATE-WORKFLOW-OUTPUTS-COVERAGE-REPORTS} + +defaults: + run: + shell: bash + +jobs: + env-vars: + name: ${{ inputs.custom-job-label || 'Set Environment Variables' }} + runs-on: solo-linux-medium + outputs: + # {AUTOGENERATE-JOB-OUTPUTS-SUB-DIRS} + # {AUTOGENERATE-JOB-OUTPUTS-COVERAGE-REPORTS} + steps: + - run: echo "Exposing environment variables to reusable workflows" diff --git a/.github/workflows/zxc-code-analysis.yaml b/.github/workflows/zxc-code-analysis.yaml index f50f9d45f..a67243de0 100644 --- a/.github/workflows/zxc-code-analysis.yaml +++ b/.github/workflows/zxc-code-analysis.yaml @@ -55,61 +55,101 @@ on: type: string required: false default: "Analyze" - e2e-test-subdir: - description: "E2E Test Subdirectory:" + e2e-standard-test-subdir: + description: "E2E Standard Test Subdirectory:" type: string required: false - default: "e2e" + default: "e2e-standard" e2e-mirror-node-test-subdir: description: "E2E Mirror Node Test Subdirectory:" type: string required: false default: "e2e-mirror-node" - e2e-node-pem-stop-add-test-subdir: - description: "E2E Node PEM Stop Add Test Subdirectory:" + e2e-node-pem-stop-test-subdir: + description: "E2E Node PEM Stop Test Subdirectory:" type: string required: false - default: "e2e-node-pem-stop-add" - e2e-node-pfx-kill-add-test-subdir: - description: "E2E Node PFX Kill Add Test Subdirectory:" + default: "e2e-node-pem-stop" + e2e-node-pem-kill-test-subdir: + description: "E2E Node PEM Kill Test Subdirectory:" type: string required: false - default: "e2e-node-pfx-kill-add" + default: "e2e-node-pem-kill" e2e-node-local-build-test-subdir: description: "E2E Node Local Build Test Subdirectory:" type: string required: false default: "e2e-node-local-build" + e2e-node-add-test-subdir: + description: "E2E Node Add Test Subdirectory:" + type: string + required: false + default: "e2e-node-add" + e2e-node-add-separate-commands-test-subdir: + description: "E2E Node Add - Separate commands Test Subdirectory:" + type: string + required: false + default: "e2e-node-add-separate-commands" + e2e-node-update-test-subdir: + description: "E2E Node Update Test Subdirectory:" + type: string + required: false + default: "e2e-node-update" + e2e-node-delete-test-subdir: + description: "E2E Node Delete Test Subdirectory:" + type: string + required: false + default: "e2e-node-delete" e2e-relay-test-subdir: description: "E2E Relay Test Subdirectory:" type: string required: false default: "e2e-relay" - e2e-coverage-report: - description: "E2E Coverage Report:" + e2e-standard-coverage-report: + description: "E2E Standard Coverage Report:" type: string required: false - default: "E2E Tests Coverage Report" + default: "E2E Standard Tests Coverage Report" e2e-mirror-node-coverage-report: description: "E2E Mirror Node Coverage Report:" type: string required: false default: "E2E Mirror Node Tests Coverage Report" - e2e-node-pem-stop-add-coverage-report: - description: "E2E Node PEM Stop Add Coverage Report:" + e2e-node-pem-stop-coverage-report: + description: "E2E Node PEM Stop Coverage Report:" type: string required: false - default: "E2E Node PEM Stop Add Tests Coverage Report" - e2e-node-pfx-kill-add-coverage-report: - description: "E2E Node PFX Kill Add Coverage Report:" + default: "E2E Node PEM Stop Tests Coverage Report" + e2e-node-pem-kill-coverage-report: 
+ description: "E2E Node PEM Kill Coverage Report:" type: string required: false - default: "E2E Node PFX Kill Add Tests Coverage Report" + default: "E2E Node PEM Kill Tests Coverage Report" e2e-node-local-build-coverage-report: description: "E2E Node Local Build Coverage Report:" type: string required: false default: "E2E Node Local Build Tests Coverage Report" + e2e-node-add-coverage-report: + description: "E2E Node Add Coverage Report:" + type: string + required: false + default: "E2E Node Add Tests Coverage Report" + e2e-node-add-separate-commands-coverage-report: + description: "E2E Node Add - Separate commands Coverage Report:" + type: string + required: false + default: "E2E Node Add - Separate commands Tests Coverage Report" + e2e-node-update-coverage-report: + description: "E2E Node Update Coverage Report:" + type: string + required: false + default: "E2E Node Update Tests Coverage Report" + e2e-node-delete-coverage-report: + description: "E2E Node Delete Coverage Report:" + type: string + required: false + default: "E2E Node Delete Tests Coverage Report" e2e-relay-coverage-report: description: "E2E Relay Coverage Report:" type: string @@ -140,10 +180,10 @@ permissions: jobs: analyze: name: ${{ inputs.custom-job-label || 'Analyze' }} - runs-on: [self-hosted, Linux, medium, ephemeral] + runs-on: solo-linux-medium steps: - name: Checkout Code - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: ref: ${{ github.event.workflow_run.head_branch }} fetch-depth: ${{ inputs.enable-sonar-analysis && '0' || '' }} @@ -159,53 +199,95 @@ jobs: attempt_delay: 5000 - name: Download Unit Test Coverage Report - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && !cancelled() && !failure() }} with: name: Unit Test Coverage Report path: 'coverage/unit' - - name: Download E2E Coverage Report - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + - name: Download E2E Standard Coverage Report + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && inputs.enable-e2e-coverage-report && !cancelled() && !failure() }} with: - name: ${{ inputs.e2e-coverage-report }} - path: 'coverage/${{ inputs.e2e-test-subdir }}' + name: ${{ inputs.e2e-standard-coverage-report }} + path: 'coverage/${{ inputs.e2e-standard-test-subdir }}' - name: Download E2E Mirror Node Coverage Report - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && inputs.enable-e2e-coverage-report && !cancelled() && !failure() }} with: name: ${{ inputs.e2e-mirror-node-coverage-report }} path: 'coverage/${{ inputs.e2e-mirror-node-test-subdir }}' - - name: Download E2E Node PEM Stop Add Coverage Report - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + - name: Download E2E Node PEM Stop Coverage Report + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && inputs.enable-e2e-coverage-report && 
!cancelled() && !failure() }} + with: + name: ${{ inputs.e2e-node-pem-stop-coverage-report }} + path: 'coverage/${{ inputs.e2e-node-pem-stop-test-subdir }}' + + - name: Download E2E Node PEM Kill Coverage Report + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && inputs.enable-e2e-coverage-report && !cancelled() && !failure() }} + with: + name: ${{ inputs.e2e-node-pem-kill-coverage-report }} + path: 'coverage/${{ inputs.e2e-node-pem-kill-test-subdir }}' + + - name: Download E2E Node Local Build Coverage Report + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && inputs.enable-e2e-coverage-report && !cancelled() && !failure() }} + with: + name: ${{ inputs.e2e-node-local-build-coverage-report }} + path: 'coverage/${{ inputs.e2e-node-local-build-test-subdir }}' + + - name: Download E2E Node Add Coverage Report + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && inputs.enable-e2e-coverage-report && !cancelled() && !failure() }} + with: + name: ${{ inputs.e2e-node-add-coverage-report }} + path: 'coverage/${{ inputs.e2e-node-add-test-subdir }}' + + - name: Download E2E Node Add - Separate commands Coverage Report + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 + if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && inputs.enable-e2e-coverage-report && !cancelled() && !failure() }} + with: + name: ${{ inputs.e2e-node-add-separate-commands-coverage-report }} + path: 'coverage/${{ inputs.e2e-node-add-separate-commands-test-subdir }}' + + - name: Download E2E Node Update Coverage Report + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && inputs.enable-e2e-coverage-report && !cancelled() && !failure() }} with: - name: ${{ inputs.e2e-node-pem-stop-add-coverage-report }} - path: 'coverage/${{ inputs.e2e-node-pem-stop-add-test-subdir }}' + name: ${{ inputs.e2e-node-update-coverage-report }} + path: 'coverage/${{ inputs.e2e-node-update-test-subdir }}' - - name: Download E2E Node PFX Kill Add Coverage Report - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + - name: Download E2E Node Delete Coverage Report + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && inputs.enable-e2e-coverage-report && !cancelled() && !failure() }} with: - name: ${{ inputs.e2e-node-pfx-kill-add-coverage-report }} - path: 'coverage/${{ inputs.e2e-node-pfx-kill-add-test-subdir }}' + name: ${{ inputs.e2e-node-delete-coverage-report }} + path: 'coverage/${{ inputs.e2e-node-delete-test-subdir }}' - name: Download E2E Relay Coverage Report - uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && inputs.enable-e2e-coverage-report && !cancelled() && !failure() }} with: name: ${{ inputs.e2e-relay-coverage-report }} path: 'coverage/${{ inputs.e2e-relay-test-subdir }}' - - name: Download E2E Local Build Coverage Report - 
uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7 + - name: Download E2E Test Report + uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && inputs.enable-e2e-coverage-report && !cancelled() && !failure() }} with: - name: ${{ inputs.e2e-node-local-build-test-coverage-report }} - path: 'coverage/${{ inputs.e2e-node-local-build-test-subdir }}' + pattern: "e2e_test_report_*" + path: "e2e_test_report" + + - name: Publish E2E Test Report + uses: EnricoMi/publish-unit-test-result-action@82082dac68ad6a19d980f8ce817e108b9f496c2a # v2.17.1 + if: ${{ (inputs.enable-codecov-analysis || inputs.enable-codacy-coverage) && inputs.enable-e2e-coverage-report && !cancelled() && !failure() }} + with: + check_name: "E2E Test Report" + files: "e2e_test_report/**/*.xml" - name: Publish To Codecov uses: codecov/codecov-action@e28ff129e5465c2c0dcc6f003fc735cb6ae0c673 # v4.5.0 @@ -274,7 +356,7 @@ jobs: echo "::endgroup::" - name: Publish Snyk Reports - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 if: ${{ inputs.enable-snyk-scan && !cancelled() && !failure() }} with: name: Snyk Reports diff --git a/.github/workflows/zxc-code-style.yaml b/.github/workflows/zxc-code-style.yaml index f000dd4a2..fbbe22b23 100644 --- a/.github/workflows/zxc-code-style.yaml +++ b/.github/workflows/zxc-code-style.yaml @@ -49,10 +49,10 @@ permissions: jobs: code-style: name: ${{ inputs.custom-job-label || 'Code Style' }} - runs-on: [self-hosted, Linux, medium, ephemeral] + runs-on: solo-linux-medium steps: - name: Checkout Code - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: Setup Node with Retry uses: Wandalen/wretry.action@6feedb7dedadeb826de0f45ff482b53b379a7844 # v3.5.0 diff --git a/.github/workflows/zxc-e2e-test.yaml b/.github/workflows/zxc-e2e-test.yaml index 4b1cb5b19..6771258c0 100644 --- a/.github/workflows/zxc-e2e-test.yaml +++ b/.github/workflows/zxc-e2e-test.yaml @@ -37,7 +37,7 @@ on: description: "NPM Test Run Script:" type: string required: false - default: "test-e2e" + default: "test-e2e-standard" coverage-subdirectory: description: "Coverage Report Subdirectory:" type: string @@ -62,56 +62,57 @@ permissions: statuses: write env: - CG_EXEC: export R_UID=$(id -u); CGROUP_LOGLEVEL=DEBUG cgexec -g cpu,memory:user.slice/user-${R_UID}.slice/user@${R_UID}.service/e2e-${{ github.run_id }} --sticky ionice -c 2 -n 2 nice -n 19 + #CG_EXEC: export R_UID=$(id -u); CGROUP_LOGLEVEL=DEBUG cgexec -g cpu,memory:user.slice/user-${R_UID}.slice/user@${R_UID}.service/e2e-${{ github.run_id }} --sticky ionice -c 2 -n 2 nice -n 19 + CG_EXEC: "" jobs: e2e-test: name: ${{ inputs.custom-job-label || 'E2E Test' }} - runs-on: [self-hosted, Linux, large, ephemeral] + runs-on: solo-linux-large steps: - name: Checkout Code - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 - - - name: Setup Control Groups - run: | - echo "::group::Get System Configuration" - USR_ID="$(id -un)" - GRP_ID="$(id -gn)" - E2E_MEM_LIMIT="30064771072" - AGENT_MEM_LIMIT="2147483648" - USER_SLICE="user.slice/user-$(id -u).slice" - USER_SERVICE="${USER_SLICE}/user@$(id -u).service" - E2E_GROUP_NAME="${USER_SERVICE}/e2e-${{ github.run_id }}" - AGENT_GROUP_NAME="${USER_SERVICE}/agent-${{ github.run_id }}" 
- echo "::endgroup::" - - echo "::group::Install Control Group Tools" - if ! command -v cgcreate >/dev/null 2>&1; then - sudo apt-get update - sudo apt-get install -y cgroup-tools - fi - echo "::endgroup::" - - echo "::group::Create Control Groups" - sudo cgcreate -g cpu,memory:${USER_SLICE} -a ${USR_ID}:${GRP_ID} -t ${USR_ID}:${GRP_ID} - sudo cgcreate -g cpu,memory:${USER_SERVICE} -a ${USR_ID}:${GRP_ID} -t ${USR_ID}:${GRP_ID} - sudo cgcreate -g cpu,memory:${E2E_GROUP_NAME} -a ${USR_ID}:${GRP_ID} -t ${USR_ID}:${GRP_ID} - sudo cgcreate -g cpu,memory:${AGENT_GROUP_NAME} -a ${USR_ID}:${GRP_ID} -t ${USR_ID}:${GRP_ID} - echo "::endgroup::" - - echo "::group::Set Control Group Limits" - cgset -r cpu.weight=768 ${E2E_GROUP_NAME} - cgset -r cpu.weight=500 ${AGENT_GROUP_NAME} - cgset -r memory.max=${E2E_MEM_LIMIT} ${E2E_GROUP_NAME} - cgset -r memory.max=${AGENT_MEM_LIMIT} ${AGENT_GROUP_NAME} - cgset -r memory.swap.max=${E2E_MEM_LIMIT} ${E2E_GROUP_NAME} - cgset -r memory.swap.max=${AGENT_MEM_LIMIT} ${AGENT_GROUP_NAME} - echo "::endgroup::" - - echo "::group::Move Runner Processes to Control Groups" - sudo cgclassify --sticky -g cpu,memory:${AGENT_GROUP_NAME} $(pgrep 'Runner.Listener' | tr '\n' ' ') - sudo cgclassify -g cpu,memory:${AGENT_GROUP_NAME} $(pgrep 'Runner.Worker' | tr '\n' ' ') - echo "::endgroup::" + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 + +# - name: Setup Control Groups +# run: | +# echo "::group::Get System Configuration" +# USR_ID="$(id -un)" +# GRP_ID="$(id -gn)" +# E2E_MEM_LIMIT="30064771072" +# AGENT_MEM_LIMIT="2147483648" +# USER_SLICE="user.slice/user-$(id -u).slice" +# USER_SERVICE="${USER_SLICE}/user@$(id -u).service" +# E2E_GROUP_NAME="${USER_SERVICE}/e2e-${{ github.run_id }}" +# AGENT_GROUP_NAME="${USER_SERVICE}/agent-${{ github.run_id }}" +# echo "::endgroup::" +# +# echo "::group::Install Control Group Tools" +# if ! 
command -v cgcreate >/dev/null 2>&1; then +# sudo apt-get update +# sudo apt-get install -y cgroup-tools +# fi +# echo "::endgroup::" +# +# echo "::group::Create Control Groups" +# sudo cgcreate -g cpu,memory:${USER_SLICE} -a ${USR_ID}:${GRP_ID} -t ${USR_ID}:${GRP_ID} +# sudo cgcreate -g cpu,memory:${USER_SERVICE} -a ${USR_ID}:${GRP_ID} -t ${USR_ID}:${GRP_ID} +# sudo cgcreate -g cpu,memory:${E2E_GROUP_NAME} -a ${USR_ID}:${GRP_ID} -t ${USR_ID}:${GRP_ID} +# sudo cgcreate -g cpu,memory:${AGENT_GROUP_NAME} -a ${USR_ID}:${GRP_ID} -t ${USR_ID}:${GRP_ID} +# echo "::endgroup::" +# +# echo "::group::Set Control Group Limits" +# cgset -r cpu.weight=768 ${E2E_GROUP_NAME} +# cgset -r cpu.weight=500 ${AGENT_GROUP_NAME} +# cgset -r memory.max=${E2E_MEM_LIMIT} ${E2E_GROUP_NAME} +# cgset -r memory.max=${AGENT_MEM_LIMIT} ${AGENT_GROUP_NAME} +# cgset -r memory.swap.max=${E2E_MEM_LIMIT} ${E2E_GROUP_NAME} +# cgset -r memory.swap.max=${AGENT_MEM_LIMIT} ${AGENT_GROUP_NAME} +# echo "::endgroup::" +# +# echo "::group::Move Runner Processes to Control Groups" +# sudo cgclassify --sticky -g cpu,memory:${AGENT_GROUP_NAME} $(pgrep 'Runner.Listener' | tr '\n' ' ') +# sudo cgclassify -g cpu,memory:${AGENT_GROUP_NAME} $(pgrep 'Runner.Worker' | tr '\n' ' ') +# echo "::endgroup::" - name: Setup Node with Retry uses: Wandalen/wretry.action@6feedb7dedadeb826de0f45ff482b53b379a7844 # v3.5.0 @@ -146,23 +147,23 @@ jobs: run: docker image pull kindest/node:v1.27.3@sha256:3966ac761ae0136263ffdb6cfd4db23ef8a83cba8a463690e98317add2c9ba72 - name: Setup Java - if: ${{ runner.os == 'linux' && inputs.npm-test-script == 'test-e2e-node-local-build' && !cancelled() && !failure() }} - uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 # v4.0.0 + if: ${{ runner.os == 'linux' && (inputs.npm-test-script == 'test-e2e-node-local-build' || inputs.npm-test-script == 'test-e2e-node-add') && !cancelled() && !failure() }} + uses: actions/setup-java@b36c23c0d998641eff861008f374ee103c25ac73 # v4.4.0 with: distribution: temurin java-version: 21.0.1 - name: Setup Gradle - if: ${{ runner.os == 'linux' && inputs.npm-test-script == 'test-e2e-node-local-build' && !cancelled() && !failure() }} - uses: gradle/gradle-build-action@4c39dd82cd5e1ec7c6fa0173bb41b4b6bb3b86ff # v3.3.2 + if: ${{ runner.os == 'linux' && (inputs.npm-test-script == 'test-e2e-node-local-build' || inputs.npm-test-script == 'test-e2e-node-add') && !cancelled() && !failure() }} + uses: gradle/gradle-build-action@ac2d340dc04d9e1113182899e983b5400c17cda1 # v3.5.0 with: cache-read-only: false - name: Build Hedera code locally - if: ${{ runner.os == 'linux' && inputs.npm-test-script == 'test-e2e-node-local-build' && !cancelled() && !failure() }} + if: ${{ runner.os == 'linux' && (inputs.npm-test-script == 'test-e2e-node-local-build' || inputs.npm-test-script == 'test-e2e-node-add') && !cancelled() && !failure() }} run: | cd .. 
- git clone https://github.com/hashgraph/hedera-services.git + git clone https://github.com/hashgraph/hedera-services.git --depth 1 --branch v0.54.0-alpha.4 cd hedera-services ls -ltr ${{ env.CG_EXEC }} ./gradlew assemble --stacktrace --info @@ -177,25 +178,32 @@ jobs: run: | ${{ env.CG_EXEC }} npm run ${{ inputs.npm-test-script }} + - name: RPC relay smoke test + if: ${{ runner.os == 'linux' && inputs.npm-test-script == 'test-e2e-relay' && !cancelled() && !failure() }} + run: | + echo "Skipped smoke test for relay" + #.github/workflows/script/relay_smoke_test.sh + - name: Upload E2E Logs to GitHub if: ${{ !cancelled() }} - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 with: name: solo-${{ inputs.npm-test-script }}.log path: ~/.solo/logs/* overwrite: true if-no-files-found: error - - name: Publish E2E Test Report - uses: EnricoMi/publish-unit-test-result-action@30eadd5010312f995f0d3b3cff7fe2984f69409e # v2.16.1 + - name: Upload E2E Test Report + uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 if: ${{ steps.npm-deps.conclusion == 'success' && !cancelled() }} with: - check_name: ${{ inputs.coverage-report-name }} - # the subdirectory is also used in the junit name to avoid conflicts - files: "junit-${{ inputs.coverage-subdirectory }}.xml" + name: e2e_test_report_${{ inputs.npm-test-script }} + path: "junit-${{ inputs.coverage-subdirectory }}.xml" + overwrite: true + if-no-files-found: error - name: Publish E2E Coverage Report - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 if: ${{ !cancelled() }} with: name: ${{ inputs.coverage-report-name }} diff --git a/.github/workflows/zxc-env-vars.yaml b/.github/workflows/zxc-env-vars.yaml index 45cea6797..ebdadef17 100644 --- a/.github/workflows/zxc-env-vars.yaml +++ b/.github/workflows/zxc-env-vars.yaml @@ -26,39 +26,63 @@ on: required: false default: "Set Environment Variables" outputs: - e2e-test-subdir: - description: "E2E Test Subdirectory" - value: ${{ jobs.env-vars.outputs.e2e_test_subdir }} + e2e-standard-test-subdir: + description: "E2E Standard Test Subdirectory" + value: ${{ jobs.env-vars.outputs.e2e_standard_test_subdir }} e2e-mirror-node-test-subdir: description: "E2E Mirror Node Test Subdirectory" value: ${{ jobs.env-vars.outputs.e2e_mirror_node_test_subdir }} - e2e-node-pem-stop-add-test-subdir: - description: "E2E Node PEM Stop Add Test Subdirectory" - value: ${{ jobs.env-vars.outputs.e2e_node_pem_stop_add_test_subdir }} - e2e-node-pfx-kill-add-test-subdir: - description: "E2E Node PFX Kill Add Test Subdirectory" - value: ${{ jobs.env-vars.outputs.e2e_node_pfx_kill_add_test_subdir }} + e2e-node-pem-stop-test-subdir: + description: "E2E Node PEM Stop Test Subdirectory" + value: ${{ jobs.env-vars.outputs.e2e_node_pem_stop_test_subdir }} + e2e-node-pem-kill-test-subdir: + description: "E2E Node PEM Kill Test Subdirectory" + value: ${{ jobs.env-vars.outputs.e2e_node_pem_kill_test_subdir }} e2e-node-local-build-test-subdir: description: "E2E Node Local Build Test Subdirectory" value: ${{ jobs.env-vars.outputs.e2e_node_local_build_test_subdir }} + e2e-node-add-test-subdir: + description: "E2E Node Add Test Subdirectory" + value: ${{ jobs.env-vars.outputs.e2e_node_add_test_subdir }} + e2e-node-add-separate-commands-test-subdir: + description: "E2E Node Add - Separate commands 
Test Subdirectory" + value: ${{ jobs.env-vars.outputs.e2e_node_add_separate_commands_test_subdir }} + e2e-node-update-test-subdir: + description: "E2E Node Update Test Subdirectory" + value: ${{ jobs.env-vars.outputs.e2e_node_update_test_subdir }} + e2e-node-delete-test-subdir: + description: "E2E Node Delete Test Subdirectory" + value: ${{ jobs.env-vars.outputs.e2e_node_delete_test_subdir }} e2e-relay-test-subdir: description: "E2E Relay Test Subdirectory" value: ${{ jobs.env-vars.outputs.e2e_relay_test_subdir }} - e2e-coverage-report: - description: "E2E Tests Coverage Report" - value: ${{ jobs.env-vars.outputs.e2e_coverage_report }} + e2e-standard-coverage-report: + description: "E2E Standard Tests Coverage Report" + value: ${{ jobs.env-vars.outputs.e2e_standard_coverage_report }} e2e-mirror-node-coverage-report: description: "E2E Mirror Node Tests Coverage Report" value: ${{ jobs.env-vars.outputs.e2e_mirror_node_coverage_report }} - e2e-node-pem-stop-add-coverage-report: - description: "E2E Node PEM Stop Add Tests Coverage Report" - value: ${{ jobs.env-vars.outputs.e2e_node_pem_stop_add_coverage_report }} - e2e-node-pfx-kill-add-coverage-report: - description: "E2E Node PFX Kill Add Tests Coverage Report" - value: ${{ jobs.env-vars.outputs.e2e_node_pfx_kill_add_coverage_report }} + e2e-node-pem-stop-coverage-report: + description: "E2E Node PEM Stop Tests Coverage Report" + value: ${{ jobs.env-vars.outputs.e2e_node_pem_stop_coverage_report }} + e2e-node-pem-kill-coverage-report: + description: "E2E Node PEM Kill Tests Coverage Report" + value: ${{ jobs.env-vars.outputs.e2e_node_pem_kill_coverage_report }} e2e-node-local-build-coverage-report: description: "E2E Node Local Build Tests Coverage Report" value: ${{ jobs.env-vars.outputs.e2e_node_local_build_coverage_report }} + e2e-node-add-coverage-report: + description: "E2E Node Add Tests Coverage Report" + value: ${{ jobs.env-vars.outputs.e2e_node_add_coverage_report }} + e2e-node-add-separate-commands-coverage-report: + description: "E2E Node Add - Separate commands Tests Coverage Report" + value: ${{ jobs.env-vars.outputs.e2e_node_add_separate_commands_coverage_report }} + e2e-node-update-coverage-report: + description: "E2E Node Update Tests Coverage Report" + value: ${{ jobs.env-vars.outputs.e2e_node_update_coverage_report }} + e2e-node-delete-coverage-report: + description: "E2E Node Delete Tests Coverage Report" + value: ${{ jobs.env-vars.outputs.e2e_node_delete_coverage_report }} e2e-relay-coverage-report: description: "E2E Relay Tests Coverage Report" value: ${{ jobs.env-vars.outputs.e2e_relay_coverage_report }} @@ -70,19 +94,27 @@ defaults: jobs: env-vars: name: ${{ inputs.custom-job-label || 'Set Environment Variables' }} - runs-on: [self-hosted, Linux, medium, ephemeral] + runs-on: solo-linux-medium outputs: - e2e_test_subdir: e2e + e2e_standard_test_subdir: e2e-standard e2e_mirror_node_test_subdir: e2e-mirror-node - e2e_node_pem_stop_add_test_subdir: e2e-node-pem-stop-add - e2e_node_pfx_kill_add_test_subdir: e2e-node-pfx-kill-add + e2e_node_pem_stop_test_subdir: e2e-node-pem-stop + e2e_node_pem_kill_test_subdir: e2e-node-pem-kill e2e_node_local_build_test_subdir: e2e-node-local-build + e2e_node_add_test_subdir: e2e-node-add + e2e_node_add_separate_commands_test_subdir: e2e-node-add-separate-commands + e2e_node_update_test_subdir: e2e-node-update + e2e_node_delete_test_subdir: e2e-node-delete e2e_relay_test_subdir: e2e-relay - e2e_coverage_report: "E2E Tests Coverage Report" + e2e_standard_coverage_report: "E2E Standard Tests 
Coverage Report" e2e_mirror_node_coverage_report: "E2E Mirror Node Tests Coverage Report" - e2e_node_pem_stop_add_coverage_report: "E2E Node PEM Stop Add Tests Coverage Report" - e2e_node_pfx_kill_add_coverage_report: "E2E Node PFX Kill Add Tests Coverage Report" + e2e_node_pem_stop_coverage_report: "E2E Node PEM Stop Tests Coverage Report" + e2e_node_pem_kill_coverage_report: "E2E Node PEM Kill Tests Coverage Report" e2e_node_local_build_coverage_report: "E2E Node Local Build Tests Coverage Report" + e2e_node_add_coverage_report: "E2E Node Add Tests Coverage Report" + e2e_node_add_separate_commands_coverage_report: "E2E Node Add - Separate commands Tests Coverage Report" + e2e_node_update_coverage_report: "E2E Node Update Tests Coverage Report" + e2e_node_delete_coverage_report: "E2E Node Delete Tests Coverage Report" e2e_relay_coverage_report: "E2E Relay Tests Coverage Report" steps: - run: echo "Exposing environment variables to reusable workflows" diff --git a/.github/workflows/zxc-unit-test.yaml b/.github/workflows/zxc-unit-test.yaml index c634a649d..148164b55 100644 --- a/.github/workflows/zxc-unit-test.yaml +++ b/.github/workflows/zxc-unit-test.yaml @@ -52,15 +52,15 @@ jobs: matrix: os: - windows-2022 - - [self-hosted, Linux, medium, ephemeral] + - solo-linux-medium name: "(${{ join(matrix.os, ', ') }})" runs-on: ${{ matrix.os }} steps: - name: Checkout Code - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: Setup Node - uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: ${{ inputs.node-version }} cache: npm @@ -73,28 +73,28 @@ jobs: run: npm test - name: Publish Windows Unit Test Report - uses: EnricoMi/publish-unit-test-result-action/windows/bash@30eadd5010312f995f0d3b3cff7fe2984f69409e # v2.16.1 + uses: EnricoMi/publish-unit-test-result-action/windows/bash@82082dac68ad6a19d980f8ce817e108b9f496c2a # v2.17.1 if: ${{ runner.os == 'Windows' && steps.npm-deps.conclusion == 'success' && !cancelled() }} with: check_name: 'Unit Test Results - ${{ runner.os }}' files: "junit.xml" - name: Publish Linux Unit Test Report - uses: EnricoMi/publish-unit-test-result-action@30eadd5010312f995f0d3b3cff7fe2984f69409e # v2.16.1 + uses: EnricoMi/publish-unit-test-result-action@82082dac68ad6a19d980f8ce817e108b9f496c2a # v2.17.1 if: ${{ runner.os == 'linux' && steps.npm-deps.conclusion == 'success' && !cancelled() }} with: check_name: 'Unit Test Results - ${{ runner.os }}' files: "junit.xml" - name: Publish Unit Test Coverage Report - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 if: ${{ runner.os == 'linux' && !cancelled() }} with: name: Unit Test Coverage Report path: 'coverage/unit' - name: Publish Test Reports - uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3 + uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 if: ${{ runner.os == 'linux' && steps.npm-deps.conclusion == 'success' && !cancelled() }} with: name: Test Reports diff --git a/DEV.md b/DEV.md index f85260e29..d940483f7 100644 --- a/DEV.md +++ b/DEV.md @@ -22,11 +22,11 @@ Below we describe how you can set up local environment and contribute to `solo`. * In order to run E2E test, we need to set up cluster and install the chart. 
* Run `./test/e2e/setup-e2e.sh` - * Run `npm run test-e2e` + * Run `npm run test-e2e-standard`, NOTE: this excludes some E2E tests that have their own command * Tests are run in random order. The random seed value is shown as message such as: `Using timestamp seed 1711414247085 for random test order` -* If you like to rerun tests with the same seed, use environment variable `RANDOM_SEED=` with `npm run test-e2e` command. - * Example: `RANDOM_SEED=20 npm run test-e2e`, +* If you like to rerun tests with the same seed, use environment variable `RANDOM_SEED=` with `npm run test-e2e-standard` command. + * Example: `RANDOM_SEED=20 npm run test-e2e-standard`, and you should see an output like: `Using preset seed 20 for random test order` diff --git a/README.md b/README.md index e2ee3b37c..babfe25be 100644 --- a/README.md +++ b/README.md @@ -16,11 +16,9 @@ An opinionated CLI tool to deploy and manage standalone test networks. * [Install Solo](#install-solo) * [Setup Kubernetes cluster](#setup-kubernetes-cluster) * [Generate Node Keys](#generate-node-keys) - * [Legacy keys (.pfx file)](#legacy-keys-pfx-file) * [Standard keys (.pem file)](#standard-keys-pem-file) * [Examples](#examples) - * [Example - 1: Deploy a standalone test network (version `0.42.5`)](#example---1-deploy-a-standalone-test-network-version-0425) - * [Example - 2: Deploy a standalone test network (version `0.47.0-alpha.0`)](#example---2-deploy-a-standalone-test-network-version-0470-alpha0) + * [Example - 1: Deploy a standalone test network (version `0.54.0-alpha.4`)](#example---1-deploy-a-standalone-test-network-version-0540-alpha4) * [Support](#support) * [Contributing](#contributing) * [Code of Conduct](#code-of-conduct) @@ -28,7 +26,10 @@ An opinionated CLI tool to deploy and manage standalone test networks. ## Requirements -* Node(>=20.14.0) (*lts/hydrogen*) +| Solo Version | Node.js | Kind | FST Chart | Hedera | Kubernetes | Kubectl | Helm | k9s | Docker Resources | Java | +|--------------|---------------------------|------------|-----------|----------|------------|------------|---------|------------|-------------------------|--------------| +| 0.29.0 | >= 20.14.0 (lts/hydrogen) | >= v1.29.1 | v0.30.0 | v0.53.0+ | >= v1.27.3 | >= v1.27.3 | v3.14.2 | >= v0.27.4 | Memory >= 8GB, CPU >= 4 | >= 21.0.1+12 | +| 0.30.0 | >= 20.14.0 (lts/hydrogen) | >= v1.29.1 | v0.30.0 | v0.54.0+ | >= v1.27.3 | >= v1.27.3 | v3.14.2 | >= v0.27.4 | Memory >= 8GB, CPU >= 4 | >= 21.0.1+12 | ## Setup @@ -36,7 +37,7 @@ An opinionated CLI tool to deploy and manage standalone test networks. ``` nvm install lts/hydrogen -nvm use lts/hydrogen +nvm use lts/hydrogen ``` * Useful tools: @@ -90,56 +91,63 @@ You can now use your cluster with: kubectl cluster-info --context kind-solo -Thanks for using kind! 😊 +Have a nice day! 👋 ``` You may now view pods in your cluster using `k9s -A` as below: + ``` - Context: kind-solo <0> all Attac… ____ __.________ - Cluster: kind-solo <1> default Delete| |/ _/ __ \______ - User: kind-solo Descri| < \____ / ___/ - K9s Rev: v0.27.4 ⚡️v0.32.3 Edit | | \ / /\___ \ - K8s Rev: v1.27.3 Help |____|__ \ /____//____ > - CPU: n/a Kill \/ \/ + Context: kind-solo <0> all Attach Delete | |/ _/ __ \______ + User: kind-solo Describe
| < \____ / ___/ + K9s Rev: v0.32.5 Edit Help |____|__ \ /____//____ > + CPU: n/a Jump Owner \/ \/ MEM: n/a -┌───────────────────────────────────────────── Pods(all)[9] ─────────────────────────────────────────────┐ -│ NAMESPACE↑ NAME PF READY RESTARTS STATUS IP │ -│ kube-system coredns-5d78c9869d-kc27p ● 1/1 0 Running 10.244.0.4 │ -│ kube-system coredns-5d78c9869d-r8mzz ● 1/1 0 Running 10.244.0.3 │ -│ kube-system etcd-solo-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kindnet-gppbk ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-apiserver-solo-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-controller-manager-solo-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-proxy-wb9w5 ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-scheduler-solo-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ local-path-storage local-path-provisioner-6bc4bddd6b-5vh5d ● 1/1 0 Running 10.244.0.2 │ -│ │ -│ +┌───────────────────────────────────────────────── Pods(all)[11] ─────────────────────────────────────────────────┐ +│ NAMESPACE↑ NAME PF READY STATUS RESTARTS IP NODE │ +│ fullstack-setup console-557956d575-4r5xm ● 1/1 Running 0 10.244.0.5 solo-con │ +│ fullstack-setup minio-operator-7d575c5f84-8shc9 ● 1/1 Running 0 10.244.0.6 solo-con │ +│ kube-system coredns-5d78c9869d-6cfbg ● 1/1 Running 0 10.244.0.4 solo-con │ +│ kube-system coredns-5d78c9869d-gxcjz ● 1/1 Running 0 10.244.0.3 solo-con │ +│ kube-system etcd-solo-control-plane ● 1/1 Running 0 172.18.0.2 solo-con │ +│ kube-system kindnet-k75z6 ● 1/1 Running 0 172.18.0.2 solo-con │ +│ kube-system kube-apiserver-solo-control-plane ● 1/1 Running 0 172.18.0.2 solo-con │ +│ kube-system kube-controller-manager-solo-control-plane ● 1/1 Running 0 172.18.0.2 solo-con │ +│ kube-system kube-proxy-cct7t ● 1/1 Running 0 172.18.0.2 solo-con │ +│ kube-system kube-scheduler-solo-control-plane ● 1/1 Running 0 172.18.0.2 solo-con │ +│ local-path-storage local-path-provisioner-6bc4bddd6b-gwdp6 ● 1/1 Running 0 10.244.0.2 solo-con │ +│ │ +│ │ +└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘ ``` ## Examples -### Example - 1: Deploy a standalone test network (version `0.42.5`) +### Example - 1: Deploy a standalone test network (version `0.54.0-alpha.4`) -* Initialize `solo` with tag `v0.42.5` and list of node names `node0,node1,node2`: +* Initialize `solo` with tag `v0.54.0-alpha.4` and list of node names `node1,node2,node3`: ``` -$ solo init -t v0.42.5 -i node0,node1,node2 -n "${SOLO_NAMESPACE}" -s "${SOLO_CLUSTER_SETUP_NAMESPACE}" --key-format pfx +# reset .solo directory +rm -rf ~/.solo + +solo init -t v0.54.0-alpha.4 -i node1,node2,node3 -n "${SOLO_NAMESPACE}" -s "${SOLO_CLUSTER_SETUP_NAMESPACE}" ``` -Example output +* Example output ``` ******************************* Solo ********************************************* -Version : 0.27.0 +Version : 0.30.1 Kubernetes Context : kind-solo Kubernetes Cluster : kind-solo Kubernetes Namespace : solo ********************************************************************************** ✔ Setup home directory and cache -✔ Check dependency: helm [OS: linux, Release: 5.15.0-1058-gke, Arch: x64] +✔ Check dependency: helm [OS: linux, Release: 5.15.0-118-generic, Arch: x64] ✔ Check dependencies ✔ Setup chart manager @@ -150,61 +158,54 @@ If a full reset is needed, delete the directory or relevant sub-directories befo ✔ Copy templates in '/home/runner/.solo/cache' ``` -* Generate `pfx` formatted node keys - -We need to generate `pfx` keys as 
`pem` key files are only supported by Hedera platform >=`0.47.0-alpha.0`. +* Generate `pem` formatted node keys ``` -$ solo node keys --gossip-keys --tls-keys --key-format pfx +solo node keys --gossip-keys --tls-keys ``` -Example output +* Example output ``` ******************************* Solo ********************************************* -Version : 0.27.0 +Version : 0.30.1 Kubernetes Context : kind-solo Kubernetes Cluster : kind-solo Kubernetes Namespace : solo ********************************************************************************** ✔ Initialize -✔ Check keytool exists (Version: 21.0.1+12) ✔ Backup old files -✔ Generate private-node0.pfx for node: node0 -✔ Generate private-node1.pfx for node: node1 -✔ Generate private-node2.pfx for node: node2 -✔ Generate public.pfx file -✔ Clean up temp files +✔ Gossip key for node: node1 +✔ Gossip key for node: node2 +✔ Gossip key for node: node3 ✔ Generate gossip keys ✔ Backup old files -✔ TLS key for node: node0 -✔ TLS key for node: node2 ✔ TLS key for node: node1 +✔ TLS key for node: node2 +✔ TLS key for node: node3 ✔ Generate gRPC TLS keys ✔ Finalize ``` -Key files are generated in `~/.solo/keys` directory. - +PEM key files are generated in `~/.solo/keys` directory. ``` -$ ls ~/.solo/cache/keys - -hedera-node0.crt hedera-node1.crt hedera-node2.crt private-node0.pfx private-node2.pfx -hedera-node0.key hedera-node1.key hedera-node2.key private-node1.pfx public.pfx +hedera-node1.crt hedera-node3.crt s-private-node1.pem s-public-node1.pem unused-gossip-pem +hedera-node1.key hedera-node3.key s-private-node2.pem s-public-node2.pem unused-tls +hedera-node2.crt hedera-node4.crt s-private-node3.pem s-public-node3.pem +hedera-node2.key hedera-node4.key s-private-node4.pem s-public-node4.pem ``` * Setup cluster with shared components - * In a separate terminal, you may run `k9s` to view the pod status. ``` -$ solo cluster setup +solo cluster setup ``` -Example output +* Example output ``` ******************************* Solo ********************************************* -Version : 0.27.0 +Version : 0.30.1 Kubernetes Context : kind-solo Kubernetes Cluster : kind-solo Kubernetes Namespace : solo @@ -214,37 +215,49 @@ Kubernetes Namespace : solo ✔ Install 'fullstack-cluster-setup' chart ``` +In a separate terminal, you may run `k9s` to view the pod status. * Deploy helm chart with Hedera network components * It may take a while (5~15 minutes depending on your internet speed) to download various docker images and get the pods started. * If it fails, ensure you have enough resources allocated for Docker engine and retry the command. 
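Before retrying, it can help to confirm how much CPU and memory the Docker engine actually has available; the requirements table above calls for at least 4 CPUs and 8GB of memory. A quick sketch of such a check, using the standard `docker info` template fields:

```
# print the CPU count and total memory visible to the Docker engine
docker info --format 'CPUs: {{.NCPU}}, Memory: {{.MemTotal}} bytes'
```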
``` -$ solo network deploy +solo network deploy ``` -Example output +* Example output ``` ******************************* Solo ********************************************* -Version : 0.27.0 +Version : 0.30.1 Kubernetes Context : kind-solo Kubernetes Cluster : kind-solo Kubernetes Namespace : solo ********************************************************************************** ✔ Initialize +✔ Copy Gossip keys to staging +✔ Copy gRPC TLS keys to staging +✔ Prepare staging directory +✔ Copy Gossip keys +✔ Node: node1 +✔ Copy TLS keys +✔ Copy Gossip keys +✔ Node: node2 +✔ Copy Gossip keys +✔ Node: node3 +✔ Copy node keys to secrets ✔ Install chart 'fullstack-deployment' -✔ Check Node: node0 ✔ Check Node: node1 ✔ Check Node: node2 +✔ Check Node: node3 ✔ Check node pods are running -✔ Check Envoy Proxy for: node2 -✔ Check Envoy Proxy for: node0 ✔ Check Envoy Proxy for: node1 -✔ Check HAProxy for: node2 +✔ Check Envoy Proxy for: node3 +✔ Check Envoy Proxy for: node2 ✔ Check HAProxy for: node1 -✔ Check HAProxy for: node0 +✔ Check HAProxy for: node3 +✔ Check HAProxy for: node2 ✔ Check proxy pods are running ✔ Check MinIO ✔ Check auxiliary pods are ready @@ -254,98 +267,88 @@ Kubernetes Namespace : solo * It may take a while as it download the hedera platform code from ``` -$ solo node setup +solo node setup ``` -Example output +* Example output ``` ******************************* Solo ********************************************* -Version : 0.27.0 +Version : 0.30.1 Kubernetes Context : kind-solo Kubernetes Cluster : kind-solo Kubernetes Namespace : solo ********************************************************************************** ✔ Initialize -✔ Check network pod: node2 ✔ Check network pod: node1 -✔ Check network pod: node0 +✔ Check network pod: node2 +✔ Check network pod: node3 ✔ Identify network pods -✔ Copy configuration files -✔ Copy Gossip keys to staging -✔ Copy gRPC TLS keys to staging -✔ Prepare config.txt for the network -✔ Prepare staging directory -✔ Update node: node1 -✔ Update node: node2 -✔ Update node: node0 +✔ Update node: node1 [ platformVersion = v0.54.0-alpha.4 ] +✔ Update node: node3 [ platformVersion = v0.54.0-alpha.4 ] +✔ Update node: node2 [ platformVersion = v0.54.0-alpha.4 ] ✔ Fetch platform software into network nodes -✔ Copy Gossip keys -✔ Copy Gossip keys -✔ Copy Gossip keys -✔ Copy TLS keys -✔ Copy TLS keys -✔ Copy TLS keys -✔ Copy configuration files -✔ Copy configuration files -✔ Copy configuration files ✔ Set file permissions -✔ Node: node2 +✔ Node: node1 ✔ Set file permissions -✔ Node: node0 +✔ Node: node3 ✔ Set file permissions -✔ Node: node1 +✔ Node: node2 ✔ Setup network nodes -✔ Finalize ``` -* Start the nodes. 
+* Start the nodes ``` -$ solo node start +solo node start ``` -Example output +* Example output ``` ******************************* Solo ********************************************* -Version : 0.27.0 +Version : 0.30.1 Kubernetes Context : kind-solo Kubernetes Cluster : kind-solo Kubernetes Namespace : solo ********************************************************************************** ✔ Initialize -✔ Check network pod: node0 ✔ Check network pod: node1 ✔ Check network pod: node2 -✔ Identify network pods -✔ Start node: node0 -✔ Start node: node2 +✔ Check network pod: node3 +✔ Identify existing network nodes ✔ Start node: node1 +✔ Start node: node2 +✔ Start node: node3 ✔ Starting nodes -✔ Check node: node0 -✔ Check node: node1 -✔ Check node: node2 +✔ Check network pod: node2 - status ACTIVE, attempt: 16/120 +✔ Check network pod: node3 - status ACTIVE, attempt: 17/120 +✔ Check network pod: node1 - status ACTIVE, attempt: 17/120 ✔ Check nodes are ACTIVE ✔ Check proxy for node: node1 -✔ Check proxy for node: node0 ✔ Check proxy for node: node2 +✔ Check proxy for node: node3 ✔ Check node proxies are ACTIVE +✔ Adding stake for node: node1 +✔ Adding stake for node: node2 +✔ Adding stake for node: node3 +✔ Add node stakes ``` + * Deploy mirror node ``` -$ solo mirror-node deploy +solo mirror-node deploy ``` -Example output +* Example output ``` ******************************* Solo ********************************************* -Version : 0.27.0 +Version : 0.30.1 Kubernetes Context : kind-solo Kubernetes Cluster : kind-solo Kubernetes Namespace : solo @@ -356,74 +359,80 @@ Kubernetes Namespace : solo ✔ Enable mirror-node ✔ Check Hedera Explorer ✔ Check Postgres DB -✔ Check Monitor ✔ Check GRPC -✔ Check Importer ✔ Check REST API +✔ Check Importer +✔ Check Monitor ✔ Check pods are ready +✔ Insert data in public.file_data +✔ Seed DB data ``` * Deploy a JSON RPC relay ``` -$ solo relay deploy +solo relay deploy ``` -Example output +* Example output ``` ******************************* Solo ********************************************* -Version : 0.27.0 +Version : 0.30.1 Kubernetes Context : kind-solo Kubernetes Cluster : kind-solo Kubernetes Namespace : solo ********************************************************************************** -✔ Initialize -✔ Prepare chart values -✔ Deploy JSON RPC Relay -✔ Check relay is ready +[?25l ``` You may view the list of pods using `k9s` as below: ``` - Context: kind-solo-e2e <0> all Attach … ____ __.________ - Cluster: kind-solo-e2e <1> default Delete
| |/ _/ __ \______ - User: kind-solo-e2e Describe | < \____ / ___/ - K9s Rev: v0.27.4 ⚡️v0.32.4 Edit | | \ / /\___ \ - K8s Rev: v1.27.3 Help |____|__ \ /____//____ > - CPU: n/a Kill \/ \/ +Context: kind-solo <0> all Attach Delete | |/ _/ __ \______ + User: kind-solo Describe
| < \____ / ___/ + K9s Rev: v0.32.5 Edit Help |____|__ \ /____//____ > + CPU: n/a Jump Owner \/ \/ MEM: n/a -┌─────────────────────────────────────────────────── Pods(all)[27] ────────────────────────────────────────────────────┐ -│ NAMESPACE↑ NAME PF READY RESTARTS STATUS IP │ -│ fullstack-setup console-557956d575-fqctd ● 1/1 0 Running 10.244.0.4 │ -│ fullstack-setup minio-operator-7d575c5f84-j9p6f ● 1/1 0 Running 10.244.0.3 │ -│ kube-system coredns-5d78c9869d-gknqp ● 1/1 0 Running 10.244.0.6 │ -│ kube-system coredns-5d78c9869d-q59pc ● 1/1 0 Running 10.244.0.5 │ -│ kube-system etcd-solo-e2e-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kindnet-w9ps5 ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-apiserver-solo-e2e-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-controller-manager-solo-e2e-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-proxy-p69z8 ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-scheduler-solo-e2e-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ local-path-storage local-path-provisioner-6bc4bddd6b-8pkfk ● 1/1 0 Running 10.244.0.2 │ -│ solo envoy-proxy-node0-84947f844f-f28tp ● 1/1 0 Running 10.244.0.215 │ -│ solo envoy-proxy-node1-65f8879dcc-j2lrk ● 1/1 0 Running 10.244.0.216 │ -│ solo envoy-proxy-node2-667f848689-dkmf9 ● 1/1 0 Running 10.244.0.214 │ -│ solo fullstack-deployment-grpc-69f9cc5666-lf6ql ● 1/1 0 Running 10.244.0.227 │ -│ solo fullstack-deployment-hedera-explorer-79f79b7df4-wjdct ● 1/1 0 Running 10.244.0.226 │ -│ solo fullstack-deployment-importer-864489ffb8-6v8tk ● 1/1 0 Running 10.244.0.228 │ -│ solo fullstack-deployment-postgres-postgresql-0 ● 1/1 0 Running 10.244.0.232 │ -│ solo fullstack-deployment-rest-584f5cb6bb-q9vnt ● 1/1 0 Running 10.244.0.230 │ -│ solo fullstack-deployment-web3-69dcdfc4fb-mm5pk ● 1/1 0 Running 10.244.0.229 │ -│ solo haproxy-node0-6969f76c77-n5cfl ● 1/1 1 Running 10.244.0.219 │ -│ solo haproxy-node1-59f6976d45-x6xmp ● 1/1 1 Running 10.244.0.217 │ -│ solo haproxy-node2-6df64d5457-hf9ps ● 1/1 1 Running 10.244.0.218 │ -│ solo minio-pool-1-0 ● 2/2 1 Running 10.244.0.224 │ -│ solo network-node0-0 ● 5/5 0 Running 10.244.0.221 │ -│ solo network-node1-0 ● 5/5 0 Running 10.244.0.222 │ -│ solo network-node2-0 ● 5/5 0 Running 10.244.0.220 │ +┌───────────────────────────────────────────────── Pods(all)[31] ─────────────────────────────────────────────────┐ +│ NAMESPACE↑ NAME PF READY STATUS RESTARTS I │ +│ kube-system coredns-5d78c9869d-994t4 ● 1/1 Running 0 1 │ +│ kube-system coredns-5d78c9869d-vgt4q ● 1/1 Running 0 1 │ +│ kube-system etcd-solo-control-plane ● 1/1 Running 0 1 │ +│ kube-system kindnet-q26c9 ● 1/1 Running 0 1 │ +│ kube-system kube-apiserver-solo-control-plane ● 1/1 Running 0 1 │ +│ kube-system kube-controller-manager-solo-control-plane ● 1/1 Running 0 1 │ +│ kube-system kube-proxy-9b27j ● 1/1 Running 0 1 │ +│ kube-system kube-scheduler-solo-control-plane ● 1/1 Running 0 1 │ +│ local-path-storage local-path-provisioner-6bc4bddd6b-4mv8c ● 1/1 Running 0 1 │ +│ solo envoy-proxy-node1-65f8879dcc-rwg97 ● 1/1 Running 0 1 │ +│ solo envoy-proxy-node2-667f848689-628cx ● 1/1 Running 0 1 │ +│ solo envoy-proxy-node3-6bb4b4cbdf-dmwtr ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-grpc-75bb9c6c55-l7kvt ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-hedera-explorer-6565ccb4cb-9dbw2 ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-importer-dd74fd466-vs4mb ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-monitor-54b8f57db9-fn5qq ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-postgres-postgresql-0 ● 1/1 
Running 0 1 │ +│ solo fullstack-deployment-redis-node-0 ● 2/2 Running 0 1 │ +│ solo fullstack-deployment-rest-6d48f8dbfc-plbp2 ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-restjava-5d6c4cb648-r597f ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-web3-55fdfbc7f7-lzhfl ● 1/1 Running 0 1 │ +│ solo haproxy-node1-785b9b6f9b-676mr ● 1/1 Running 1 1 │ +│ solo haproxy-node2-644b8c76d-v9mg6 ● 1/1 Running 1 1 │ +│ solo haproxy-node3-fbffdb64-272t2 ● 1/1 Running 1 1 │ +│ solo minio-pool-1-0 ● 2/2 Running 1 1 │ +│ solo network-node1-0 ● 5/5 Running 2 1 │ +│ solo network-node2-0 ● 5/5 Running 2 1 │ +│ solo network-node3-0 ● 5/5 Running 2 1 │ +│ solo relay-node1-node2-node3-hedera-json-rpc-relay-ddd4c8d8b-hdlpb ● 1/1 Running 0 1 │ +│ solo-cluster console-557956d575-c5qp7 ● 1/1 Running 0 1 │ +│ solo-cluster minio-operator-7d575c5f84-xdwwz ● 1/1 Running 0 1 │ +│ │ +└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘ + ``` #### Access Hedera Network services @@ -437,7 +446,7 @@ Once the nodes are up, you may now expose various services (using `k9s` (shift-f * JSON Rpc Relays * You can deploy JSON RPC relays for one or more nodes as below: ``` - $ solo relay deploy -i node0,node1 + solo relay deploy -i node1,node2 ``` Example output @@ -445,7 +454,7 @@ Example output ``` ******************************* Solo ********************************************* -Version : 0.27.0 +Version : 0.30.1 Kubernetes Context : kind-solo Kubernetes Cluster : kind-solo Kubernetes Namespace : solo @@ -456,137 +465,88 @@ Kubernetes Namespace : solo ✔ Check relay is ready ``` -### Example - 2: Deploy a standalone test network (version `0.47.0-alpha.0`) - -* Initialize `solo` with tag `v0.47.0-alpha.0` and list of node names `node0,node1,node2`: - -``` -# reset .solo directory -$ rm -rf ~/.solo - -$ solo init -t v0.47.0-alpha.0 -i node0,node1,node2 -n "${SOLO_NAMESPACE}" -s "${SOLO_CLUSTER_SETUP_NAMESPACE}" --key-format pem -``` - -* Example output - -``` - -******************************* Solo ********************************************* -Version : 0.27.0 -Kubernetes Context : kind-solo -Kubernetes Cluster : kind-solo -Kubernetes Namespace : solo -********************************************************************************** -✔ Setup home directory and cache -✔ Check dependency: helm [OS: linux, Release: 5.15.0-1058-gke, Arch: x64] -✔ Check dependencies -✔ Setup chart manager +## For Developers Working on Hedera Service Repo -*************************************************************************************** -Note: solo stores various artifacts (config, logs, keys etc.) in its home directory: /home/runner/.solo -If a full reset is needed, delete the directory or relevant sub-directories before running 'solo init'. -*************************************************************************************** -✔ Copy templates in '/home/runner/.solo/cache' -``` +First, please clone hedera service repo `https://github.com/hashgraph/hedera-services/` and build the code +with `./gradlew assemble`. If need to running nodes with different versions or releases, please duplicate the repo or build directories in +multiple directories, checkout to the respective version and build the code. -* Generate `pem` formatted node keys +To set customized `settings.txt` file, edit the file +`~/.solo/cache/templates/settings.txt` after `solo init` command. 
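+As a sketch of what such a customization can look like, platform `settings.txt` entries are one `name, value` pair per line. The entries below are illustrative examples only, not values that Solo requires:
+```
+# illustrative entries only; consult the platform documentation for the settings it supports
+maxOutgoingSyncs, 1
+useLoopbackIp, false
+```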
+Then you can start the custom-built Hedera network with the following command:
```
solo node setup --local-build-path <default build path>,node1=<build path>,node2=<build path>
+# example: solo node setup --local-build-path node1=../hedera-services/hedera-node/data/,../hedera-services/hedera-node/data,node3=../hedera-services/hedera-node/data
```

## For Developers Working on Platform core

To deploy nodes with locally built PlatformTestingTool (PTT) jar files, run the following command:
```
solo node setup --local-build-path <default build path>,node1=<build path>,node2=<build path> --app PlatformTestingTool.jar --app-config <app config path>

# example: solo node setup --local-build-path ../hedera-services/platform-sdk/sdk/data,node1=../hedera-services/platform-sdk/sdk/data,node2=../hedera-services/platform-sdk/sdk/data --app PlatformTestingTool.jar --app-config ../hedera-services/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCMFCQ-Basic-2.5k-5m.json
```

## Logs
You can find the logs for solo command runs under the directory `~/.solo/logs/`.
The file `solo.log` contains the logs for the solo command.
The file `hashgraph-sdk.log` contains the logs from the Solo client when sending transactions to network nodes.

## Using IntelliJ remote debug with Solo

NOTE: the hedera-services path referenced, '../hedera-services/hedera-node/data', may need to be updated depending on the directory you are currently in. This also assumes that you have done an assemble/build and that the directory contents are up to date.

Example 1: attach a JVM debugger to a Hedera node
```bash
./test/e2e/setup-e2e.sh
solo node keys --gossip-keys --tls-keys
solo network deploy -i node1,node2,node3 --debug-nodeid node2
solo node setup -i node1,node2,node3 --local-build-path ../hedera-services/hedera-node/data
solo node start -i node1,node2,node3 --debug-nodeid node2
```

Once you see the following message, you can launch the JVM debugger from IntelliJ:
```
 Check node: node1,
 Check node: node3, Please attach JVM debugger now.
+ Check node: node4, ``` -* Start the nodes - -``` -$ solo node start +Example 2: attach jvm debugger with node add operation -# output is similar to example-1 +```bash +./test/e2e/setup-e2e.sh +solo node keys --gossip-keys --tls-keys +solo network deploy -i node1,node2,node3 --pvcs +solo node setup -i node1,node2,node3 --local-build-path ../hedera-services/hedera-node/data +solo node start -i node1,node2,node3 +solo node add --gossip-keys --tls-keys --node-id node4 --debug-nodeid node4 --local-build-path ../hedera-services/hedera-node/data ``` -## For Developers Working on Hedera Service Repo - -First, pleaes clone hedera service repo `https://github.com/hashgraph/hedera-services/` and build the code -with `./gradlew assemble`. If need to running nodes with different versions or releases, please duplicate the repo or build directories in -multiple directories, checkout to the respective version and build the code. -To set customized `settings.txt` file, edit the file -`~/.solo/cache/templates/settings.txt` after `solo init` command. +Example 3: attach jvm debugger with node update operation -Then you can start customized built hedera network with the following command: -``` -solo node setup --local-build-path ,node1=,node2= +```bash +./test/e2e/setup-e2e.sh +solo node keys --gossip-keys --tls-keys +solo network deploy -i node1,node2,node3 +solo node setup -i node1,node2,node3 --local-build-path ../hedera-services/hedera-node/data +solo node start -i node1,node2,node3 +solo node update --node-id node2 --debug-nodeid node2 --local-build-path ../hedera-services/hedera-node/data --new-account-number 0.0.7 --gossip-public-key ./s-public-node2.pem --gossip-private-key ./s-private-node2.pem --agreement-public-key ./a-public-node2.pem --agreement-private-key ./a-private-node2.pem ``` -## For Developers Working on Platform core +Example 4: attach jvm debugger with node delete operation -To deploy node with local build PTT jar files, run the following command: -``` -solo node setup --local-build-path ,node1=,node2= - --app PlatformTestingTool.jar --app-config +```bash +./test/e2e/setup-e2e.sh +solo node keys --gossip-keys --tls-keys +solo network deploy -i node1,node2,node3,node4 +solo node setup -i node1,node2,node3,node4 --local-build-path ../hedera-services/hedera-node/data +solo node start -i node1,node2,node3,node4 +solo node delete --node-id node2 --debug-nodeid node3 ``` -## Logs -You can find log for running solo command under the directory `~/.solo/logs/` -The file `solo.log` contains the logs for the solo command. -The file `hashgraph-sdk.log` contains the logs from solo client when sending transactions to network nodes. - ## Support diff --git a/README.md.template b/README.md.template index c98006bb6..5d2750de4 100644 --- a/README.md.template +++ b/README.md.template @@ -16,11 +16,9 @@ An opinionated CLI tool to deploy and manage standalone test networks. 
* [Install Solo](#install-solo) * [Setup Kubernetes cluster](#setup-kubernetes-cluster) * [Generate Node Keys](#generate-node-keys) - * [Legacy keys (.pfx file)](#legacy-keys-pfx-file) * [Standard keys (.pem file)](#standard-keys-pem-file) * [Examples](#examples) - * [Example - 1: Deploy a standalone test network (version `0.42.5`)](#example---1-deploy-a-standalone-test-network-version-0425) - * [Example - 2: Deploy a standalone test network (version `0.47.0-alpha.0`)](#example---2-deploy-a-standalone-test-network-version-0470-alpha0) + * [Example - 1: Deploy a standalone test network (version `0.54.0-alpha.4`)](#example---1-deploy-a-standalone-test-network-version-0540-alpha4) * [Support](#support) * [Contributing](#contributing) * [Code of Conduct](#code-of-conduct) @@ -28,7 +26,10 @@ An opinionated CLI tool to deploy and manage standalone test networks. ## Requirements -* Node(>=20.14.0) (*lts/hydrogen*) +| Solo Version | Node.js | Kind | FST Chart | Hedera | Kubernetes | Kubectl | Helm | k9s | Docker Resources | Java | +|--------------|---------------------------|------------|-----------|----------|------------|------------|---------|------------|-------------------------|--------------| +| 0.29.0 | >= 20.14.0 (lts/hydrogen) | >= v1.29.1 | v0.30.0 | v0.53.0+ | >= v1.27.3 | >= v1.27.3 | v3.14.2 | >= v0.27.4 | Memory >= 8GB, CPU >= 4 | >= 21.0.1+12 | +| 0.30.0 | >= 20.14.0 (lts/hydrogen) | >= v1.29.1 | v0.30.0 | v0.54.0+ | >= v1.27.3 | >= v1.27.3 | v3.14.2 | >= v0.27.4 | Memory >= 8GB, CPU >= 4 | >= 21.0.1+12 | ## Setup @@ -36,7 +37,7 @@ An opinionated CLI tool to deploy and manage standalone test networks. ``` nvm install lts/hydrogen -nvm use lts/hydrogen +nvm use lts/hydrogen ``` * Useful tools: @@ -83,89 +84,93 @@ $KIND_CREATE_CLUSTER_OUTPUT You may now view pods in your cluster using `k9s -A` as below: + ``` - Context: kind-solo <0> all Attac… ____ __.________ - Cluster: kind-solo <1> default Delete| |/ _/ __ \______ - User: kind-solo Descri| < \____ / ___/ - K9s Rev: v0.27.4 ⚡️v0.32.3 Edit | | \ / /\___ \ - K8s Rev: v1.27.3 Help |____|__ \ /____//____ > - CPU: n/a Kill \/ \/ + Context: kind-solo <0> all Attach Delete | |/ _/ __ \______ + User: kind-solo Describe
| < \____ / ___/ + K9s Rev: v0.32.5 Edit Help |____|__ \ /____//____ > + CPU: n/a Jump Owner \/ \/ MEM: n/a -┌───────────────────────────────────────────── Pods(all)[9] ─────────────────────────────────────────────┐ -│ NAMESPACE↑ NAME PF READY RESTARTS STATUS IP │ -│ kube-system coredns-5d78c9869d-kc27p ● 1/1 0 Running 10.244.0.4 │ -│ kube-system coredns-5d78c9869d-r8mzz ● 1/1 0 Running 10.244.0.3 │ -│ kube-system etcd-solo-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kindnet-gppbk ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-apiserver-solo-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-controller-manager-solo-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-proxy-wb9w5 ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-scheduler-solo-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ local-path-storage local-path-provisioner-6bc4bddd6b-5vh5d ● 1/1 0 Running 10.244.0.2 │ -│ │ -│ +┌───────────────────────────────────────────────── Pods(all)[11] ─────────────────────────────────────────────────┐ +│ NAMESPACE↑ NAME PF READY STATUS RESTARTS IP NODE │ +│ fullstack-setup console-557956d575-4r5xm ● 1/1 Running 0 10.244.0.5 solo-con │ +│ fullstack-setup minio-operator-7d575c5f84-8shc9 ● 1/1 Running 0 10.244.0.6 solo-con │ +│ kube-system coredns-5d78c9869d-6cfbg ● 1/1 Running 0 10.244.0.4 solo-con │ +│ kube-system coredns-5d78c9869d-gxcjz ● 1/1 Running 0 10.244.0.3 solo-con │ +│ kube-system etcd-solo-control-plane ● 1/1 Running 0 172.18.0.2 solo-con │ +│ kube-system kindnet-k75z6 ● 1/1 Running 0 172.18.0.2 solo-con │ +│ kube-system kube-apiserver-solo-control-plane ● 1/1 Running 0 172.18.0.2 solo-con │ +│ kube-system kube-controller-manager-solo-control-plane ● 1/1 Running 0 172.18.0.2 solo-con │ +│ kube-system kube-proxy-cct7t ● 1/1 Running 0 172.18.0.2 solo-con │ +│ kube-system kube-scheduler-solo-control-plane ● 1/1 Running 0 172.18.0.2 solo-con │ +│ local-path-storage local-path-provisioner-6bc4bddd6b-gwdp6 ● 1/1 Running 0 10.244.0.2 solo-con │ +│ │ +│ │ +└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘ ``` ## Examples -### Example - 1: Deploy a standalone test network (version `0.42.5`) +### Example - 1: Deploy a standalone test network (version `0.54.0-alpha.4`) -* Initialize `solo` with tag `v0.42.5` and list of node names `node0,node1,node2`: +* Initialize `solo` with tag `v0.54.0-alpha.4` and list of node names `node1,node2,node3`: ``` -$ solo init -t v0.42.5 -i node0,node1,node2 -n "${SOLO_NAMESPACE}" -s "${SOLO_CLUSTER_SETUP_NAMESPACE}" --key-format pfx +# reset .solo directory +rm -rf ~/.solo + +solo init -t v0.54.0-alpha.4 -i node1,node2,node3 -n "${SOLO_NAMESPACE}" -s "${SOLO_CLUSTER_SETUP_NAMESPACE}" ``` -Example output +* Example output ``` $SOLO_INIT_OUTPUT ``` -* Generate `pfx` formatted node keys - -We need to generate `pfx` keys as `pem` key files are only supported by Hedera platform >=`0.47.0-alpha.0`. +* Generate `pem` formatted node keys ``` -$ solo node keys --gossip-keys --tls-keys --key-format pfx +solo node keys --gossip-keys --tls-keys ``` -Example output +* Example output ``` -$SOLO_NODE_KEYS_OUTPUT +$SOLO_NODE_KEY_PEM_OUTPUT ``` -Key files are generated in `~/.solo/keys` directory. - +PEM key files are generated in `~/.solo/keys` directory. 
``` -$ ls ~/.solo/cache/keys - -hedera-node0.crt hedera-node1.crt hedera-node2.crt private-node0.pfx private-node2.pfx -hedera-node0.key hedera-node1.key hedera-node2.key private-node1.pfx public.pfx +hedera-node1.crt hedera-node3.crt s-private-node1.pem s-public-node1.pem unused-gossip-pem +hedera-node1.key hedera-node3.key s-private-node2.pem s-public-node2.pem unused-tls +hedera-node2.crt hedera-node4.crt s-private-node3.pem s-public-node3.pem +hedera-node2.key hedera-node4.key s-private-node4.pem s-public-node4.pem ``` * Setup cluster with shared components - * In a separate terminal, you may run `k9s` to view the pod status. ``` -$ solo cluster setup +solo cluster setup ``` -Example output +* Example output ``` $SOLO_CLUSTER_SETUP_OUTPUT ``` +In a separate terminal, you may run `k9s` to view the pod status. * Deploy helm chart with Hedera network components * It may take a while (5~15 minutes depending on your internet speed) to download various docker images and get the pods started. * If it fails, ensure you have enough resources allocated for Docker engine and retry the command. ``` -$ solo network deploy +solo network deploy ``` -Example output +* Example output ``` $SOLO_NETWORK_DEPLOY_OUTPUT @@ -175,33 +180,34 @@ $SOLO_NETWORK_DEPLOY_OUTPUT * It may take a while as it download the hedera platform code from ``` -$ solo node setup +solo node setup ``` -Example output +* Example output ``` $SOLO_NODE_SETUP_OUTPUT ``` -* Start the nodes. +* Start the nodes ``` -$ solo node start +solo node start ``` -Example output +* Example output ``` $SOLO_NODE_START_OUTPUT ``` + * Deploy mirror node ``` -$ solo mirror-node deploy +solo mirror-node deploy ``` -Example output +* Example output ``` $SOLO_MIRROR_NODE_DEPLOY_OUTPUT @@ -210,10 +216,10 @@ $SOLO_MIRROR_NODE_DEPLOY_OUTPUT * Deploy a JSON RPC relay ``` -$ solo relay deploy +solo relay deploy ``` -Example output +* Example output ``` $SOLO_RELAY_DEPLOY_OUTPUT @@ -222,42 +228,49 @@ $SOLO_RELAY_DEPLOY_OUTPUT You may view the list of pods using `k9s` as below: ``` - Context: kind-solo-e2e <0> all Attach … ____ __.________ - Cluster: kind-solo-e2e <1> default Delete
| |/ _/ __ \______ - User: kind-solo-e2e Describe | < \____ / ___/ - K9s Rev: v0.27.4 ⚡️v0.32.4 Edit | | \ / /\___ \ - K8s Rev: v1.27.3 Help |____|__ \ /____//____ > - CPU: n/a Kill \/ \/ +Context: kind-solo <0> all Attach Delete | |/ _/ __ \______ + User: kind-solo Describe
| < \____ / ___/ + K9s Rev: v0.32.5 Edit Help |____|__ \ /____//____ > + CPU: n/a Jump Owner \/ \/ MEM: n/a -┌─────────────────────────────────────────────────── Pods(all)[27] ────────────────────────────────────────────────────┐ -│ NAMESPACE↑ NAME PF READY RESTARTS STATUS IP │ -│ fullstack-setup console-557956d575-fqctd ● 1/1 0 Running 10.244.0.4 │ -│ fullstack-setup minio-operator-7d575c5f84-j9p6f ● 1/1 0 Running 10.244.0.3 │ -│ kube-system coredns-5d78c9869d-gknqp ● 1/1 0 Running 10.244.0.6 │ -│ kube-system coredns-5d78c9869d-q59pc ● 1/1 0 Running 10.244.0.5 │ -│ kube-system etcd-solo-e2e-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kindnet-w9ps5 ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-apiserver-solo-e2e-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-controller-manager-solo-e2e-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-proxy-p69z8 ● 1/1 0 Running 172.18.0.2 │ -│ kube-system kube-scheduler-solo-e2e-control-plane ● 1/1 0 Running 172.18.0.2 │ -│ local-path-storage local-path-provisioner-6bc4bddd6b-8pkfk ● 1/1 0 Running 10.244.0.2 │ -│ solo envoy-proxy-node0-84947f844f-f28tp ● 1/1 0 Running 10.244.0.215 │ -│ solo envoy-proxy-node1-65f8879dcc-j2lrk ● 1/1 0 Running 10.244.0.216 │ -│ solo envoy-proxy-node2-667f848689-dkmf9 ● 1/1 0 Running 10.244.0.214 │ -│ solo fullstack-deployment-grpc-69f9cc5666-lf6ql ● 1/1 0 Running 10.244.0.227 │ -│ solo fullstack-deployment-hedera-explorer-79f79b7df4-wjdct ● 1/1 0 Running 10.244.0.226 │ -│ solo fullstack-deployment-importer-864489ffb8-6v8tk ● 1/1 0 Running 10.244.0.228 │ -│ solo fullstack-deployment-postgres-postgresql-0 ● 1/1 0 Running 10.244.0.232 │ -│ solo fullstack-deployment-rest-584f5cb6bb-q9vnt ● 1/1 0 Running 10.244.0.230 │ -│ solo fullstack-deployment-web3-69dcdfc4fb-mm5pk ● 1/1 0 Running 10.244.0.229 │ -│ solo haproxy-node0-6969f76c77-n5cfl ● 1/1 1 Running 10.244.0.219 │ -│ solo haproxy-node1-59f6976d45-x6xmp ● 1/1 1 Running 10.244.0.217 │ -│ solo haproxy-node2-6df64d5457-hf9ps ● 1/1 1 Running 10.244.0.218 │ -│ solo minio-pool-1-0 ● 2/2 1 Running 10.244.0.224 │ -│ solo network-node0-0 ● 5/5 0 Running 10.244.0.221 │ -│ solo network-node1-0 ● 5/5 0 Running 10.244.0.222 │ -│ solo network-node2-0 ● 5/5 0 Running 10.244.0.220 │ +┌───────────────────────────────────────────────── Pods(all)[31] ─────────────────────────────────────────────────┐ +│ NAMESPACE↑ NAME PF READY STATUS RESTARTS I │ +│ kube-system coredns-5d78c9869d-994t4 ● 1/1 Running 0 1 │ +│ kube-system coredns-5d78c9869d-vgt4q ● 1/1 Running 0 1 │ +│ kube-system etcd-solo-control-plane ● 1/1 Running 0 1 │ +│ kube-system kindnet-q26c9 ● 1/1 Running 0 1 │ +│ kube-system kube-apiserver-solo-control-plane ● 1/1 Running 0 1 │ +│ kube-system kube-controller-manager-solo-control-plane ● 1/1 Running 0 1 │ +│ kube-system kube-proxy-9b27j ● 1/1 Running 0 1 │ +│ kube-system kube-scheduler-solo-control-plane ● 1/1 Running 0 1 │ +│ local-path-storage local-path-provisioner-6bc4bddd6b-4mv8c ● 1/1 Running 0 1 │ +│ solo envoy-proxy-node1-65f8879dcc-rwg97 ● 1/1 Running 0 1 │ +│ solo envoy-proxy-node2-667f848689-628cx ● 1/1 Running 0 1 │ +│ solo envoy-proxy-node3-6bb4b4cbdf-dmwtr ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-grpc-75bb9c6c55-l7kvt ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-hedera-explorer-6565ccb4cb-9dbw2 ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-importer-dd74fd466-vs4mb ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-monitor-54b8f57db9-fn5qq ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-postgres-postgresql-0 ● 1/1 
Running 0 1 │ +│ solo fullstack-deployment-redis-node-0 ● 2/2 Running 0 1 │ +│ solo fullstack-deployment-rest-6d48f8dbfc-plbp2 ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-restjava-5d6c4cb648-r597f ● 1/1 Running 0 1 │ +│ solo fullstack-deployment-web3-55fdfbc7f7-lzhfl ● 1/1 Running 0 1 │ +│ solo haproxy-node1-785b9b6f9b-676mr ● 1/1 Running 1 1 │ +│ solo haproxy-node2-644b8c76d-v9mg6 ● 1/1 Running 1 1 │ +│ solo haproxy-node3-fbffdb64-272t2 ● 1/1 Running 1 1 │ +│ solo minio-pool-1-0 ● 2/2 Running 1 1 │ +│ solo network-node1-0 ● 5/5 Running 2 1 │ +│ solo network-node2-0 ● 5/5 Running 2 1 │ +│ solo network-node3-0 ● 5/5 Running 2 1 │ +│ solo relay-node1-node2-node3-hedera-json-rpc-relay-ddd4c8d8b-hdlpb ● 1/1 Running 0 1 │ +│ solo-cluster console-557956d575-c5qp7 ● 1/1 Running 0 1 │ +│ solo-cluster minio-operator-7d575c5f84-xdwwz ● 1/1 Running 0 1 │ +│ │ +└─────────────────────────────────────────────────────────────────────────────────────────────────────────────────┘ + ``` #### Access Hedera Network services @@ -266,12 +279,33 @@ Once the nodes are up, you may now expose various services (using `k9s` (shift-f * Node services: `network--svc` * HAProxy: `haproxy--svc` + ```bash + # enable portforwarding for haproxy + # node1 grpc port accessed by localhost:50211 + kubectl port-forward svc/haproxy-node1-svc -n "${SOLO_NAMESPACE}" 50211:50211 & + # node2 grpc port accessed by localhost:51211 + kubectl port-forward svc/haproxy-node2-svc -n "${SOLO_NAMESPACE}" 51211:50211 & + # node3 grpc port accessed by localhost:52211 + kubectl port-forward svc/haproxy-node3-svc -n "${SOLO_NAMESPACE}" 52211:50211 & + ``` * Envoy Proxy: `envoy-proxy--svc` + ```bash + # enable portforwarding for envoy proxy + kubectl port-forward svc/envoy-proxy-node1-svc -n "${SOLO_NAMESPACE}" 8181:8080 & + kubectl port-forward svc/envoy-proxy-node2-svc -n "${SOLO_NAMESPACE}" 8281:8080 & + kubectl port-forward svc/envoy-proxy-node3-svc -n "${SOLO_NAMESPACE}" 8381:8080 & + ``` * Hedera explorer: `fullstack-deployment-hedera-explorer` + ```bash + #enable portforwarding for hedera explorer, can be access at http://localhost:8080/ + kubectl port-forward svc/fullstack-deployment-hedera-explorer -n "${SOLO_NAMESPACE}" 8080:80 & + ``` * JSON Rpc Relays * You can deploy JSON RPC relays for one or more nodes as below: - ``` - $ solo relay deploy -i node0,node1 + ```bash + solo relay deploy -i node1 + # enable relay for node1 + kubectl port-forward svc/relay-node1-hedera-json-rpc-relay -n "${SOLO_NAMESPACE}" 7546:7546 & ``` Example output @@ -280,103 +314,89 @@ Example output $SOLO_RELAY_DEPLAY_OUTPUT ``` -### Example - 2: Deploy a standalone test network (version `0.47.0-alpha.0`) - -* Initialize `solo` with tag `v0.47.0-alpha.0` and list of node names `node0,node1,node2`: - -``` -# reset .solo directory -$ rm -rf ~/.solo +## For Developers Working on Hedera Service Repo -$ solo init -t v0.47.0-alpha.0 -i node0,node1,node2 -n "${SOLO_NAMESPACE}" -s "${SOLO_CLUSTER_SETUP_NAMESPACE}" --key-format pem -``` +First, please clone hedera service repo `https://github.com/hashgraph/hedera-services/` and build the code +with `./gradlew assemble`. If need to running nodes with different versions or releases, please duplicate the repo or build directories in +multiple directories, checkout to the respective version and build the code. -* Example output +To set customized `settings.txt` file, edit the file +`~/.solo/cache/templates/settings.txt` after `solo init` command. 
+Then you can start the custom-built Hedera network with the following command:
```
solo node setup --local-build-path <default build path>,node1=<build path>,node2=<build path>
+# example: solo node setup --local-build-path node1=../hedera-services/hedera-node/data/,../hedera-services/hedera-node/data,node3=../hedera-services/hedera-node/data
```

## For Developers Working on Platform core

To deploy nodes with locally built PlatformTestingTool (PTT) jar files, run the following command:
```
solo node setup --local-build-path <default build path>,node1=<build path>,node2=<build path> --app PlatformTestingTool.jar --app-config <app config path>

# example: solo node setup --local-build-path ../hedera-services/platform-sdk/sdk/data,node1=../hedera-services/platform-sdk/sdk/data,node2=../hedera-services/platform-sdk/sdk/data --app PlatformTestingTool.jar --app-config ../hedera-services/platform-sdk/platform-apps/tests/PlatformTestingTool/src/main/resources/FCMFCQ-Basic-2.5k-5m.json
```

## Logs
You can find the logs for solo command runs under the directory `~/.solo/logs/`.
The file `solo.log` contains the logs for the solo command.
The file `hashgraph-sdk.log` contains the logs from the Solo client when sending transactions to network nodes.

## Using IntelliJ remote debug with Solo

NOTE: the hedera-services path referenced, '../hedera-services/hedera-node/data', may need to be updated depending on the directory you are currently in. This also assumes that you have done an assemble/build and that the directory contents are up to date.

Example 1: attach a JVM debugger to a Hedera node
```bash
./test/e2e/setup-e2e.sh
solo node keys --gossip-keys --tls-keys
solo network deploy -i node1,node2,node3 --debug-nodeid node2
solo node setup -i node1,node2,node3 --local-build-path ../hedera-services/hedera-node/data
solo node start -i node1,node2,node3 --debug-nodeid node2
```

Once you see the following message, you can launch the JVM debugger from IntelliJ:
```
❯ Check all nodes are ACTIVE
 Check node: node1,
 Check node: node3, Please attach JVM debugger now.
+ Check node: node4, ``` -* Start the nodes +Example 2: attach jvm debugger with node add operation +```bash +./test/e2e/setup-e2e.sh +solo node keys --gossip-keys --tls-keys +solo network deploy -i node1,node2,node3 --pvcs +solo node setup -i node1,node2,node3 --local-build-path ../hedera-services/hedera-node/data +solo node start -i node1,node2,node3 +solo node add --gossip-keys --tls-keys --node-id node4 --debug-nodeid node4 --local-build-path ../hedera-services/hedera-node/data ``` -$ solo node start -# output is similar to example-1 -``` -## For Developers Working on Hedera Service Repo +Example 3: attach jvm debugger with node update operation -First, pleaes clone hedera service repo `https://github.com/hashgraph/hedera-services/` and build the code -with `./gradlew assemble`. If need to running nodes with different versions or releases, please duplicate the repo or build directories in -multiple directories, checkout to the respective version and build the code. - -To set customized `settings.txt` file, edit the file -`~/.solo/cache/templates/settings.txt` after `solo init` command. - -Then you can start customized built hedera network with the following command: -``` -solo node setup --local-build-path ,node1=,node2= +```bash +./test/e2e/setup-e2e.sh +solo node keys --gossip-keys --tls-keys +solo network deploy -i node1,node2,node3 +solo node setup -i node1,node2,node3 --local-build-path ../hedera-services/hedera-node/data +solo node start -i node1,node2,node3 +solo node update --node-id node2 --debug-nodeid node2 --local-build-path ../hedera-services/hedera-node/data --new-account-number 0.0.7 --gossip-public-key ./s-public-node2.pem --gossip-private-key ./s-private-node2.pem --agreement-public-key ./a-public-node2.pem --agreement-private-key ./a-private-node2.pem ``` -## For Developers Working on Platform core +Example 4: attach jvm debugger with node delete operation -To deploy node with local build PTT jar files, run the following command: -``` -solo node setup --local-build-path ,node1=,node2= - --app PlatformTestingTool.jar --app-config +```bash +./test/e2e/setup-e2e.sh +solo node keys --gossip-keys --tls-keys +solo network deploy -i node1,node2,node3,node4 +solo node setup -i node1,node2,node3,node4 --local-build-path ../hedera-services/hedera-node/data +solo node start -i node1,node2,node3,node4 +solo node delete --node-id node2 --debug-nodeid node3 ``` -## Logs -You can find log for running solo command under the directory `~/.solo/logs/` -The file `solo.log` contains the logs for the solo command. -The file `hashgraph-sdk.log` contains the logs from solo client when sending transactions to network nodes. 
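+
+For reference, a minimal sketch of what attaching looks like once a node is waiting for the debugger, assuming the debugged node's JVM listens on the standard JDWP port 5005 inside its pod (the port, and whether solo already forwards it for you, are assumptions here):
+
+```bash
+# Forward the assumed JDWP port from the debugged node's pod, then attach an
+# IntelliJ "Remote JVM Debug" run configuration to localhost:5005
+kubectl port-forward pod/network-node2-0 -n "${SOLO_NAMESPACE}" 5005:5005 &
+```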
- ## Support diff --git a/docs/content/_index.md b/docs/content/_index.md new file mode 100644 index 000000000..13e1d34c5 --- /dev/null +++ b/docs/content/_index.md @@ -0,0 +1,44 @@ +*** + +title: Welcome to Solo Documentation +geekdocNav: true +geekdocAlign: center +geekdocAnchor: false +geekdocDescription: Home page for Solo Documentation + +*** + + + + + + + +[![NPM Version](https://img.shields.io/npm/v/%40hashgraph%2Fsolo?logo=npm)](https://www.npmjs.com/package/@hashgraph/solo) +[![GitHub License](https://img.shields.io/github/license/hashgraph/solo?logo=apache\&logoColor=red)](LICENSE) +![node-lts](https://img.shields.io/node/v-lts/%40hashgraph%2Fsolo) +[![Build Application](https://github.com/hashgraph/solo/actions/workflows/flow-build-application.yaml/badge.svg)](https://github.com/hashgraph/solo/actions/workflows/flow-build-application.yaml) +[![Codacy Badge](https://app.codacy.com/project/badge/Grade/83a423a3a1c942459127b3aec62ab0b5)](https://app.codacy.com/gh/hashgraph/solo/dashboard?utm_source=gh\&utm_medium=referral\&utm_content=\&utm_campaign=Badge_grade) +[![codecov](https://codecov.io/gh/hashgraph/solo/graph/badge.svg?token=hBkQdB1XO5)](https://codecov.io/gh/hashgraph/solo) + +Solo is an opinionated CLI tool to deploy and manage standalone test networks. + +{{< button size="large" relref="getting-started/installation.md" >}}Getting Started{{< /button >}} + +## Feature overview + +{{< columns >}} + +### Clean and simple design + +Stay focused on deployment and don't get overwhelmed by a complex design. + +{{< /columns >}} + +{{< columns >}} + +### Easy configuration + +Getting started in minutes. Solo comes with easy to use configuration. + +{{< /columns >}} diff --git a/docs/content/contribution/contribution.md b/docs/content/contribution/contribution.md new file mode 100644 index 000000000..df86715ed --- /dev/null +++ b/docs/content/contribution/contribution.md @@ -0,0 +1,7 @@ +*** + +title: Solo Contribution +weight: -20 +geekdocNav: true +geekdocAlign: center +-------------------- diff --git a/docs/content/contribution/docs.md b/docs/content/contribution/docs.md new file mode 100644 index 000000000..1f02651bb --- /dev/null +++ b/docs/content/contribution/docs.md @@ -0,0 +1,7 @@ +*** + +title: Docs Contribution +weight: -20 +geekdocNav: true +geekdocAlign: center +-------------------- diff --git a/docs/content/getting-started/deploy.md b/docs/content/getting-started/deploy.md new file mode 100644 index 000000000..e0c528189 --- /dev/null +++ b/docs/content/getting-started/deploy.md @@ -0,0 +1,515 @@ +*** + +title: Deploy +weight: -20 +geekdocNav: true +geekdocAlign: center +geekdocAnchor: false +-------------------- + +### Example - 1: Deploy a standalone test network (version `0.42.5`) + +Initialize `solo` with tag `v0.42.5` and list of node names `node0,node1,node2` + +``` +$ solo init -t v0.42.5 -i node0,node1,node2 -n "${SOLO_NAMESPACE}" -s "${SOLO_CLUSTER_SETUP_NAMESPACE}" --key-format pfx + +``` + +Example output + +``` + +******************************* Solo ********************************************* +Version : 0.27.0 +Kubernetes Context : kind-solo +Kubernetes Cluster : kind-solo +Kubernetes Namespace : solo +********************************************************************************** +✔ Setup home directory and cache +✔ Check dependency: helm [OS: linux, Release: 5.15.0-1061-gke, Arch: x64] +✔ Check dependencies +✔ Setup chart manager + +*************************************************************************************** +Note: solo stores 
various artifacts (config, logs, keys etc.) in its home directory: /home/runner/.solo +If a full reset is needed, delete the directory or relevant sub-directories before running 'solo init'. +*************************************************************************************** +✔ Copy templates in '/home/runner/.solo/cache' + +``` + +Generate `pfx` formatted node keys + +We need to generate `pfx` keys as `pem` key files are only supported by Hedera platform >=`0.47.0-alpha.0`. + +``` +$ solo node keys --gossip-keys --tls-keys --key-format pfx + +``` + +Example output + +``` + +******************************* Solo ********************************************* +Version : 0.27.0 +Kubernetes Context : kind-solo +Kubernetes Cluster : kind-solo +Kubernetes Namespace : solo +********************************************************************************** +✔ Initialize +✔ Check keytool exists (Version: 21.0.1+12) +✔ Backup old files +✔ Generate private-node0.pfx for node: node0 +✔ Generate private-node1.pfx for node: node1 +✔ Generate private-node2.pfx for node: node2 +✔ Generate public.pfx file +✔ Clean up temp files +✔ Generate gossip keys +✔ Backup old files +✔ TLS key for node: node2 +✔ TLS key for node: node1 +✔ TLS key for node: node0 +✔ Generate gRPC TLS keys +✔ Finalize + +``` + +Key files are generated in `~/.solo/keys` directory. + +``` +$ ls ~/.solo/cache/keys + +hedera-node0.crt hedera-node1.crt hedera-node2.crt private-node0.pfx private-node2.pfx +hedera-node0.key hedera-node1.key hedera-node2.key private-node1.pfx public.pfx +``` + +Setup cluster with shared components.\ +In a separate terminal, you may run `k9s` to view the pod status. + +``` +$ solo cluster setup +``` + +Example output + +``` + +******************************* Solo ********************************************* +Version : 0.27.0 +Kubernetes Context : kind-solo +Kubernetes Cluster : kind-solo +Kubernetes Namespace : solo +********************************************************************************** +✔ Initialize +✔ Prepare chart values +✔ Install 'fullstack-cluster-setup' chart + +``` + +Deploy helm chart with Hedera network components\ +It may take a while (5~15 minutes depending on your internet speed) to download various docker images and get the pods started.\ +If it fails, ensure you have enough resources allocated for Docker engine and retry the command. 
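+
+If a failed attempt leaves a partial Helm release behind, one way to get back to a clean slate before retrying is to uninstall it directly. This is a sketch; it assumes the `fullstack-deployment` release name that appears in the output below.
+
+```bash
+# Remove the partially installed release, then re-run the deploy command
+helm uninstall fullstack-deployment -n "${SOLO_NAMESPACE}"
+```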
+
+```
+$ solo network deploy
+
+```
+
+Example output
+
+```
+
+******************************* Solo *********************************************
+Version : 0.27.0
+Kubernetes Context : kind-solo
+Kubernetes Cluster : kind-solo
+Kubernetes Namespace : solo
+**********************************************************************************
+✔ Initialize
+✔ Install chart 'fullstack-deployment'
+✔ Check Node: node0
+✔ Check Node: node1
+✔ Check Node: node2
+✔ Check node pods are running
+✔ Check Envoy Proxy for: node0
+✔ Check Envoy Proxy for: node2
+✔ Check Envoy Proxy for: node1
+✔ Check HAProxy for: node0
+✔ Check HAProxy for: node1
+✔ Check HAProxy for: node2
+✔ Check proxy pods are running
+✔ Check MinIO
+✔ Check auxiliary pods are ready
+
+```
+
+Setup node with Hedera platform software.\
+It may take a while as it downloads the Hedera platform code.
+
+```
+$ solo node setup
+
+```
+
+Example output
+
+```
+
+******************************* Solo *********************************************
+Version : 0.27.0
+Kubernetes Context : kind-solo
+Kubernetes Cluster : kind-solo
+Kubernetes Namespace : solo
+**********************************************************************************
+✔ Initialize
+✔ Check network pod: node0
+✔ Check network pod: node1
+✔ Check network pod: node2
+✔ Identify network pods
+✔ Copy configuration files
+✔ Copy Gossip keys to staging
+✔ Copy gRPC TLS keys to staging
+✔ Prepare config.txt for the network
+✔ Prepare staging directory
+✔ Update node: node0
+✔ Update node: node1
+✔ Update node: node2
+✔ Fetch platform software into network nodes
+✔ Copy Gossip keys
+✔ Copy Gossip keys
+✔ Copy Gossip keys
+✔ Copy TLS keys
+✔ Copy TLS keys
+✔ Copy TLS keys
+✔ Copy configuration files
+✔ Copy configuration files
+✔ Copy configuration files
+✔ Set file permissions
+✔ Node: node2
+✔ Set file permissions
+✔ Node: node0
+✔ Set file permissions
+✔ Node: node1
+✔ Setup network nodes
+✔ Finalize
+
+```
+
+Start the nodes.
+ +``` +$ solo node start + +``` + +Example output + +``` + +******************************* Solo ********************************************* +Version : 0.27.0 +Kubernetes Context : kind-solo +Kubernetes Cluster : kind-solo +Kubernetes Namespace : solo +********************************************************************************** +✔ Initialize +✔ Check network pod: node0 +✔ Check network pod: node2 +✔ Check network pod: node1 +✔ Identify network pods +✔ Start node: node1 +✔ Start node: node0 +✔ Start node: node2 +✔ Starting nodes +✔ Check node: node0 +✔ Check node: node1 +✔ Check node: node2 +✔ Check nodes are ACTIVE +✔ Check proxy for node: node0 +✔ Check proxy for node: node2 +✔ Check proxy for node: node1 +✔ Check node proxies are ACTIVE + +``` + +Deploy mirror node + +``` +$ solo mirror-node deploy + +``` + +Example output + +``` + +******************************* Solo ********************************************* +Version : 0.27.0 +Kubernetes Context : kind-solo +Kubernetes Cluster : kind-solo +Kubernetes Namespace : solo +********************************************************************************** +✔ Initialize +✔ Prepare address book +✔ Deploy mirror-node +✔ Enable mirror-node +✔ Check Hedera Explorer +✔ Check Postgres DB +✔ Check GRPC +✔ Check Monitor +✔ Check REST API +✔ Check Importer +✔ Check pods are ready + +``` + +Deploy a JSON RPC relay + +``` +$ solo relay deploy + +``` + +Example output + +``` + +******************************* Solo ********************************************* +Version : 0.27.0 +Kubernetes Context : kind-solo +Kubernetes Cluster : kind-solo +Kubernetes Namespace : solo +********************************************************************************** +✔ Initialize +✔ Prepare chart values +✔ Deploy JSON RPC Relay +✔ Check relay is ready + +``` + +You may view the list of pods using `k9s` as below: + +``` + Context: kind-solo-e2e <0> all Attach … ____ __.________ + Cluster: kind-solo-e2e <1> default Delete

| |/ _/ __ \______ + User: kind-solo-e2e Describe | < \____ / ___/ + K9s Rev: v0.27.4 ⚡️v0.32.4 Edit | | \ / /\___ \ + K8s Rev: v1.27.3 Help |____|__ \ /____//____ > + CPU: n/a Kill \/ \/ + MEM: n/a +┌─────────────────────────────────────────────────── Pods(all)[27] ────────────────────────────────────────────────────┐ +│ NAMESPACE↑ NAME PF READY RESTARTS STATUS IP │ +│ fullstack-setup console-557956d575-fqctd ● 1/1 0 Running 10.244.0.4 │ +│ fullstack-setup minio-operator-7d575c5f84-j9p6f ● 1/1 0 Running 10.244.0.3 │ +│ kube-system coredns-5d78c9869d-gknqp ● 1/1 0 Running 10.244.0.6 │ +│ kube-system coredns-5d78c9869d-q59pc ● 1/1 0 Running 10.244.0.5 │ +│ kube-system etcd-solo-e2e-control-plane ● 1/1 0 Running 172.18.0.2 │ +│ kube-system kindnet-w9ps5 ● 1/1 0 Running 172.18.0.2 │ +│ kube-system kube-apiserver-solo-e2e-control-plane ● 1/1 0 Running 172.18.0.2 │ +│ kube-system kube-controller-manager-solo-e2e-control-plane ● 1/1 0 Running 172.18.0.2 │ +│ kube-system kube-proxy-p69z8 ● 1/1 0 Running 172.18.0.2 │ +│ kube-system kube-scheduler-solo-e2e-control-plane ● 1/1 0 Running 172.18.0.2 │ +│ local-path-storage local-path-provisioner-6bc4bddd6b-8pkfk ● 1/1 0 Running 10.244.0.2 │ +│ solo envoy-proxy-node0-84947f844f-f28tp ● 1/1 0 Running 10.244.0.215 │ +│ solo envoy-proxy-node1-65f8879dcc-j2lrk ● 1/1 0 Running 10.244.0.216 │ +│ solo envoy-proxy-node2-667f848689-dkmf9 ● 1/1 0 Running 10.244.0.214 │ +│ solo fullstack-deployment-grpc-69f9cc5666-lf6ql ● 1/1 0 Running 10.244.0.227 │ +│ solo fullstack-deployment-hedera-explorer-79f79b7df4-wjdct ● 1/1 0 Running 10.244.0.226 │ +│ solo fullstack-deployment-importer-864489ffb8-6v8tk ● 1/1 0 Running 10.244.0.228 │ +│ solo fullstack-deployment-postgres-postgresql-0 ● 1/1 0 Running 10.244.0.232 │ +│ solo fullstack-deployment-rest-584f5cb6bb-q9vnt ● 1/1 0 Running 10.244.0.230 │ +│ solo fullstack-deployment-web3-69dcdfc4fb-mm5pk ● 1/1 0 Running 10.244.0.229 │ +│ solo haproxy-node0-6969f76c77-n5cfl ● 1/1 1 Running 10.244.0.219 │ +│ solo haproxy-node1-59f6976d45-x6xmp ● 1/1 1 Running 10.244.0.217 │ +│ solo haproxy-node2-6df64d5457-hf9ps ● 1/1 1 Running 10.244.0.218 │ +│ solo minio-pool-1-0 ● 2/2 1 Running 10.244.0.224 │ +│ solo network-node0-0 ● 5/5 0 Running 10.244.0.221 │ +│ solo network-node1-0 ● 5/5 0 Running 10.244.0.222 │ +│ solo network-node2-0 ● 5/5 0 Running 10.244.0.220 │ + +``` + +#### Access Hedera Network services + +Once the nodes are up, you may now expose various services (using `k9s` (shift-f) or `kubectl port-forward`) and access. Below are most used services that you may expose. 
+ +Node services: `network--svc`\ +HAProxy: `haproxy--svc`\ +Envoy Proxy: `envoy-proxy--svc`\ +Hedera explorer: `fullstack-deployment-hedera-explorer`\ +JSON Rpc Relays\ +You can deploy JSON RPC relays for one or more nodes as below: + +``` +$ solo relay deploy -i node0,node1 + +``` + +Example output + +``` + +******************************* Solo ********************************************* +Version : 0.27.0 +Kubernetes Context : kind-solo +Kubernetes Cluster : kind-solo +Kubernetes Namespace : solo +********************************************************************************** +✔ Initialize +✔ Prepare chart values +✔ Deploy JSON RPC Relay +✔ Check relay is ready + +``` + +### Example - 2: Deploy a standalone test network (version `0.47.0-alpha.0`) + +Initialize `solo` with tag `v0.47.0-alpha.0` and list of node names `node0,node1,node2`: + +``` +# reset .solo directory +$ rm -rf ~/.solo + +$ solo init -t v0.47.0-alpha.0 -i node0,node1,node2 -n "${SOLO_NAMESPACE}" -s "${SOLO_CLUSTER_SETUP_NAMESPACE}" --key-format pem + +``` + +Example output + +``` + +******************************* Solo ********************************************* +Version : 0.27.0 +Kubernetes Context : kind-solo +Kubernetes Cluster : kind-solo +Kubernetes Namespace : solo +********************************************************************************** +✔ Setup home directory and cache +✔ Check dependency: helm [OS: linux, Release: 5.15.0-1061-gke, Arch: x64] +✔ Check dependencies +✔ Setup chart manager + +*************************************************************************************** +Note: solo stores various artifacts (config, logs, keys etc.) in its home directory: /home/runner/.solo +If a full reset is needed, delete the directory or relevant sub-directories before running 'solo init'. +*************************************************************************************** +✔ Copy templates in '/home/runner/.solo/cache' + +``` + +Generate `pem` formatted node keys + +``` +$ solo node keys --gossip-keys --tls-keys --key-format pem + +``` + +Example output + +``` + +******************************* Solo ********************************************* +Version : 0.27.0 +Kubernetes Context : kind-solo +Kubernetes Cluster : kind-solo +Kubernetes Namespace : solo +********************************************************************************** +✔ Initialize +✔ Backup old files +✔ Gossip pem key for node: node0 +✔ Gossip pem key for node: node1 +✔ Gossip pem key for node: node2 +✔ Generate gossip keys +✔ Backup old files +✔ TLS key for node: node0 +✔ TLS key for node: node1 +✔ TLS key for node: node2 +✔ Generate gRPC TLS keys +✔ Finalize + +``` + +PEM key files are generated in `~/.solo/keys` directory. + +``` +$ ls ~/.solo/cache/keys +a-private-node0.pem a-public-node1.pem hedera-node1.crt s-private-node0.pem s-public-node1.pem +a-private-node1.pem a-public-node2.pem hedera-node1.key s-private-node1.pem s-public-node2.pem +a-private-node2.pem hedera-node0.crt hedera-node2.crt s-private-node2.pem +a-public-node0.pem hedera-node0.key hedera-node2.key s-public-node0.pem + +``` + +Setup cluster with shared components + +``` +$ solo cluster setup + +# output is similar to example-1 + +``` + +In a separate terminal, you may run `k9s` to view the pod status. 
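+
+If you prefer plain `kubectl` over `k9s`, the same pods can be watched with the namespace variable set earlier:
+
+```bash
+# Stream pod status changes in the solo namespace
+kubectl get pods -n "${SOLO_NAMESPACE}" -w
+```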
+
+Deploy helm chart with Hedera network components
+
+```
+$ solo network deploy
+
+# output is similar to example-1
+
+```
+
+Setup node with Hedera platform.\
+It may take a while (~10 minutes depending on your internet speed) to download various docker images and get the
+pods started.
+
+```
+$ solo node setup
+
+# output is similar to example-1
+
+```
+
+Start the nodes.
+
+```
+$ solo node start
+
+# output is similar to example-1
+
+```
+
+### For Developers Working on Hedera Service Repo
+
+First, please clone the Hedera services repo `https://github.com/hashgraph/hedera-services/` and build the code
+with `./gradlew assemble`. If you need to run nodes with different versions or releases, duplicate the repo or build directories in
+multiple directories, check out the respective version in each, and build the code.
+
+To use a customized `settings.txt` file, edit the file
+`~/.solo/cache/templates/settings.txt` after running the `solo init` command.
+
+Then you can start the locally built Hedera network with the following command:
+
+```
+solo node setup --local-build-path ,node1=,node2=
+
+```
+
+### For Developers Working on Platform core
+
+To deploy nodes with locally built PlatformTestingTool (PTT) jar files, run the following command:
+
+```
+solo node setup --local-build-path ,node1=,node2=
+ --app PlatformTestingTool.jar --app-config
+
+```
+
+### Logs
+
+You can find logs for solo command runs under the directory `~/.solo/logs/`.
+The file `solo.log` contains the logs for the solo command.
+The file `hashgraph-sdk.log` contains the logs from the Solo client when sending transactions to network nodes.
diff --git a/docs/content/getting-started/installation.md b/docs/content/getting-started/installation.md
new file mode 100644
index 000000000..d9c08b923
--- /dev/null
+++ b/docs/content/getting-started/installation.md
@@ -0,0 +1,33 @@
+***
+
+title: Installation
+weight: -20
+geekdocNav: true
+geekdocAlign: center
+geekdocAnchor: false
+--------------------
+
+### Requirements
+
+Node(>=20.14.0) (*lts/hydrogen*)
+
+### Setup
+
+Install [Node](https://nodejs.org/en/download). You may also use [nvm](https://github.com/nvm-sh/nvm) to manage different Node versions locally:
+
+```
+nvm install lts/hydrogen
+
+nvm use lts/hydrogen
+```
+
+### Install Solo
+
+Run `npm install -g @hashgraph/solo`
+
+{{< expand "Useful tools" ">" >}}
+
+Install [kubectl](https://kubernetes.io/docs/tasks/tools/)
+
+Install [k9s](https://k9scli.io/)
+{{< /expand >}}
diff --git a/docs/content/getting-started/setup.md b/docs/content/getting-started/setup.md
new file mode 100644
index 000000000..7e6bb1edc
--- /dev/null
+++ b/docs/content/getting-started/setup.md
@@ -0,0 +1,82 @@
+***
+
+title: Setup
+weight: -20
+geekdocNav: true
+geekdocAlign: center
+geekdocAnchor: false
+--------------------
+
+### Remote cluster
+
+You may use a remote Kubernetes cluster. In this case, ensure the Kubernetes context is set up correctly.
+
+```
+kubectl config use-context
+
+```
+
+### Local cluster
+
+You may use [kind](https://kind.sigs.k8s.io/) or [microk8s](https://microk8s.io/) to create a cluster.\
+In this case,
+ensure your Docker engine has enough resources (e.g. Memory >=8Gb, CPU: >=4).
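+
+To check what the engine currently has available, you can query Docker directly (`NCPU` and `MemTotal` are standard `docker info` fields):
+
+```bash
+# Print the CPUs and total memory currently allocated to the Docker engine
+docker info --format 'CPUs: {{.NCPU}}, Memory: {{.MemTotal}} bytes'
+```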
Below we show how you can use `kind` to create a cluster + +First, use the following command to set up the environment variables: + +``` +export SOLO_CLUSTER_NAME=solo +export SOLO_NAMESPACE=solo +export SOLO_CLUSTER_SETUP_NAMESPACE=solo-cluster + +``` + +Then run the following command to set the kubectl context to the new cluster: + +```bash +kind create cluster -n "${SOLO_CLUSTER_NAME}" +``` + +Example output + +``` +Creating cluster "solo" ... + ✓ Ensuring node image (kindest/node:v1.29.1) 🖼 + ✓ Preparing nodes 📦 + ✓ Writing configuration 📜 + ✓ Starting control-plane 🕹️ + ✓ Installing CNI 🔌 + ✓ Installing StorageClass 💾 +Set kubectl context to "kind-solo" +You can now use your cluster with: + +kubectl cluster-info --context kind-solo + +Have a nice day! 👋 + +``` + +You may now view pods in your cluster using `k9s -A` as below: + +``` + Context: kind-solo <0> all Attac… ____ __.________ + Cluster: kind-solo <1> default Delete| |/ _/ __ \______ + User: kind-solo Descri| < \____ / ___/ + K9s Rev: v0.27.4 ⚡️v0.32.3 Edit | | \ / /\___ \ + K8s Rev: v1.27.3 Help |____|__ \ /____//____ > + CPU: n/a Kill \/ \/ + MEM: n/a +┌───────────────────────────────────────────── Pods(all)[9] ─────────────────────────────────────────────┐ +│ NAMESPACE↑ NAME PF READY RESTARTS STATUS IP │ +│ kube-system coredns-5d78c9869d-kc27p ● 1/1 0 Running 10.244.0.4 │ +│ kube-system coredns-5d78c9869d-r8mzz ● 1/1 0 Running 10.244.0.3 │ +│ kube-system etcd-solo-control-plane ● 1/1 0 Running 172.18.0.2 │ +│ kube-system kindnet-gppbk ● 1/1 0 Running 172.18.0.2 │ +│ kube-system kube-apiserver-solo-control-plane ● 1/1 0 Running 172.18.0.2 │ +│ kube-system kube-controller-manager-solo-control-plane ● 1/1 0 Running 172.18.0.2 │ +│ kube-system kube-proxy-wb9w5 ● 1/1 0 Running 172.18.0.2 │ +│ kube-system kube-scheduler-solo-control-plane ● 1/1 0 Running 172.18.0.2 │ +│ local-path-storage local-path-provisioner-6bc4bddd6b-5vh5d ● 1/1 0 Running 10.244.0.2 │ +│ │ +---------------------------------------------------------------------------------------------------------- +``` diff --git a/docs/data/menu/main.yml b/docs/data/menu/main.yml index c79e21b65..4a3b1b4ab 100644 --- a/docs/data/menu/main.yml +++ b/docs/data/menu/main.yml @@ -1,9 +1,19 @@ --- main: - - name: User - ref: "/User/README.md" - - name: Developer - ref: "/Developer/DEV.md" + - name: Getting Started + sub: + - name: Installation + ref: "/getting-started/installation.md" + - name: Setup + ref: "/getting-started/setup.md" + - name: Deploy + ref: "/getting-started/deploy.md" + - name: Contribution + sub: + - name: Solo + ref: "/contribution/contribution.md" + - name: Docs + ref: "/contribution/docs.md" - name: Classes ref: "/solo/static/Classes/index.html" external: true diff --git a/docs/data/menu/more.yml b/docs/data/menu/more.yml index fba3b6b1e..df5548ab9 100644 --- a/docs/data/menu/more.yml +++ b/docs/data/menu/more.yml @@ -4,8 +4,12 @@ more: - name: Hedera Hashgraph ref: "https://hedera.com/" external: true + - name: Releases + ref: "https://github.com/hashgraph/solo/releases" + external: true + icon: "gdoc_download" - name: "View Source" - ref: "https://github.com/hashgraph/solo/issues" + ref: "https://github.com/hashgraph/solo" external: true icon: "gdoc_github" diff --git a/docs/hugo.toml b/docs/hugo.toml index b6434aaf4..23e2f2fd5 100644 --- a/docs/hugo.toml +++ b/docs/hugo.toml @@ -1,6 +1,6 @@ baseURL = 'https://hashgraph.github.io/solo' languageCode = 'en-us' -title = 'Solo Documentation' +title = 'Solo' theme = "hugo-geekdoc" # Geekdoc required 
configuration diff --git a/jest.config.mjs b/jest.config.mjs index 341553719..5b2bbd9b4 100644 --- a/jest.config.mjs +++ b/jest.config.mjs @@ -19,7 +19,8 @@ const config = { moduleFileExtensions: ['js', 'mjs'], verbose: true, reporters: [['default', { summaryThreshold: 1 }], 'jest-junit'], - testSequencer: './test/testSequencer.mjs' + testSequencer: './test/testSequencer.mjs', + setupFilesAfterEnv: ['jest-expect-message'] } export default config diff --git a/package-lock.json b/package-lock.json index 04b94c678..b5b482b0a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "@hashgraph/solo", - "version": "0.27.0", + "version": "0.30.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@hashgraph/solo", - "version": "0.27.0", + "version": "0.30.1", "license": "Apache2.0", "os": [ "darwin", @@ -14,30 +14,30 @@ "win32" ], "dependencies": { - "@hashgraph/proto": "^2.15.0", - "@hashgraph/sdk": "^2.47.0", - "@kubernetes/client-node": "^0.21.0", - "@listr2/prompt-adapter-enquirer": "^2.0.8", - "@peculiar/x509": "^1.11.0", - "adm-zip": "^0.5.14", + "@hashgraph/sdk": "^2.51.0", + "@kubernetes/client-node": "^0.22.0", + "@listr2/prompt-adapter-enquirer": "^2.0.11", + "@peculiar/x509": "^1.12.2", + "adm-zip": "^0.5.16", "chalk": "^5.3.0", "dot-object": "^2.1.5", "dotenv": "^16.4.5", "enquirer": "^2.4.1", "esm": "^3.2.25", "figlet": "^1.7.0", - "got": "^14.4.1", - "inquirer": "^9.3.1", + "got": "^14.4.2", + "inquirer": "^11.1.0", "ip": "^2.0.1", "js-base64": "^3.7.7", "js-yaml": "^4.1.0", - "listr2": "^8.2.1", - "semver": "^7.6.2", - "stream-buffers": "^3.0.2", - "tar": "^7.2.0", + "jsdoc": "^4.0.3", + "listr2": "^8.2.4", + "semver": "^7.6.3", + "stream-buffers": "^3.0.3", + "tar": "^7.4.3", "uuid": "^10.0.0", - "winston": "^3.13.0", - "yaml": "^2.4.5", + "winston": "^3.14.2", + "yaml": "^2.5.1", "yargs": "^17.7.2" }, "bin": { @@ -47,16 +47,17 @@ "@jest/globals": "^29.7.0", "@jest/test-sequencer": "^29.7.0", "cross-env": "^7.0.3", - "eslint": "^8.57.0", + "eslint": "^8.57.1", "eslint-config-standard": "^17.1.0", "eslint-plugin-headers": "^1.1.2", - "eslint-plugin-import": "^2.29.1", + "eslint-plugin-import": "^2.30.0", "eslint-plugin-n": "^16.6.2", - "eslint-plugin-promise": "^6.2.0", + "eslint-plugin-promise": "^6.6.0", "jest": "^29.7.0", "jest-environment-steps": "^1.1.1", + "jest-expect-message": "^1.1.3", "jest-junit": "^16.0.0", - "nyc": "^17.0.0", + "nyc": "^17.1.0", "remark-cli": "^12.0.1", "remark-lint-list-item-indent": "^4.0.0", "remark-lint-unordered-list-marker-style": "^4.0.0", @@ -416,7 +417,6 @@ "version": "7.24.7", "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.7.tgz", "integrity": "sha512-9uUYRm6OqQrCqQdG1iCBwBPZgN8ciDBro2nIOFaiRz1/BCxaI7CNvQbDHvsArAC7Tw9Hda/B3U+6ui9u4HWXPw==", - "dev": true, "bin": { "parser": "bin/babel-parser.js" }, @@ -749,9 +749,9 @@ } }, "node_modules/@eslint/js": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", - "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -1166,9 +1166,9 @@ "integrity": "sha512-lcHwpNoggQTObv5apGNCTdJrO69eHOZMi4BNC+rTLER8iHAqGrUVeLh/irVIM7zTw2bOXA8T6uNPeujwOLg/2Q==" }, 
"node_modules/@hashgraph/cryptography": { - "version": "1.4.8-beta.5", - "resolved": "https://registry.npmjs.org/@hashgraph/cryptography/-/cryptography-1.4.8-beta.5.tgz", - "integrity": "sha512-soq2vGLRkdl2Evr+gIvIjCXJjqA1hOAjysBGG+dhP6tKx2PEgEjb3hON/sMbxm3Q4qQdkML/vEthdAV707+flw==", + "version": "1.4.8-beta.8", + "resolved": "https://registry.npmjs.org/@hashgraph/cryptography/-/cryptography-1.4.8-beta.8.tgz", + "integrity": "sha512-RK1SL5B6IGsYM4HyepC24rsMGr1qOvHFbNiJPlK+AGV5lApjxGpyNVWC80GusYqwRD9B1ljw43wJBSbHdaZIgw==", "dependencies": { "asn1js": "^3.0.5", "bignumber.js": "^9.1.1", @@ -1202,33 +1202,10 @@ } } }, - "node_modules/@hashgraph/cryptography/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, "node_modules/@hashgraph/proto": { - "version": "2.15.0", - "resolved": "https://registry.npmjs.org/@hashgraph/proto/-/proto-2.15.0.tgz", - "integrity": "sha512-ULSNIwQZIroTssrEfNoUcIcWEJ9BIwKZiAsaRvJ2+Rr3XIr+np7UXv6sEkJU+jSyzk97LrTdiRAoc/hJO9Vx8Q==", + "version": "2.15.0-beta.4", + "resolved": "https://registry.npmjs.org/@hashgraph/proto/-/proto-2.15.0-beta.4.tgz", + "integrity": "sha512-da51j1RCHm+uXpQNM0KJ7qbhUJLTp6Avw8GdL+PQCbZ4lBwKAo8jjJ5rRjf1odsN1+zKl+JF7SMmKZB8PY229Q==", "dependencies": { "long": "^4.0.0", "protobufjs": "^7.2.5" @@ -1238,17 +1215,17 @@ } }, "node_modules/@hashgraph/sdk": { - "version": "2.47.0", - "resolved": "https://registry.npmjs.org/@hashgraph/sdk/-/sdk-2.47.0.tgz", - "integrity": "sha512-a7YS+9MXuSft/rtASSFT0rk44bQ1glPeSYVCTg9VYmtjHR37b24xkS8bV821uV7+F6YgR7H5Z9Ru7czG02I8Tw==", + "version": "2.51.0", + "resolved": "https://registry.npmjs.org/@hashgraph/sdk/-/sdk-2.51.0.tgz", + "integrity": "sha512-+RtBs8wmPr9g93fDSMCnQnAX27w+i5itw0bbYDFiAcFZ0F3Vb+TyxdPw7jfcHRgFDvwkyblEsPBzG7DO2lt5Ow==", "dependencies": { "@ethersproject/abi": "^5.7.0", "@ethersproject/bignumber": "^5.7.0", "@ethersproject/bytes": "^5.7.0", "@ethersproject/rlp": "^5.7.0", "@grpc/grpc-js": "1.8.2", - "@hashgraph/cryptography": "1.4.8-beta.5", - "@hashgraph/proto": "2.15.0-beta.1", + "@hashgraph/cryptography": "1.4.8-beta.8", + "@hashgraph/proto": "2.15.0-beta.4", "axios": "^1.6.4", "bignumber.js": "^9.1.1", "bn.js": "^5.1.1", @@ -1273,25 +1250,14 @@ } } }, - "node_modules/@hashgraph/sdk/node_modules/@hashgraph/proto": { - "version": "2.15.0-beta.1", - "resolved": "https://registry.npmjs.org/@hashgraph/proto/-/proto-2.15.0-beta.1.tgz", - "integrity": "sha512-gNt+MBBnVyFpsAr+ac6meCVLazfdM4tVuNdJAnwXyCVFIiYfaGUz4WwAjBdQwzVgIbN7vaqgksynqj37pUwHtw==", - "dependencies": { - "long": "^4.0.0", - "protobufjs": "^7.2.5" - }, - "engines": { - "node": ">=10.0.0" - } - }, "node_modules/@humanwhocodes/config-array": { - "version": "0.11.14", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", - "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": 
"sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", + "deprecated": "Use @eslint/config-array instead", "dev": true, "dependencies": { - "@humanwhocodes/object-schema": "^2.0.2", + "@humanwhocodes/object-schema": "^2.0.3", "debug": "^4.3.1", "minimatch": "^3.0.5" }, @@ -1313,15 +1279,225 @@ } }, "node_modules/@humanwhocodes/object-schema": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.2.tgz", - "integrity": "sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz", + "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==", + "deprecated": "Use @eslint/object-schema instead", "dev": true }, + "node_modules/@inquirer/checkbox": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@inquirer/checkbox/-/checkbox-3.0.1.tgz", + "integrity": "sha512-0hm2nrToWUdD6/UHnel/UKGdk1//ke5zGUpHIvk5ZWmaKezlGxZkOJXNSWsdxO/rEqTkbB3lNC2J6nBElV2aAQ==", + "dependencies": { + "@inquirer/core": "^9.2.1", + "@inquirer/figures": "^1.0.6", + "@inquirer/type": "^2.0.0", + "ansi-escapes": "^4.3.2", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/confirm": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-4.0.1.tgz", + "integrity": "sha512-46yL28o2NJ9doViqOy0VDcoTzng7rAb6yPQKU7VDLqkmbCaH4JqK4yk4XqlzNWy9PVC5pG1ZUXPBQv+VqnYs2w==", + "dependencies": { + "@inquirer/core": "^9.2.1", + "@inquirer/type": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/core": { + "version": "9.2.1", + "resolved": "https://registry.npmjs.org/@inquirer/core/-/core-9.2.1.tgz", + "integrity": "sha512-F2VBt7W/mwqEU4bL0RnHNZmC/OxzNx9cOYxHqnXX3MP6ruYvZUZAW9imgN9+h/uBT/oP8Gh888J2OZSbjSeWcg==", + "dependencies": { + "@inquirer/figures": "^1.0.6", + "@inquirer/type": "^2.0.0", + "@types/mute-stream": "^0.0.4", + "@types/node": "^22.5.5", + "@types/wrap-ansi": "^3.0.0", + "ansi-escapes": "^4.3.2", + "cli-width": "^4.1.0", + "mute-stream": "^1.0.0", + "signal-exit": "^4.1.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^6.2.0", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/core/node_modules/@types/node": { + "version": "22.5.5", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.5.tgz", + "integrity": "sha512-Xjs4y5UPO/CLdzpgR6GirZJx36yScjh73+2NlLlkFRSoQN8B0DpfXPdZGnvVmLRLOsqDpOfTNv7D9trgGhmOIA==", + "dependencies": { + "undici-types": "~6.19.2" + } + }, + "node_modules/@inquirer/core/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@inquirer/core/node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==" + }, + "node_modules/@inquirer/editor": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/@inquirer/editor/-/editor-3.0.1.tgz", + "integrity": "sha512-VA96GPFaSOVudjKFraokEEmUQg/Lub6OXvbIEZU1SDCmBzRkHGhxoFAVaF30nyiB4m5cEbDgiI2QRacXZ2hw9Q==", + "dependencies": { + "@inquirer/core": "^9.2.1", + "@inquirer/type": "^2.0.0", + "external-editor": "^3.1.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/expand": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@inquirer/expand/-/expand-3.0.1.tgz", + "integrity": "sha512-ToG8d6RIbnVpbdPdiN7BCxZGiHOTomOX94C2FaT5KOHupV40tKEDozp12res6cMIfRKrXLJyexAZhWVHgbALSQ==", + "dependencies": { + "@inquirer/core": "^9.2.1", + "@inquirer/type": "^2.0.0", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + } + }, "node_modules/@inquirer/figures": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.3.tgz", - "integrity": "sha512-ErXXzENMH5pJt5/ssXV0DfWUZqly8nGzf0UcBV9xTnP+KyffE2mqyxIMBrZ8ijQck2nU0TQm40EQB53YreyWHw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.6.tgz", + "integrity": "sha512-yfZzps3Cso2UbM7WlxKwZQh2Hs6plrbjs1QnzQDZhK2DgyCo6D8AaHps9olkNcUFlcYERMqU3uJSp1gmy3s/qQ==", + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/input": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@inquirer/input/-/input-3.0.1.tgz", + "integrity": "sha512-BDuPBmpvi8eMCxqC5iacloWqv+5tQSJlUafYWUe31ow1BVXjW2a5qe3dh4X/Z25Wp22RwvcaLCc2siHobEOfzg==", + "dependencies": { + "@inquirer/core": "^9.2.1", + "@inquirer/type": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/number": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@inquirer/number/-/number-2.0.1.tgz", + "integrity": "sha512-QpR8jPhRjSmlr/mD2cw3IR8HRO7lSVOnqUvQa8scv1Lsr3xoAMMworcYW3J13z3ppjBFBD2ef1Ci6AE5Qn8goQ==", + "dependencies": { + "@inquirer/core": "^9.2.1", + "@inquirer/type": "^2.0.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/password": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@inquirer/password/-/password-3.0.1.tgz", + "integrity": "sha512-haoeEPUisD1NeE2IanLOiFr4wcTXGWrBOyAyPZi1FfLJuXOzNmxCJPgUrGYKVh+Y8hfGJenIfz5Wb/DkE9KkMQ==", + "dependencies": { + "@inquirer/core": "^9.2.1", + "@inquirer/type": "^2.0.0", + "ansi-escapes": "^4.3.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/prompts": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/@inquirer/prompts/-/prompts-6.0.1.tgz", + "integrity": "sha512-yl43JD/86CIj3Mz5mvvLJqAOfIup7ncxfJ0Btnl0/v5TouVUyeEdcpknfgc+yMevS/48oH9WAkkw93m7otLb/A==", + "dependencies": { + "@inquirer/checkbox": "^3.0.1", + "@inquirer/confirm": "^4.0.1", + "@inquirer/editor": "^3.0.1", + "@inquirer/expand": "^3.0.1", + "@inquirer/input": "^3.0.1", + "@inquirer/number": "^2.0.1", + "@inquirer/password": "^3.0.1", + "@inquirer/rawlist": "^3.0.1", + "@inquirer/search": "^2.0.1", + "@inquirer/select": "^3.0.1" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/rawlist": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@inquirer/rawlist/-/rawlist-3.0.1.tgz", + "integrity": "sha512-VgRtFIwZInUzTiPLSfDXK5jLrnpkuSOh1ctfaoygKAdPqjcjKYmGh6sCY1pb0aGnCGsmhUxoqLDUAU0ud+lGXQ==", + "dependencies": { + "@inquirer/core": "^9.2.1", + "@inquirer/type": "^2.0.0", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/search": { + "version": "2.0.1", + "resolved": 
"https://registry.npmjs.org/@inquirer/search/-/search-2.0.1.tgz", + "integrity": "sha512-r5hBKZk3g5MkIzLVoSgE4evypGqtOannnB3PKTG9NRZxyFRKcfzrdxXXPcoJQsxJPzvdSU2Rn7pB7lw0GCmGAg==", + "dependencies": { + "@inquirer/core": "^9.2.1", + "@inquirer/figures": "^1.0.6", + "@inquirer/type": "^2.0.0", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/select": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@inquirer/select/-/select-3.0.1.tgz", + "integrity": "sha512-lUDGUxPhdWMkN/fHy1Lk7pF3nK1fh/gqeyWXmctefhxLYxlDsc7vsPBEpxrfVGDsVdyYJsiJoD4bJ1b623cV1Q==", + "dependencies": { + "@inquirer/core": "^9.2.1", + "@inquirer/figures": "^1.0.6", + "@inquirer/type": "^2.0.0", + "ansi-escapes": "^4.3.2", + "yoctocolors-cjs": "^2.1.2" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@inquirer/type": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@inquirer/type/-/type-2.0.0.tgz", + "integrity": "sha512-XvJRx+2KR3YXyYtPUUy+qd9i7p+GO9Ko6VIIpWlBrpWwXDv8WLFeHTxz35CfQFUiBMLXlGHhGzys7lqit9gWag==", + "dependencies": { + "mute-stream": "^1.0.0" + }, "engines": { "node": ">=18" } @@ -1879,19 +2055,52 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, + "node_modules/@jsdoc/salty": { + "version": "0.2.8", + "resolved": "https://registry.npmjs.org/@jsdoc/salty/-/salty-0.2.8.tgz", + "integrity": "sha512-5e+SFVavj1ORKlKaKr2BmTOekmXbelU7dC0cDkQLqag7xfuTPuGMUFx7KWJuv4bYZrTsoL2Z18VVCOKYxzoHcg==", + "dependencies": { + "lodash": "^4.17.21" + }, + "engines": { + "node": ">=v12.0.0" + } + }, + "node_modules/@jsep-plugin/assignment": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jsep-plugin/assignment/-/assignment-1.2.1.tgz", + "integrity": "sha512-gaHqbubTi29aZpVbBlECRpmdia+L5/lh2BwtIJTmtxdbecEyyX/ejAOg7eQDGNvGOUmPY7Z2Yxdy9ioyH/VJeA==", + "engines": { + "node": ">= 10.16.0" + }, + "peerDependencies": { + "jsep": "^0.4.0||^1.0.0" + } + }, + "node_modules/@jsep-plugin/regex": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/@jsep-plugin/regex/-/regex-1.0.3.tgz", + "integrity": "sha512-XfZgry4DwEZvSFtS/6Y+R48D7qJYJK6R9/yJFyUFHCIUMEEHuJ4X95TDgJp5QkmzfLYvapMPzskV5HpIDrREug==", + "engines": { + "node": ">= 10.16.0" + }, + "peerDependencies": { + "jsep": "^0.4.0||^1.0.0" + } + }, "node_modules/@kubernetes/client-node": { - "version": "0.21.0", - "resolved": "https://registry.npmjs.org/@kubernetes/client-node/-/client-node-0.21.0.tgz", - "integrity": "sha512-yYRbgMeyQbvZDHt/ZqsW3m4lRefzhbbJEuj8sVXM+bufKrgmzriA2oq7lWPH/k/LQIicAME9ixPUadTrxIF6dQ==", + "version": "0.22.0", + "resolved": "https://registry.npmjs.org/@kubernetes/client-node/-/client-node-0.22.0.tgz", + "integrity": "sha512-K86G5S/V+qMmg/Ht26CtEvTbedsD1u6RaYfIR4V4DphyNBLc3rY20mFNvXVE43MFbQrd5rDOvtOjTCsaVoBiEg==", "dependencies": { "@types/js-yaml": "^4.0.1", - "@types/node": "^20.1.1", + "@types/node": "^22.0.0", "@types/request": "^2.47.1", "@types/ws": "^8.5.3", "byline": "^5.0.0", "isomorphic-ws": "^5.0.0", "js-yaml": "^4.1.0", - "jsonpath-plus": "^8.0.0", + "jsonpath-plus": "^9.0.0", "request": "^2.88.0", "rfc4648": "^1.3.0", "stream-buffers": "^3.0.2", @@ -1903,10 +2112,23 @@ "openid-client": "^5.3.0" } }, + "node_modules/@kubernetes/client-node/node_modules/@types/node": { + "version": "22.5.5", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.5.5.tgz", + "integrity": "sha512-Xjs4y5UPO/CLdzpgR6GirZJx36yScjh73+2NlLlkFRSoQN8B0DpfXPdZGnvVmLRLOsqDpOfTNv7D9trgGhmOIA==", + "dependencies": { + "undici-types": "~6.19.2" + } + }, + 
"node_modules/@kubernetes/client-node/node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==" + }, "node_modules/@listr2/prompt-adapter-enquirer": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/@listr2/prompt-adapter-enquirer/-/prompt-adapter-enquirer-2.0.8.tgz", - "integrity": "sha512-Z0i6ZVBgVW4k+chMJ61fh9H6S6P+8YHxzUzK13lxY0iv+Qky5OAeP6Qas+HlrVsLrdTgNZJTIyg968WP62hoBw==", + "version": "2.0.11", + "resolved": "https://registry.npmjs.org/@listr2/prompt-adapter-enquirer/-/prompt-adapter-enquirer-2.0.11.tgz", + "integrity": "sha512-4xmQnYzB06d1iiutJdTZyEcvCR9RVC6QehogRqDQckBNfwD7i+6hEntFVAtewI1/52lT3HkgKptJeFwCwmjX0g==", "engines": { "node": ">=18.0.0" }, @@ -2039,93 +2261,93 @@ } }, "node_modules/@peculiar/asn1-cms": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-cms/-/asn1-cms-2.3.8.tgz", - "integrity": "sha512-Wtk9R7yQxGaIaawHorWKP2OOOm/RZzamOmSWwaqGphIuU6TcKYih0slL6asZlSSZtVoYTrBfrddSOD/jTu9vuQ==", + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-cms/-/asn1-cms-2.3.13.tgz", + "integrity": "sha512-joqu8A7KR2G85oLPq+vB+NFr2ro7Ls4ol13Zcse/giPSzUNN0n2k3v8kMpf6QdGUhI13e5SzQYN8AKP8sJ8v4w==", "dependencies": { - "@peculiar/asn1-schema": "^2.3.8", - "@peculiar/asn1-x509": "^2.3.8", - "@peculiar/asn1-x509-attr": "^2.3.8", + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", + "@peculiar/asn1-x509-attr": "^2.3.13", "asn1js": "^3.0.5", "tslib": "^2.6.2" } }, "node_modules/@peculiar/asn1-csr": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-csr/-/asn1-csr-2.3.8.tgz", - "integrity": "sha512-ZmAaP2hfzgIGdMLcot8gHTykzoI+X/S53x1xoGbTmratETIaAbSWMiPGvZmXRA0SNEIydpMkzYtq4fQBxN1u1w==", + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-csr/-/asn1-csr-2.3.13.tgz", + "integrity": "sha512-+JtFsOUWCw4zDpxp1LbeTYBnZLlGVOWmHHEhoFdjM5yn4wCn+JiYQ8mghOi36M2f6TPQ17PmhNL6/JfNh7/jCA==", "dependencies": { - "@peculiar/asn1-schema": "^2.3.8", - "@peculiar/asn1-x509": "^2.3.8", + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", "asn1js": "^3.0.5", "tslib": "^2.6.2" } }, "node_modules/@peculiar/asn1-ecc": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-ecc/-/asn1-ecc-2.3.8.tgz", - "integrity": "sha512-Ah/Q15y3A/CtxbPibiLM/LKcMbnLTdUdLHUgdpB5f60sSvGkXzxJCu5ezGTFHogZXWNX3KSmYqilCrfdmBc6pQ==", + "version": "2.3.14", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-ecc/-/asn1-ecc-2.3.14.tgz", + "integrity": "sha512-zWPyI7QZto6rnLv6zPniTqbGaLh6zBpJyI46r1yS/bVHJXT2amdMHCRRnbV5yst2H8+ppXG6uXu/M6lKakiQ8w==", "dependencies": { - "@peculiar/asn1-schema": "^2.3.8", - "@peculiar/asn1-x509": "^2.3.8", + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", "asn1js": "^3.0.5", "tslib": "^2.6.2" } }, "node_modules/@peculiar/asn1-pfx": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-pfx/-/asn1-pfx-2.3.8.tgz", - "integrity": "sha512-XhdnCVznMmSmgy68B9pVxiZ1XkKoE1BjO4Hv+eUGiY1pM14msLsFZ3N7K46SoITIVZLq92kKkXpGiTfRjlNLyg==", - "dependencies": { - "@peculiar/asn1-cms": "^2.3.8", - "@peculiar/asn1-pkcs8": "^2.3.8", - "@peculiar/asn1-rsa": "^2.3.8", - "@peculiar/asn1-schema": "^2.3.8", + "version": "2.3.13", + "resolved": 
"https://registry.npmjs.org/@peculiar/asn1-pfx/-/asn1-pfx-2.3.13.tgz", + "integrity": "sha512-fypYxjn16BW+5XbFoY11Rm8LhZf6euqX/C7BTYpqVvLem1GvRl7A+Ro1bO/UPwJL0z+1mbvXEnkG0YOwbwz2LA==", + "dependencies": { + "@peculiar/asn1-cms": "^2.3.13", + "@peculiar/asn1-pkcs8": "^2.3.13", + "@peculiar/asn1-rsa": "^2.3.13", + "@peculiar/asn1-schema": "^2.3.13", "asn1js": "^3.0.5", "tslib": "^2.6.2" } }, "node_modules/@peculiar/asn1-pkcs8": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-pkcs8/-/asn1-pkcs8-2.3.8.tgz", - "integrity": "sha512-rL8k2x59v8lZiwLRqdMMmOJ30GHt6yuHISFIuuWivWjAJjnxzZBVzMTQ72sknX5MeTSSvGwPmEFk2/N8+UztFQ==", + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-pkcs8/-/asn1-pkcs8-2.3.13.tgz", + "integrity": "sha512-VP3PQzbeSSjPjKET5K37pxyf2qCdM0dz3DJ56ZCsol3FqAXGekb4sDcpoL9uTLGxAh975WcdvUms9UcdZTuGyQ==", "dependencies": { - "@peculiar/asn1-schema": "^2.3.8", - "@peculiar/asn1-x509": "^2.3.8", + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", "asn1js": "^3.0.5", "tslib": "^2.6.2" } }, "node_modules/@peculiar/asn1-pkcs9": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-pkcs9/-/asn1-pkcs9-2.3.8.tgz", - "integrity": "sha512-+nONq5tcK7vm3qdY7ZKoSQGQjhJYMJbwJGbXLFOhmqsFIxEWyQPHyV99+wshOjpOjg0wUSSkEEzX2hx5P6EKeQ==", - "dependencies": { - "@peculiar/asn1-cms": "^2.3.8", - "@peculiar/asn1-pfx": "^2.3.8", - "@peculiar/asn1-pkcs8": "^2.3.8", - "@peculiar/asn1-schema": "^2.3.8", - "@peculiar/asn1-x509": "^2.3.8", - "@peculiar/asn1-x509-attr": "^2.3.8", + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-pkcs9/-/asn1-pkcs9-2.3.13.tgz", + "integrity": "sha512-rIwQXmHpTo/dgPiWqUgby8Fnq6p1xTJbRMxCiMCk833kQCeZrC5lbSKg6NDnJTnX2kC6IbXBB9yCS2C73U2gJg==", + "dependencies": { + "@peculiar/asn1-cms": "^2.3.13", + "@peculiar/asn1-pfx": "^2.3.13", + "@peculiar/asn1-pkcs8": "^2.3.13", + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", + "@peculiar/asn1-x509-attr": "^2.3.13", "asn1js": "^3.0.5", "tslib": "^2.6.2" } }, "node_modules/@peculiar/asn1-rsa": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-rsa/-/asn1-rsa-2.3.8.tgz", - "integrity": "sha512-ES/RVEHu8VMYXgrg3gjb1m/XG0KJWnV4qyZZ7mAg7rrF3VTmRbLxO8mk+uy0Hme7geSMebp+Wvi2U6RLLEs12Q==", + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-rsa/-/asn1-rsa-2.3.13.tgz", + "integrity": "sha512-wBNQqCyRtmqvXkGkL4DR3WxZhHy8fDiYtOjTeCd7SFE5F6GBeafw3EJ94PX/V0OJJrjQ40SkRY2IZu3ZSyBqcg==", "dependencies": { - "@peculiar/asn1-schema": "^2.3.8", - "@peculiar/asn1-x509": "^2.3.8", + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", "asn1js": "^3.0.5", "tslib": "^2.6.2" } }, "node_modules/@peculiar/asn1-schema": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-schema/-/asn1-schema-2.3.8.tgz", - "integrity": "sha512-ULB1XqHKx1WBU/tTFIA+uARuRoBVZ4pNdOA878RDrRbBfBGcSzi5HBkdScC6ZbHn8z7L8gmKCgPC1LHRrP46tA==", + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-schema/-/asn1-schema-2.3.13.tgz", + "integrity": "sha512-3Xq3a01WkHRZL8X04Zsfg//mGaA21xlL4tlVn4v2xGT0JStiztATRkMwa5b+f/HXmY2smsiLXYK46Gwgzvfg3g==", "dependencies": { "asn1js": "^3.0.5", "pvtsutils": "^1.3.5", @@ -2133,11 +2355,11 @@ } }, "node_modules/@peculiar/asn1-x509": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-x509/-/asn1-x509-2.3.8.tgz", - "integrity": 
"sha512-voKxGfDU1c6r9mKiN5ZUsZWh3Dy1BABvTM3cimf0tztNwyMJPhiXY94eRTgsMQe6ViLfT6EoXxkWVzcm3mFAFw==", + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-x509/-/asn1-x509-2.3.13.tgz", + "integrity": "sha512-PfeLQl2skXmxX2/AFFCVaWU8U6FKW1Db43mgBhShCOFS1bVxqtvusq1hVjfuEcuSQGedrLdCSvTgabluwN/M9A==", "dependencies": { - "@peculiar/asn1-schema": "^2.3.8", + "@peculiar/asn1-schema": "^2.3.13", "asn1js": "^3.0.5", "ipaddr.js": "^2.1.0", "pvtsutils": "^1.3.5", @@ -2145,31 +2367,31 @@ } }, "node_modules/@peculiar/asn1-x509-attr": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-x509-attr/-/asn1-x509-attr-2.3.8.tgz", - "integrity": "sha512-4Z8mSN95MOuX04Aku9BUyMdsMKtVQUqWnr627IheiWnwFoheUhX3R4Y2zh23M7m80r4/WG8MOAckRKc77IRv6g==", + "version": "2.3.13", + "resolved": "https://registry.npmjs.org/@peculiar/asn1-x509-attr/-/asn1-x509-attr-2.3.13.tgz", + "integrity": "sha512-WpEos6CcnUzJ6o2Qb68Z7Dz5rSjRGv/DtXITCNBtjZIRWRV12yFVci76SVfOX8sisL61QWMhpLKQibrG8pi2Pw==", "dependencies": { - "@peculiar/asn1-schema": "^2.3.8", - "@peculiar/asn1-x509": "^2.3.8", + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", "asn1js": "^3.0.5", "tslib": "^2.6.2" } }, "node_modules/@peculiar/x509": { - "version": "1.11.0", - "resolved": "https://registry.npmjs.org/@peculiar/x509/-/x509-1.11.0.tgz", - "integrity": "sha512-8rdxE//tsWLb2Yo2TYO2P8gieStbrHK/huFMV5PPfwX8I5HmtOus+Ox6nTKrPA9o+WOPaa5xKenee+QdmHBd5g==", - "dependencies": { - "@peculiar/asn1-cms": "^2.3.8", - "@peculiar/asn1-csr": "^2.3.8", - "@peculiar/asn1-ecc": "^2.3.8", - "@peculiar/asn1-pkcs9": "^2.3.8", - "@peculiar/asn1-rsa": "^2.3.8", - "@peculiar/asn1-schema": "^2.3.8", - "@peculiar/asn1-x509": "^2.3.8", + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/@peculiar/x509/-/x509-1.12.2.tgz", + "integrity": "sha512-a99Y4Kg40c8QJrPz9cl2L2Ocmn8nVGp9vrHa725h56hRIaGPSr1IzSgx9wTolHve+D5t1kFe5M7aEJJUuRnj8A==", + "dependencies": { + "@peculiar/asn1-cms": "^2.3.13", + "@peculiar/asn1-csr": "^2.3.13", + "@peculiar/asn1-ecc": "^2.3.14", + "@peculiar/asn1-pkcs9": "^2.3.13", + "@peculiar/asn1-rsa": "^2.3.13", + "@peculiar/asn1-schema": "^2.3.13", + "@peculiar/asn1-x509": "^2.3.13", "pvtsutils": "^1.3.5", "reflect-metadata": "^0.2.2", - "tslib": "^2.6.2", + "tslib": "^2.7.0", "tsyringe": "^4.8.0" } }, @@ -2236,6 +2458,12 @@ "resolved": "https://registry.npmjs.org/@protobufjs/utf8/-/utf8-1.1.0.tgz", "integrity": "sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==" }, + "node_modules/@rtsao/scc": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz", + "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", + "dev": true + }, "node_modules/@sec-ant/readable-stream": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/@sec-ant/readable-stream/-/readable-stream-0.4.1.tgz", @@ -2248,11 +2476,11 @@ "dev": true }, "node_modules/@sindresorhus/is": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-6.3.1.tgz", - "integrity": "sha512-FX4MfcifwJyFOI2lPoX7PQxCqx8BG1HCho7WdiXwpEQx1Ycij0JxkfYtGK7yqNScrZGSlt6RE6sw8QYoH7eKnQ==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-7.0.0.tgz", + "integrity": "sha512-WDTlVTyvFivSOuyvMeedzg2hdoBLZ3f1uNVuEida2Rl9BrfjrIRjWA/VZIrMRLvSwJYCAlCRA3usDt1THytxWQ==", "engines": { - "node": ">=16" + "node": ">=18" }, "funding": { "url": 
"https://github.com/sindresorhus/is?sponsor=1" @@ -2430,6 +2658,20 @@ "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==", "dev": true }, + "node_modules/@types/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==" + }, + "node_modules/@types/markdown-it": { + "version": "14.1.2", + "resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-14.1.2.tgz", + "integrity": "sha512-promo4eFwuiW+TfGxhi+0x3czqTYJkG8qB17ZUJiVF10Xm7NLVRSLUsfRTU/6h1e24VvRnXCx+hG7li58lkzog==", + "dependencies": { + "@types/linkify-it": "^5", + "@types/mdurl": "^2" + } + }, "node_modules/@types/mdast": { "version": "4.0.3", "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.3.tgz", @@ -2439,18 +2681,31 @@ "@types/unist": "*" } }, + "node_modules/@types/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@types/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==" + }, "node_modules/@types/ms": { "version": "0.7.34", "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.34.tgz", "integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==", "dev": true }, + "node_modules/@types/mute-stream": { + "version": "0.0.4", + "resolved": "https://registry.npmjs.org/@types/mute-stream/-/mute-stream-0.0.4.tgz", + "integrity": "sha512-CPM9nzrCPPJHQNA9keH9CVkVI+WR5kMa+7XEs5jcGQ0VoAGnLv242w8lIVgwAEfmE4oufJRaTc9PNLQl0ioAow==", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/node": { - "version": "20.8.7", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.8.7.tgz", - "integrity": "sha512-21TKHHh3eUHIi2MloeptJWALuCu5H7HQTdTrWIFReA8ad+aggoX+lRes3ex7/FtpC+sVUpFMQ+QTfYr74mruiQ==", + "version": "20.14.11", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.11.tgz", + "integrity": "sha512-kprQpL8MMeszbz6ojB5/tU8PLN4kesnN8Gjzw349rDlNgsSzg90lAVj3llK99Dh7JON+t9AuscPPFW6mPbTnSA==", "dependencies": { - "undici-types": "~5.25.1" + "undici-types": "~5.26.4" } }, "node_modules/@types/request": { @@ -2511,6 +2766,11 @@ "integrity": "sha512-dqId9J8K/vGi5Zr7oo212BGii5m3q5Hxlkwy3WpYuKPklmBEvsbMYYyLxAQpSffdLl/gdW0XUpKWFvYmyoWCoQ==", "dev": true }, + "node_modules/@types/wrap-ansi": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/@types/wrap-ansi/-/wrap-ansi-3.0.0.tgz", + "integrity": "sha512-ltIpx+kM7g/MLRZfkbL7EsCEjfzCcScLpkg37eXEtx5kmrAKBkTJwd1GIAjDSL8wTpM6Hzn5YO4pSb91BEwu1g==" + }, "node_modules/@types/ws": { "version": "8.5.10", "resolved": "https://registry.npmjs.org/@types/ws/-/ws-8.5.10.tgz", @@ -2582,9 +2842,9 @@ } }, "node_modules/adm-zip": { - "version": "0.5.14", - "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.14.tgz", - "integrity": "sha512-DnyqqifT4Jrcvb8USYjp6FHtBpEIz1mnXu6pTRHZ0RL69LbQYiO+0lDFg5+OKA7U29oWSs3a/i8fhn8ZcceIWg==", + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.16.tgz", + "integrity": "sha512-TGw5yVi4saajsSEgz25grObGHEUaDrniwvA2qwSC060KfqGPdglhvPMA2lPIoxs3PQIItj2iag35fONcQqgUaQ==", "engines": { "node": ">=12.0" } @@ -2698,28 +2958,32 @@ "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==" }, 
"node_modules/array-buffer-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.0.tgz", - "integrity": "sha512-LPuwb2P+NrQw3XhxGc36+XSvuBPopovXYTR9Ew++Du9Yb/bx5AzBfrIsBoj0EZUifjQU+sHL21sseZ3jerWO/A==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.1.tgz", + "integrity": "sha512-ahC5W1xgou+KTXix4sAO8Ki12Q+jf4i0+tmk3sC+zgcynshkHxzpXdImBehiUYKKKDwvfFiJl1tZt6ewscS1Mg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "is-array-buffer": "^3.0.1" + "call-bind": "^1.0.5", + "is-array-buffer": "^3.0.4" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/array-includes": { - "version": "3.1.7", - "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.7.tgz", - "integrity": "sha512-dlcsNBIiWhPkHdOEEKnehA+RNUWDc4UqFtnIXU4uuYDPtA4LDkr7qip2p0VvFAEXNDr0yWZ9PJyIRiGjRLQzwQ==", + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.8.tgz", + "integrity": "sha512-itaWrbYbqpGXkGhZPGUulwnhVf5Hpy1xiCFsGqyIGglbBxmG5vSjxQen3/WGOjPpNEv1RtBLKxbmVXm8HpJStQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0", + "get-intrinsic": "^1.2.4", "is-string": "^1.0.7" }, "engines": { @@ -2730,16 +2994,17 @@ } }, "node_modules/array.prototype.findlastindex": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.3.tgz", - "integrity": "sha512-LzLoiOMAxvy+Gd3BAq3B7VeIgPdo+Q8hthvKtXybMvRV0jrXfJM/t8mw7nNlpEcVlVUnCnM2KSX4XU5HmpodOA==", + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.5.tgz", + "integrity": "sha512-zfETvRFA8o7EiNn++N5f/kaCw221hrpGsDmcpndVupkPzEc1Wuf3VgC0qby1BbHs7f5DVYjgtEU2LLh5bqeGfQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "es-shim-unscopables": "^1.0.0", - "get-intrinsic": "^1.2.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-shim-unscopables": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -2785,17 +3050,18 @@ } }, "node_modules/arraybuffer.prototype.slice": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.2.tgz", - "integrity": "sha512-yMBKppFur/fbHu9/6USUe03bZ4knMYiwFBcyiaXB8Go0qNehwX6inYPzK9U0NeQvGxKthcmHcaR8P5MStSRBAw==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.3.tgz", + "integrity": "sha512-bMxMKAjg13EBSVscxTaYA4mRc5t1UAXa2kXiGTNfZ079HIWXEkKmkgFrh/nJqamaLSrXO5H4WFFkPEaLJWbs3A==", "dev": true, "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1", - "is-array-buffer": "^3.0.2", + "array-buffer-byte-length": "^1.0.1", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", + "es-abstract": "^1.22.3", + "es-errors": "^1.2.1", + "get-intrinsic": "^1.2.3", + "is-array-buffer": "^3.0.4", 
"is-shared-array-buffer": "^1.0.2" }, "engines": { @@ -2853,10 +3119,13 @@ } }, "node_modules/available-typed-arrays": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.5.tgz", - "integrity": "sha512-DMD0KiN46eipeziST1LPP/STfDU0sufISXmjSgvVsoU2tqxctQeASejWcfNtxYKqETM1UxQ8sp2OrSBWpHY6sw==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", "dev": true, + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, "engines": { "node": ">= 0.4" }, @@ -2878,11 +3147,11 @@ "integrity": "sha512-NmWvPnx0F1SfrQbYwOi7OeaNGokp9XhzNioJ/CSBs8Qa4vxug81mhJEAVZwxXuBmYB5KDRfMq/F3RR0BIU7sWg==" }, "node_modules/axios": { - "version": "1.6.4", - "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.4.tgz", - "integrity": "sha512-heJnIs6N4aa1eSthhN9M5ioILu8Wi8vmQW9iHQ9NUvfkJb0lEEDUiIdQNAuBtfUt3FxReaKdpQA5DbmMOqzF/A==", + "version": "1.7.4", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.4.tgz", + "integrity": "sha512-DukmaFRnY6AzAALSH4J2M3k6PkaC+MfaAGdEERRWcC9q3/TWQwLpHR8ZRLKTdQ3aBDL64EdluRDjJqKw+BPZEw==", "dependencies": { - "follow-redirects": "^1.15.4", + "follow-redirects": "^1.15.6", "form-data": "^4.0.0", "proxy-from-env": "^1.1.0" } @@ -3083,15 +3352,10 @@ "node": ">=8" } }, - "node_modules/bl": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz", - "integrity": "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==", - "dependencies": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } + "node_modules/bluebird": { + "version": "3.7.2", + "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz", + "integrity": "sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg==" }, "node_modules/bn.js": { "version": "5.2.1", @@ -3166,9 +3430,9 @@ } }, "node_modules/buffer": { - "version": "5.7.1", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", - "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", + "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", "funding": [ { "type": "github", @@ -3185,7 +3449,7 @@ ], "dependencies": { "base64-js": "^1.3.1", - "ieee754": "^1.1.13" + "ieee754": "^1.2.1" } }, "node_modules/buffer-from": { @@ -3387,6 +3651,17 @@ "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", "integrity": "sha512-4tYFyifaFfGacoiObjJegolkwSU4xQNGbVgUiNYVUxbQ2x2lUsFvY4hVgVzGiIe6WLOPqycWXA40l+PWsxthUw==" }, + "node_modules/catharsis": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/catharsis/-/catharsis-0.9.0.tgz", + "integrity": "sha512-prMTQVpcns/tzFgFVkVp6ak6RykZyWb3gu8ckUpd6YkTlacOd3DXGJjIpD4Q6zJirizvaiAjSSHlOsA+6sNh2A==", + "dependencies": { + "lodash": "^4.17.15" + }, + "engines": { + "node": ">= 10" + } + }, "node_modules/chalk": { "version": "5.3.0", "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", @@ -3530,22 +3805,14 @@ } }, "node_modules/cli-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", - "integrity": 
"sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz", + "integrity": "sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw==", "dependencies": { - "restore-cursor": "^3.1.0" + "restore-cursor": "^5.0.0" }, "engines": { - "node": ">=8" - } - }, - "node_modules/cli-spinners": { - "version": "2.9.1", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.1.tgz", - "integrity": "sha512-jHgecW0pxkonBJdrKsqxgRX9AcG+u/5k0Q7WPDfi8AogLAdwxEkyYYNWwZ5GvVFoFx2uiY1eNcSK00fh+1+FyQ==", - "engines": { - "node": ">=6" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -3649,14 +3916,6 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, - "node_modules/clone": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/clone/-/clone-1.0.4.tgz", - "integrity": "sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==", - "engines": { - "node": ">=0.8" - } - }, "node_modules/co": { "version": "4.6.0", "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", @@ -3894,18 +4153,69 @@ "node": ">=0.10" } }, - "node_modules/dateformat": { - "version": "4.6.3", - "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-4.6.3.tgz", - "integrity": "sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==", - "engines": { - "node": "*" - } - }, - "node_modules/debug": { - "version": "4.3.4", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", - "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", + "node_modules/data-view-buffer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.1.tgz", + "integrity": "sha512-0lht7OugA5x3iJLOWFhWK/5ehONdprk0ISXqVFn/NFrDu+cuc8iADFrGQz5BnRK7LLU3JmkbXSxaqX+/mXYtUA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-length": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.1.tgz", + "integrity": "sha512-4J7wRJD3ABAzr8wP+OcIcqq2dlUKp4DVflx++hs5h5ZKydWMI6/D/fAot+yh6g2tHh8fLFTvNOaVN357NvSrOQ==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/data-view-byte-offset": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.0.tgz", + "integrity": "sha512-t/Ygsytq+R995EJ5PZlD4Cu56sWa8InXySaViRzw9apusqsOO2bQP+SbYzAhR0pFKoB+43lYy8rWban9JSuXnA==", + "dev": true, + "dependencies": { + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", + "is-data-view": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/dateformat": { + "version": "4.6.3", + "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-4.6.3.tgz", + "integrity": 
"sha512-2P0p0pFGzHS5EMnhdxQi7aJN+iMheud0UhG4dlE1DLAlvL8JHjJJTX/CSm4JXwV0Ka5nGk3zC5mcb5bUQUxxMA==", + "engines": { + "node": "*" + } + }, + "node_modules/debug": { + "version": "4.3.4", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.4.tgz", + "integrity": "sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==", "dev": true, "dependencies": { "ms": "2.1.2" @@ -4016,17 +4326,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/defaults": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/defaults/-/defaults-1.0.4.tgz", - "integrity": "sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==", - "dependencies": { - "clone": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/defer-to-connect": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", @@ -4233,6 +4532,28 @@ "node": ">=8.6" } }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/environment": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/environment/-/environment-1.1.0.tgz", + "integrity": "sha512-xUtoPkMggbz0MPyPiIWr1Kp4aeWJjDZ6SMvURhimjdZgsRuDplF5/s9hcgGhyXMhs+6vpnuoiZ2kFiu3FMnS8Q==", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/error-ex": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", @@ -4249,50 +4570,57 @@ "dev": true }, "node_modules/es-abstract": { - "version": "1.22.3", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.22.3.tgz", - "integrity": "sha512-eiiY8HQeYfYH2Con2berK+To6GrK2RxbPawDkGq4UiCQQfZHb6wX9qQqkbpPqaxQFcl8d9QzZqo0tGE0VcrdwA==", - "dev": true, - "dependencies": { - "array-buffer-byte-length": "^1.0.0", - "arraybuffer.prototype.slice": "^1.0.2", - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.5", - "es-set-tostringtag": "^2.0.1", + "version": "1.23.3", + "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.23.3.tgz", + "integrity": "sha512-e+HfNH61Bj1X9/jLc5v1owaLYuHdeHHSQlkhCBiTK8rBvKaULl/beGMxwrMXjpYrv4pz22BlY570vVePA2ho4A==", + "dev": true, + "dependencies": { + "array-buffer-byte-length": "^1.0.1", + "arraybuffer.prototype.slice": "^1.0.3", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", + "data-view-buffer": "^1.0.1", + "data-view-byte-length": "^1.0.1", + "data-view-byte-offset": "^1.0.0", + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "es-set-tostringtag": "^2.0.3", "es-to-primitive": "^1.2.1", "function.prototype.name": "^1.1.6", - "get-intrinsic": "^1.2.2", - "get-symbol-description": "^1.0.0", + "get-intrinsic": "^1.2.4", + "get-symbol-description": "^1.0.2", "globalthis": "^1.0.3", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.0", - "has-proto": "^1.0.1", + "has-property-descriptors": "^1.0.2", + "has-proto": "^1.0.3", "has-symbols": "^1.0.3", - "hasown": "^2.0.0", - "internal-slot": "^1.0.5", - "is-array-buffer": "^3.0.2", + "hasown": "^2.0.2", + "internal-slot": "^1.0.7", + 
"is-array-buffer": "^3.0.4", "is-callable": "^1.2.7", - "is-negative-zero": "^2.0.2", + "is-data-view": "^1.0.1", + "is-negative-zero": "^2.0.3", "is-regex": "^1.1.4", - "is-shared-array-buffer": "^1.0.2", + "is-shared-array-buffer": "^1.0.3", "is-string": "^1.0.7", - "is-typed-array": "^1.1.12", + "is-typed-array": "^1.1.13", "is-weakref": "^1.0.2", "object-inspect": "^1.13.1", "object-keys": "^1.1.1", - "object.assign": "^4.1.4", - "regexp.prototype.flags": "^1.5.1", - "safe-array-concat": "^1.0.1", - "safe-regex-test": "^1.0.0", - "string.prototype.trim": "^1.2.8", - "string.prototype.trimend": "^1.0.7", - "string.prototype.trimstart": "^1.0.7", - "typed-array-buffer": "^1.0.0", - "typed-array-byte-length": "^1.0.0", - "typed-array-byte-offset": "^1.0.0", - "typed-array-length": "^1.0.4", + "object.assign": "^4.1.5", + "regexp.prototype.flags": "^1.5.2", + "safe-array-concat": "^1.1.2", + "safe-regex-test": "^1.0.3", + "string.prototype.trim": "^1.2.9", + "string.prototype.trimend": "^1.0.8", + "string.prototype.trimstart": "^1.0.8", + "typed-array-buffer": "^1.0.2", + "typed-array-byte-length": "^1.0.1", + "typed-array-byte-offset": "^1.0.2", + "typed-array-length": "^1.0.6", "unbox-primitive": "^1.0.2", - "which-typed-array": "^1.1.13" + "which-typed-array": "^1.1.15" }, "engines": { "node": ">= 0.4" @@ -4322,15 +4650,27 @@ "node": ">= 0.4" } }, + "node_modules/es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": "sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "dev": true, + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/es-set-tostringtag": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.2.tgz", - "integrity": "sha512-BuDyupZt65P9D2D2vA/zqcI3G5xRsklm5N3xCwuiy+/vKy8i0ifdsQP1sLgO4tZDSCaQUSnmC48khknGMV3D2Q==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.0.3.tgz", + "integrity": "sha512-3T8uNMC3OQTHkFUsFq8r/BwAXLHvU/9O9mE0fBc/MY5iq/8H7ncvO947LmYA6ldWw9Uh8Yhf25zu6n7nML5QWQ==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.2", - "has-tostringtag": "^1.0.0", - "hasown": "^2.0.0" + "get-intrinsic": "^1.2.4", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.1" }, "engines": { "node": ">= 0.4" @@ -4386,16 +4726,16 @@ } }, "node_modules/eslint": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", - "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", "dev": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", - "@eslint/js": "8.57.0", - "@humanwhocodes/config-array": "^0.11.14", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "@ungap/structured-clone": "^1.2.0", @@ -4502,9 +4842,9 @@ } }, "node_modules/eslint-module-utils": { - "version": "2.8.0", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.0.tgz", - "integrity": 
"sha512-aWajIYfsqCKRDgUfjEXNN/JlrzauMuSEy5sbd7WXbtW3EH6A6MpwEh42c7qD+MqQo9QMJ6fWLAeIJynx0g6OAw==", + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.9.0.tgz", + "integrity": "sha512-McVbYmwA3NEKwRQY5g4aWMdcZE5xZxV8i8l7CqJSrameuGSQJtSWaL/LxTEzSKKaCcOhlpDR8XEfYXWPrdo/ZQ==", "dev": true, "dependencies": { "debug": "^3.2.7" @@ -4560,26 +4900,27 @@ } }, "node_modules/eslint-plugin-import": { - "version": "2.29.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz", - "integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==", + "version": "2.30.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.30.0.tgz", + "integrity": "sha512-/mHNE9jINJfiD2EKkg1BKyPyUk4zdnT54YgbOgfjSakWT5oyX/qQLVNTkehyfpcMxZXMy1zyonZ2v7hZTX43Yw==", "dev": true, "dependencies": { - "array-includes": "^3.1.7", - "array.prototype.findlastindex": "^1.2.3", + "@rtsao/scc": "^1.1.0", + "array-includes": "^3.1.8", + "array.prototype.findlastindex": "^1.2.5", "array.prototype.flat": "^1.3.2", "array.prototype.flatmap": "^1.3.2", "debug": "^3.2.7", "doctrine": "^2.1.0", "eslint-import-resolver-node": "^0.3.9", - "eslint-module-utils": "^2.8.0", - "hasown": "^2.0.0", - "is-core-module": "^2.13.1", + "eslint-module-utils": "^2.9.0", + "hasown": "^2.0.2", + "is-core-module": "^2.15.1", "is-glob": "^4.0.3", "minimatch": "^3.1.2", - "object.fromentries": "^2.0.7", - "object.groupby": "^1.0.1", - "object.values": "^1.1.7", + "object.fromentries": "^2.0.8", + "object.groupby": "^1.0.3", + "object.values": "^1.2.0", "semver": "^6.3.1", "tsconfig-paths": "^3.15.0" }, @@ -4676,9 +5017,9 @@ } }, "node_modules/eslint-plugin-promise": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.2.0.tgz", - "integrity": "sha512-QmAqwizauvnKOlifxyDj2ObfULpHQawlg/zQdgEixur9vl0CvZGv/LCJV2rtj3210QCoeGBzVMfMXqGAOr/4fA==", + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.6.0.tgz", + "integrity": "sha512-57Zzfw8G6+Gq7axm2Pdo3gW/Rx3h9Yywgn61uE/3elTCOePEHVrn2i5CdfBwA1BLK0Q0WqctICIUSqXZW/VprQ==", "dev": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" @@ -4986,17 +5327,6 @@ "node": ">=4" } }, - "node_modules/external-editor/node_modules/tmp": { - "version": "0.0.33", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", - "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", - "dependencies": { - "os-tmpdir": "~1.0.2" - }, - "engines": { - "node": ">=0.6.0" - } - }, "node_modules/extsprintf": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", @@ -5205,9 +5535,9 @@ } }, "node_modules/foreground-child": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", - "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.0.tgz", + "integrity": "sha512-Ld2g8rrAyMYFXBhEqMz8ZAHBi4J4uS1i/CxGMDnjyFWddMXLVcDp051DZfu+t7+ab7Wv6SMqpWmyFIj5UbfFvg==", "dependencies": { "cross-spawn": "^7.0.0", "signal-exit": "^4.0.1" @@ -5403,13 +5733,14 @@ } }, "node_modules/get-symbol-description": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.0.tgz", - "integrity": "sha512-2EmdH1YvIQiZpltCNgkuiUnyukzxM/R6NDJX31Ke3BG1Nq5b0S2PhX59UKi9vZpPDQVdqn+1IcaAwnzTT5vCjw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.0.2.tgz", + "integrity": "sha512-g0QYk1dZBxGwk+Ngc+ltRH2IBp2f7zBkBMBJZCDerh6EhlhSR6+9irMCuT/09zD6qkarHUSn529sK/yL4S27mg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.1" + "call-bind": "^1.0.5", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4" }, "engines": { "node": ">= 0.4" @@ -5506,17 +5837,16 @@ } }, "node_modules/got": { - "version": "14.4.1", - "resolved": "https://registry.npmjs.org/got/-/got-14.4.1.tgz", - "integrity": "sha512-IvDJbJBUeexX74xNQuMIVgCRRuNOm5wuK+OC3Dc2pnSoh1AOmgc7JVj7WC+cJ4u0aPcO9KZ2frTXcqK4W/5qTQ==", + "version": "14.4.2", + "resolved": "https://registry.npmjs.org/got/-/got-14.4.2.tgz", + "integrity": "sha512-+Te/qEZ6hr7i+f0FNgXx/6WQteSM/QqueGvxeYQQFm0GDfoxLVJ/oiwUKYMTeioColWUTdewZ06hmrBjw6F7tw==", "dependencies": { - "@sindresorhus/is": "^6.3.1", + "@sindresorhus/is": "^7.0.0", "@szmarczak/http-timer": "^5.0.1", "cacheable-lookup": "^7.0.0", "cacheable-request": "^12.0.1", "decompress-response": "^6.0.0", "form-data-encoder": "^4.0.2", - "get-stream": "^8.0.1", "http2-wrapper": "^2.2.1", "lowercase-keys": "^3.0.0", "p-cancelable": "^4.0.1", @@ -5530,17 +5860,6 @@ "url": "https://github.com/sindresorhus/got?sponsor=1" } }, - "node_modules/got/node_modules/get-stream": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", - "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/got/node_modules/type-fest": { "version": "4.19.0", "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.19.0.tgz", @@ -5555,8 +5874,7 @@ "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", - "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", - "dev": true + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" }, "node_modules/graphemer": { "version": "1.4.0", @@ -5598,6 +5916,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, "engines": { "node": ">=8" } @@ -5615,9 +5934,9 @@ } }, "node_modules/has-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.3.tgz", + "integrity": "sha512-SJ1amZAJUiZS+PhsVLf5tGydlaVB8EdFpaSO4gmiUKUOxk8qzn5AIy4ZeJUmh22znIdk/uMAUT2pl3FxzVUH+Q==", "dev": true, "engines": { "node": ">= 0.4" @@ -5639,12 +5958,12 @@ } }, "node_modules/has-tostringtag": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz", - "integrity": 
"sha512-kFjcSNhnlGV1kyoGk7OXKSawH5JOb/LzUc5w9B02hOTO0dfFRjbHQKvg1d6cf3HbeUmtU9VbbV3qzZ2Teh97WQ==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", "dev": true, "dependencies": { - "has-symbols": "^1.0.2" + "has-symbols": "^1.0.3" }, "engines": { "node": ">= 0.4" @@ -5688,9 +6007,9 @@ } }, "node_modules/hasown": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.0.tgz", - "integrity": "sha512-vUptKVTpIJhcczKBbgnS+RtcuYMB8+oNzPK2/Hp3hanz8JmpATdmmgLgSaadVREkDm+e2giHwY3ZRkyjSIDDFA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", "dev": true, "dependencies": { "function-bind": "^1.1.2" @@ -5936,34 +6255,30 @@ } }, "node_modules/inquirer": { - "version": "9.3.1", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-9.3.1.tgz", - "integrity": "sha512-A5IdVr1I04XqPlwrGgTJMKmzRg5ropqNpSeqo0vj1ZmluSCNSFaPZz4eazdPrhVcZfej7fCEYvD2NYa1KjkTJA==", - "dependencies": { - "@inquirer/figures": "^1.0.3", + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-11.1.0.tgz", + "integrity": "sha512-CmLAZT65GG/v30c+D2Fk8+ceP6pxD6RL+hIUOWAltCmeyEqWYwqu9v76q03OvjyZ3AB0C1Ala2stn1z/rMqGEw==", + "dependencies": { + "@inquirer/core": "^9.2.1", + "@inquirer/prompts": "^6.0.1", + "@inquirer/type": "^2.0.0", + "@types/mute-stream": "^0.0.4", "ansi-escapes": "^4.3.2", - "cli-width": "^4.1.0", - "external-editor": "^3.1.0", - "mute-stream": "1.0.0", - "ora": "^5.4.1", - "picocolors": "^1.0.1", + "mute-stream": "^1.0.0", "run-async": "^3.0.0", - "rxjs": "^7.8.1", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^6.2.0" + "rxjs": "^7.8.1" }, "engines": { "node": ">=18" } }, "node_modules/internal-slot": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.6.tgz", - "integrity": "sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==", + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.0.7.tgz", + "integrity": "sha512-NGnrKwXzSms2qUUih/ILZ5JBqNTSa1+ZmP6flaIp6KmSElgE9qdndzS3cqjrDovwFdmwsGsLdeFgB6suw+1e9g==", "dev": true, "dependencies": { - "get-intrinsic": "^1.2.2", + "es-errors": "^1.3.0", "hasown": "^2.0.0", "side-channel": "^1.0.4" }, @@ -5977,9 +6292,9 @@ "integrity": "sha512-lJUL9imLTNi1ZfXT+DU6rBBdbiKGBuay9B6xGSPVjUeQwaH1RIGqef8RZkUtHioLmSNpPR5M4HVKJGm1j8FWVQ==" }, "node_modules/ipaddr.js": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.1.0.tgz", - "integrity": "sha512-LlbxQ7xKzfBusov6UMi4MFpEg0m+mAm9xyNGEduwXMEDuf4WfzB/RZwMVYEd7IKGvh4IUkEXYxtAVu9T3OelJQ==", + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-2.2.0.tgz", + "integrity": "sha512-Ag3wB2o37wslZS19hZqorUnrnzSkpOVy+IiiDEiTqNubEYpYuHWIf6K4psgN2ZWKExS4xhVCrRVfb/wfW8fWJA==", "engines": { "node": ">= 10" } @@ -6009,14 +6324,16 @@ } }, "node_modules/is-array-buffer": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", - "integrity": "sha512-y+FyyR/w8vfIRq4eQcM1EYgSTnmHXPqaF+IgzgraytCFq5Xh8lllDVmAZolPJiZttZLeFSINPYMaEJ7/vWUa1w==", + "version": "3.0.4", + "resolved": 
"https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.4.tgz", + "integrity": "sha512-wcjaerHw0ydZwfhiKbXJWLDY8A7yV7KhjQOpb83hGgGfId/aQa4TOvwyzn2PuswW2gPCYEL/nEAiSVpdOj1lXw==", "dev": true, "dependencies": { "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.0", - "is-typed-array": "^1.1.10" + "get-intrinsic": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -6095,12 +6412,30 @@ } }, "node_modules/is-core-module": { - "version": "2.13.1", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", - "integrity": "sha512-hHrIjvZsftOsvKSn2TRYl63zvxsgE0K+0mYMoH6gD4omR5IWB2KynivBQczo3+wF1cCkjzvptnI9Q0sPU66ilw==", + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", + "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", "dev": true, "dependencies": { - "hasown": "^2.0.0" + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-data-view": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.1.tgz", + "integrity": "sha512-AHkaJrsUVW6wq6JS8y3JnM/GJF/9cf+k20+iDzlSaJrinEo5+7vRiteOSwBhHRiAyQATN1AmY4hwzxJKPmYf+w==", + "dev": true, + "dependencies": { + "is-typed-array": "^1.1.13" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -6185,18 +6520,10 @@ "url": "https://github.com/sponsors/wooorm" } }, - "node_modules/is-interactive": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-1.0.0.tgz", - "integrity": "sha512-2HvIEKRoqS62guEC+qBjpvRubdX910WCMuJTZ+I9yvqKU2/12eSL549HMwtabb4oupdj2sMP50k+XJfB/8JE6w==", - "engines": { - "node": ">=8" - } - }, "node_modules/is-negative-zero": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.2.tgz", - "integrity": "sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz", + "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==", "dev": true, "engines": { "node": ">= 0.4" @@ -6267,12 +6594,15 @@ } }, "node_modules/is-shared-array-buffer": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz", - "integrity": "sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.3.tgz", + "integrity": "sha512-nA2hv5XIhLR3uVzDDfCIknerhx8XUKnstuOERPNNIinXG7v9u+ohXF67vxm4TPTEPU6lm61ZkwP3c9PCB97rhg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2" + "call-bind": "^1.0.7" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -6320,12 +6650,12 @@ } }, "node_modules/is-typed-array": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.12.tgz", - "integrity": "sha512-Z14TF2JNG8Lss5/HMqt0//T9JeHXttXy5pH/DBU4vi98ozO2btxzq9MwYDZYnKwU8nRsz/+GVFVRDq3DkVuSPg==", + "version": "1.1.13", + "resolved": 
"https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.13.tgz", + "integrity": "sha512-uZ25/bUAlUY5fR4OKT4rZQEBrzQWYV9ZJYGGsUmEJ6thodVJ1HX64ePQ6Z0qPWP+m+Uq6e9UugrE38jeYsDSMw==", "dev": true, "dependencies": { - "which-typed-array": "^1.1.11" + "which-typed-array": "^1.1.14" }, "engines": { "node": ">= 0.4" @@ -6803,6 +7133,12 @@ "jest": "^27.0.3 || ^28.1.0 || ^29.0.0" } }, + "node_modules/jest-expect-message": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/jest-expect-message/-/jest-expect-message-1.1.3.tgz", + "integrity": "sha512-bTK77T4P+zto+XepAX3low8XVQxDgaEqh3jSTQOG8qvPpD69LsIdyJTa+RmnJh3HNSzJng62/44RPPc7OIlFxg==", + "dev": true + }, "node_modules/jest-get-type": { "version": "29.6.3", "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz", @@ -7361,11 +7697,63 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/js2xmlparser": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/js2xmlparser/-/js2xmlparser-4.0.2.tgz", + "integrity": "sha512-6n4D8gLlLf1n5mNLQPRfViYzu9RATblzPEtm1SthMX1Pjao0r9YI9nw7ZIfRxQMERS87mcswrg+r/OYrPRX6jA==", + "dependencies": { + "xmlcreate": "^2.0.4" + } + }, "node_modules/jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", "integrity": "sha512-UVU9dibq2JcFWxQPA6KCqj5O42VOmAY3zQUfEKxU0KpTGXwNoCjkX1e13eHNvw/xPynt6pU0rZ1htjWTNTSXsg==" }, + "node_modules/jsdoc": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/jsdoc/-/jsdoc-4.0.3.tgz", + "integrity": "sha512-Nu7Sf35kXJ1MWDZIMAuATRQTg1iIPdzh7tqJ6jjvaU/GfDf+qi5UV8zJR3Mo+/pYFvm8mzay4+6O5EWigaQBQw==", + "dependencies": { + "@babel/parser": "^7.20.15", + "@jsdoc/salty": "^0.2.1", + "@types/markdown-it": "^14.1.1", + "bluebird": "^3.7.2", + "catharsis": "^0.9.0", + "escape-string-regexp": "^2.0.0", + "js2xmlparser": "^4.0.2", + "klaw": "^3.0.0", + "markdown-it": "^14.1.0", + "markdown-it-anchor": "^8.6.7", + "marked": "^4.0.10", + "mkdirp": "^1.0.4", + "requizzle": "^0.2.3", + "strip-json-comments": "^3.1.0", + "underscore": "~1.13.2" + }, + "bin": { + "jsdoc": "jsdoc.js" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/jsdoc/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "engines": { + "node": ">=8" + } + }, + "node_modules/jsep": { + "version": "1.3.9", + "resolved": "https://registry.npmjs.org/jsep/-/jsep-1.3.9.tgz", + "integrity": "sha512-i1rBX5N7VPl0eYb6+mHNp52sEuaS2Wi8CDYx1X5sn9naevL78+265XJqy1qENEk7mRKwS06NHpUqiBwR7qeodw==", + "engines": { + "node": ">= 10.16.0" + } + }, "node_modules/jsesc": { "version": "2.5.2", "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", @@ -7423,9 +7811,14 @@ } }, "node_modules/jsonpath-plus": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/jsonpath-plus/-/jsonpath-plus-8.1.0.tgz", - "integrity": "sha512-qVTiuKztFGw0dGhYi3WNqvddx3/SHtyDT0xJaeyz4uP0d1tkpG+0y5uYQ4OcIo1TLAz3PE/qDOW9F0uDt3+CTw==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/jsonpath-plus/-/jsonpath-plus-9.0.0.tgz", + "integrity": "sha512-bqE77VIDStrOTV/czspZhTn+o27Xx9ZJRGVkdVShEtPoqsIx5yALv3lWVU6y+PqYvWPJNWE7ORCQheQkEe0DDA==", + "dependencies": { + "@jsep-plugin/assignment": "^1.2.1", + "@jsep-plugin/regex": "^1.0.3", + "jsep": "^1.3.8" + }, "bin": { "jsonpath": "bin/jsonpath-cli.js", "jsonpath-plus": 
"bin/jsonpath-cli.js" @@ -7456,6 +7849,14 @@ "json-buffer": "3.0.1" } }, + "node_modules/klaw": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/klaw/-/klaw-3.0.0.tgz", + "integrity": "sha512-0Fo5oir+O9jnXu5EefYbVK+mHMBeEVEy2cmctR1O1NECcCkPRreJKrS6Qt/j3KC2C148Dfo9i3pCmCMsdqGr0g==", + "dependencies": { + "graceful-fs": "^4.1.9" + } + }, "node_modules/kleur": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", @@ -7498,16 +7899,24 @@ "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", "dev": true }, + "node_modules/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "dependencies": { + "uc.micro": "^2.0.0" + } + }, "node_modules/listr2": { - "version": "8.2.1", - "resolved": "https://registry.npmjs.org/listr2/-/listr2-8.2.1.tgz", - "integrity": "sha512-irTfvpib/rNiD637xeevjO2l3Z5loZmuaRi0L0YE5LfijwVY96oyVn0DFD3o/teAok7nfobMG1THvvcHh/BP6g==", + "version": "8.2.4", + "resolved": "https://registry.npmjs.org/listr2/-/listr2-8.2.4.tgz", + "integrity": "sha512-opevsywziHd3zHCVQGAj8zu+Z3yHNkkoYhWIGnq54RrCVwLz0MozotJEDnKsIBLvkfLGN6BLOyAeRrYI0pKA4g==", "dependencies": { "cli-truncate": "^4.0.0", "colorette": "^2.0.20", "eventemitter3": "^5.0.1", - "log-update": "^6.0.0", - "rfdc": "^1.3.1", + "log-update": "^6.1.0", + "rfdc": "^1.4.1", "wrap-ansi": "^9.0.0" }, "engines": { @@ -7613,6 +8022,11 @@ "node": ">=8" } }, + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + }, "node_modules/lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", @@ -7630,55 +8044,14 @@ "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", "dev": true }, - "node_modules/log-symbols": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", - "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", - "dependencies": { - "chalk": "^4.1.0", - "is-unicode-supported": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/log-symbols/node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/log-symbols/node_modules/is-unicode-supported": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", - "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/log-update": { - "version": "6.0.0", - "resolved": 
"https://registry.npmjs.org/log-update/-/log-update-6.0.0.tgz", - "integrity": "sha512-niTvB4gqvtof056rRIrTZvjNYE4rCUzO6X/X+kYjd7WFxXeJ0NwEFnRxX6ehkvv3jTwrXnNdtAak5XYZuIyPFw==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/log-update/-/log-update-6.1.0.tgz", + "integrity": "sha512-9ie8ItPR6tjY5uYJh8K/Zrv/RMZ5VOlOWvtZdEHYSTFKZfIBPQa9tOAEeAWhd+AnIneLJ22w5fjOYtoutpWq5w==", "dependencies": { - "ansi-escapes": "^6.2.0", - "cli-cursor": "^4.0.0", - "slice-ansi": "^7.0.0", + "ansi-escapes": "^7.0.0", + "cli-cursor": "^5.0.0", + "slice-ansi": "^7.1.0", "strip-ansi": "^7.1.0", "wrap-ansi": "^9.0.0" }, @@ -7690,14 +8063,14 @@ } }, "node_modules/log-update/node_modules/ansi-escapes": { - "version": "6.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-6.2.0.tgz", - "integrity": "sha512-kzRaCqXnpzWs+3z5ABPQiVke+iq0KXkHo8xiWV4RPTi5Yli0l97BEQuhXV1s7+aSU/fu1kUuxgS4MsQ0fRuygw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.0.0.tgz", + "integrity": "sha512-GdYO7a61mR0fOlAsvC9/rIHf7L96sBc6dEWzeOu+KAea5bZyQRPIpojrVoI4AXGJS/ycu/fBTdLrUkA4ODrvjw==", "dependencies": { - "type-fest": "^3.0.0" + "environment": "^1.0.0" }, "engines": { - "node": ">=14.16" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -7725,20 +8098,6 @@ "url": "https://github.com/chalk/ansi-styles?sponsor=1" } }, - "node_modules/log-update/node_modules/cli-cursor": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-4.0.0.tgz", - "integrity": "sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==", - "dependencies": { - "restore-cursor": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/log-update/node_modules/emoji-regex": { "version": "10.3.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.3.0.tgz", @@ -7758,21 +8117,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/log-update/node_modules/restore-cursor": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-4.0.0.tgz", - "integrity": "sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==", - "dependencies": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/log-update/node_modules/slice-ansi": { "version": "7.1.0", "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-7.1.0.tgz", @@ -7789,9 +8133,9 @@ } }, "node_modules/log-update/node_modules/string-width": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.1.0.tgz", - "integrity": "sha512-SEIJCWiX7Kg4c129n48aDRwLbFb2LJmXXFrWBG4NGaRtMQ3myKPKbwrD1BKqQn74oCoNMBVrfDEr5M9YxCsrkw==", + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz", + "integrity": "sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ==", "dependencies": { "emoji-regex": "^10.3.0", "get-east-asian-width": "^1.0.0", @@ -7818,17 +8162,6 @@ "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, - "node_modules/log-update/node_modules/type-fest": { - "version": "3.13.1", - "resolved": 
"https://registry.npmjs.org/type-fest/-/type-fest-3.13.1.tgz", - "integrity": "sha512-tLq3bSNx+xSpwvAJnzrK0Ep5CLNWjvFTOp71URMaAEWBfRb9nnJiBoUe0tF8bI4ZFO3omgBR6NvnbzVUT3Ly4g==", - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/log-update/node_modules/wrap-ansi": { "version": "9.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-9.0.0.tgz", @@ -7932,6 +8265,42 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/markdown-it": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "dependencies": { + "argparse": "^2.0.1", + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" + }, + "bin": { + "markdown-it": "bin/markdown-it.mjs" + } + }, + "node_modules/markdown-it-anchor": { + "version": "8.6.7", + "resolved": "https://registry.npmjs.org/markdown-it-anchor/-/markdown-it-anchor-8.6.7.tgz", + "integrity": "sha512-FlCHFwNnutLgVTflOYHPW2pPcl2AACqVzExlkGQNsi4CJgqOHN7YTgDd4LuhgN1BFO3TS0vLAruV1Td6dwWPJA==", + "peerDependencies": { + "@types/markdown-it": "*", + "markdown-it": "*" + } + }, + "node_modules/marked": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-4.3.0.tgz", + "integrity": "sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A==", + "bin": { + "marked": "bin/marked.js" + }, + "engines": { + "node": ">= 12" + } + }, "node_modules/mdast-comment-marker": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/mdast-comment-marker/-/mdast-comment-marker-3.0.0.tgz", @@ -8083,6 +8452,11 @@ "url": "https://opencollective.com/unified" } }, + "node_modules/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==" + }, "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", @@ -8567,10 +8941,22 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, "engines": { "node": ">=6" } }, + "node_modules/mimic-function": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", + "integrity": "sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA==", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/mimic-response": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-4.0.0.tgz", @@ -8612,9 +8998,9 @@ } }, "node_modules/minipass": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.0.4.tgz", - "integrity": "sha512-jYofLM5Dam9279rdkWzqHozUo4ybjdZmCsDHePy5V/PbBcVMiSZR97gmAy45aqi8CK1lG2ECd356FU86avfwUQ==", + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", "engines": { "node": ">=16 || 
14 >=14.17" } @@ -8695,7 +9081,6 @@ "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "dev": true, "bin": { "mkdirp": "bin/cmd.js" }, @@ -8811,9 +9196,9 @@ } }, "node_modules/nyc": { - "version": "17.0.0", - "resolved": "https://registry.npmjs.org/nyc/-/nyc-17.0.0.tgz", - "integrity": "sha512-ISp44nqNCaPugLLGGfknzQwSwt10SSS5IMoPR7GLoMAyS18Iw5js8U7ga2VF9lYuMZ42gOHr3UddZw4WZltxKg==", + "version": "17.1.0", + "resolved": "https://registry.npmjs.org/nyc/-/nyc-17.1.0.tgz", + "integrity": "sha512-U42vQ4czpKa0QdI1hu950XuNhYqgoM+ZF1HT+VuUHL9hPfDPVvNQyltmMqdE9bUHMVa+8yNbc3QKTj8zQhlVxQ==", "dev": true, "dependencies": { "@istanbuljs/load-nyc-config": "^1.0.0", @@ -8823,7 +9208,7 @@ "decamelize": "^1.2.0", "find-cache-dir": "^3.2.0", "find-up": "^4.1.0", - "foreground-child": "^2.0.0", + "foreground-child": "^3.3.0", "get-package-type": "^0.1.0", "glob": "^7.1.6", "istanbul-lib-coverage": "^3.0.0", @@ -8868,19 +9253,6 @@ "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==", "dev": true }, - "node_modules/nyc/node_modules/foreground-child": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz", - "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==", - "dev": true, - "dependencies": { - "cross-spawn": "^7.0.0", - "signal-exit": "^3.0.2" - }, - "engines": { - "node": ">=8.0.0" - } - }, "node_modules/nyc/node_modules/make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", @@ -8964,10 +9336,13 @@ } }, "node_modules/object-inspect": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", - "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "version": "1.13.2", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.2.tgz", + "integrity": "sha512-IRZSRuzJiynemAXPYtPe5BoI/RESNYR7TYm50MC5Mqbd3Jmw5y790sErYw3V6SryFJD64b74qQQs9wn5Bg/k3g==", "dev": true, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -8982,13 +9357,13 @@ } }, "node_modules/object.assign": { - "version": "4.1.4", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.4.tgz", - "integrity": "sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ==", + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.5.tgz", + "integrity": "sha512-byy+U7gp+FVwmyzKPYhW2h5l3crpmGsxl7X2s8y43IgxvG4g3QZ6CffDtsNQy1WsmZpQbO+ybo0AlW7TY6DcBQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.1.4", + "call-bind": "^1.0.5", + "define-properties": "^1.2.1", "has-symbols": "^1.0.3", "object-keys": "^1.1.1" }, @@ -9000,14 +9375,15 @@ } }, "node_modules/object.fromentries": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.7.tgz", - "integrity": "sha512-UPbPHML6sL8PI/mOqPwsH4G6iyXcCGzLin8KvEPenOZN5lpCNBZZQ+V62vdjB1mQHrmqGQt5/OJzemUA+KJmEA==", + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz", + "integrity": 
"sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -9017,26 +9393,28 @@ } }, "node_modules/object.groupby": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.1.tgz", - "integrity": "sha512-HqaQtqLnp/8Bn4GL16cj+CUYbnpe1bh0TtEaWvybszDG4tgxCJuRpV8VGuvNaI1fAnI4lUJzDG55MXcOH4JZcQ==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz", + "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1", - "get-intrinsic": "^1.2.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.2" + }, + "engines": { + "node": ">= 0.4" } }, "node_modules/object.values": { - "version": "1.1.7", - "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.7.tgz", - "integrity": "sha512-aU6xnDFYT3x17e/f0IiiwlGPTy2jzMySGfUB4fq6z7CV8l85CWHDk5ErhyhpfDHhrOMwGFhSQkhMGHaIotA6Ng==", + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.0.tgz", + "integrity": "sha512-yBYjY9QX2hnRmZHAjG/f13MzmBzxzYgQhFrke06TTyKY5zSTEqkOeukBzIdVA3j3ulu8Qa3MbVFShV7T2RmGtQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -9082,6 +9460,7 @@ "version": "5.1.2", "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, "dependencies": { "mimic-fn": "^2.1.0" }, @@ -9142,54 +9521,6 @@ "node": ">= 0.8.0" } }, - "node_modules/ora": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/ora/-/ora-5.4.1.tgz", - "integrity": "sha512-5b6Y85tPxZZ7QytO+BQzysW31HJku27cRIlkbAXaNx+BdcVi+LlRFmVXzeF6a7JCwJpyw5c4b+YSVImQIrBpuQ==", - "dependencies": { - "bl": "^4.1.0", - "chalk": "^4.1.0", - "cli-cursor": "^3.1.0", - "cli-spinners": "^2.5.0", - "is-interactive": "^1.0.0", - "is-unicode-supported": "^0.1.0", - "log-symbols": "^4.1.0", - "strip-ansi": "^6.0.0", - "wcwidth": "^1.0.1" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ora/node_modules/chalk": { - "version": "4.1.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", - "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/ora/node_modules/is-unicode-supported": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", - "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", - "engines": { - "node": ">=10" - }, - 
"funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/os-tmpdir": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", @@ -9402,7 +9733,8 @@ "node_modules/picocolors": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", - "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==" + "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "dev": true }, "node_modules/picomatch": { "version": "2.3.1", @@ -9446,29 +9778,6 @@ "split2": "^4.0.0" } }, - "node_modules/pino-abstract-transport/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, "node_modules/pino-abstract-transport/node_modules/readable-stream": { "version": "4.4.2", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.4.2.tgz", @@ -9508,29 +9817,6 @@ "pino-pretty": "bin.js" } }, - "node_modules/pino-pretty/node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, "node_modules/pino-pretty/node_modules/readable-stream": { "version": "4.4.2", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.4.2.tgz", @@ -9581,6 +9867,15 @@ "node": ">=4" } }, + "node_modules/possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "dev": true, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/prelude-ls": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", @@ -9718,6 +10013,14 @@ "node": ">=6" } }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "engines": { + "node": ">=6" + } + }, "node_modules/pure-rand": { "version": "6.0.4", "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.0.4.tgz", @@ -9861,14 +10164,15 @@ "integrity": "sha512-urBwgfrvVP/eAyXx4hluJivBKzuEbSQs9rKWCrCkbSxNv8mxPcUZKeuoF3Uy4mJl3Lwprp6yy5/39VWigZ4K6Q==" }, "node_modules/regexp.prototype.flags": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.1.tgz", - "integrity": 
"sha512-sy6TXMN+hnP/wMy+ISxg3krXx7BAtWVO4UouuCN/ziM9UEne0euamVNafDfvC83bRNr95y0V5iijeDQFUNpvrg==", + "version": "1.5.2", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", + "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "set-function-name": "^2.0.0" + "call-bind": "^1.0.6", + "define-properties": "^1.2.1", + "es-errors": "^1.3.0", + "set-function-name": "^2.0.1" }, "engines": { "node": ">= 0.4" @@ -10601,6 +10905,14 @@ "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", "dev": true }, + "node_modules/requizzle": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/requizzle/-/requizzle-0.2.4.tgz", + "integrity": "sha512-JRrFk1D4OQ4SqovXOgdav+K8EAhSB/LJZqCz8tbX0KObcdeM15Ss59ozWMBWmmINMagCwmqn4ZNryUGpBsl6Jw==", + "dependencies": { + "lodash": "^4.17.21" + } + }, "node_modules/resolve": { "version": "1.22.8", "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", @@ -10677,15 +10989,43 @@ } }, "node_modules/restore-cursor": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", - "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz", + "integrity": "sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==", "dependencies": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" + "onetime": "^7.0.0", + "signal-exit": "^4.1.0" }, "engines": { - "node": ">=8" + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/restore-cursor/node_modules/onetime": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz", + "integrity": "sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ==", + "dependencies": { + "mimic-function": "^5.0.0" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/restore-cursor/node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/reusify": { @@ -10704,9 +11044,9 @@ "integrity": "sha512-MjOWxM065+WswwnmNONOT+bD1nXzY9Km6u3kzvnx8F8/HXGZdz3T6e6vZJ8Q/RIMUSp/nxqjH3GwvJDy8ijeQQ==" }, "node_modules/rfdc": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.1.tgz", - "integrity": "sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg==" + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.4.1.tgz", + "integrity": "sha512-q1b3N5QkRUWUl7iyylaaj3kOpIT0N2i9MqIEQXP73GVsN9cw3fdx8X63cEmWhJGi2PPCF23Ijp7ktmd39rawIA==" }, "node_modules/rimraf": { "version": "3.0.2", @@ -10763,13 +11103,13 @@ } }, "node_modules/safe-array-concat": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.0.1.tgz", - "integrity": "sha512-6XbUAseYE2KtOuGueyeobCySj9L4+66Tn6KQMOPQJrAJEowYKW/YR/MGJZl7FdydUdaFu4LYyDZjxf4/Nmo23Q==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.2.tgz", + "integrity": "sha512-vj6RsCsWBCf19jIeHEfkRMw8DPiBb+DMXklQ/1SGDHOMlHdPUkZXFQ2YdplS23zESTijAcurb1aSgJA3AgMu1Q==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1", + "call-bind": "^1.0.7", + "get-intrinsic": "^1.2.4", "has-symbols": "^1.0.3", "isarray": "^2.0.5" }, @@ -10800,15 +11140,18 @@ ] }, "node_modules/safe-regex-test": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.0.tgz", - "integrity": "sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==", + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.0.3.tgz", + "integrity": "sha512-CdASjNJPvRa7roO6Ra/gLYBTzYzzPyyBXxIMdGW3USQLyjWEls2RgW5UBTXaQVp+OrpeCK3bLem8smtmheoRuw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.1.3", + "call-bind": "^1.0.6", + "es-errors": "^1.3.0", "is-regex": "^1.1.4" }, + "engines": { + "node": ">= 0.4" + }, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -10838,9 +11181,9 @@ "dev": true }, "node_modules/semver": { - "version": "7.6.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz", - "integrity": "sha512-FNAIBWCx9qcRhoHcgcJ0gvU7SN1lYU2ZXuSfl04bSC5OpvDHFyJCjdNHomPXxjQlCBU67YW64PzY7/VIEH7F2w==", + "version": "7.6.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", + "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "bin": { "semver": "bin/semver.js" }, @@ -10872,14 +11215,15 @@ } }, "node_modules/set-function-name": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.1.tgz", - "integrity": "sha512-tMNCiqYVkXIZgc2Hnoy2IvC/f8ezc5koaRFkCjrpWzGpCd3qbZXPzVy9MAZzK1ch/X0jvSkojys3oqJN0qCmdA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz", + "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==", "dev": true, "dependencies": { - "define-data-property": "^1.0.1", + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", "functions-have-names": "^1.2.3", - "has-property-descriptors": "^1.0.0" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -10905,14 +11249,18 @@ } }, "node_modules/side-channel": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz", - "integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.6.tgz", + "integrity": "sha512-fDW/EZ6Q9RiO8eFG8Hj+7u/oW+XrPTIChwCOM2+th2A6OblDtYYIpve9m+KvI9Z4C9qSEXlaGR6bTEYHReuglA==", "dev": true, "dependencies": { - "call-bind": "^1.0.0", - "get-intrinsic": "^1.0.2", - "object-inspect": "^1.9.0" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.4", + "object-inspect": "^1.13.1" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -10921,7 +11269,8 @@ 
"node_modules/signal-exit": { "version": "3.0.7", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true }, "node_modules/simple-swizzle": { "version": "0.2.2", @@ -11152,9 +11501,9 @@ } }, "node_modules/stream-buffers": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-3.0.2.tgz", - "integrity": "sha512-DQi1h8VEBA/lURbSwFtEHnSTb9s2/pwLEaFuNhXwy1Dx3Sa0lOuYT2yNUr4/j2fs8oCAMANtrZ5OrPZtyVs3MQ==", + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/stream-buffers/-/stream-buffers-3.0.3.tgz", + "integrity": "sha512-pqMqwQCso0PBJt2PQmDO0cFj0lyqmiwOMiMSkVtRokl7e+ZTRYgDHKnuZNbqjiJXgsg4nuqtD/zxuo9KqTp0Yw==", "engines": { "node": ">= 0.10.0" } @@ -11208,14 +11557,15 @@ } }, "node_modules/string.prototype.trim": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.8.tgz", - "integrity": "sha512-lfjY4HcixfQXOfaqCvcBuOIapyaroTXhbkfJN3gcB1OtyupngWK4sEET9Knd0cXd28kTUqu/kHoV4HKSJdnjiQ==", + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.9.tgz", + "integrity": "sha512-klHuCNxiMZ8MlsOihJhJEBJAiMVqU3Z2nEXWfWnIqjN0gEFS9J9+IxKozWWtQGcgoa1WUZzLjKPTr4ZHNFTFxw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-abstract": "^1.23.0", + "es-object-atoms": "^1.0.0" }, "engines": { "node": ">= 0.4" @@ -11225,28 +11575,31 @@ } }, "node_modules/string.prototype.trimend": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.7.tgz", - "integrity": "sha512-Ni79DqeB72ZFq1uH/L6zJ+DKZTkOtPIHovb3YZHQViE+HDouuU4mBrLOLDn5Dde3RF8qw5qVETEjhu9locMLvA==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.8.tgz", + "integrity": "sha512-p73uL5VCHCO2BZZ6krwwQE3kCzM7NKmis8S//xEC6fQonchbum4eP6kR4DLEjQFO3Wnj3Fuo8NM0kOSjVdHjZQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" }, "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/string.prototype.trimstart": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.7.tgz", - "integrity": "sha512-NGhtDFu3jCEm7B4Fy0DpLewdJQOZcQ0rGbwQ/+stjnrp2i+rlKeCvos9hOIeCmqwratM47OBxY7uFZzjxHXmrg==", + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz", + "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "define-properties": "^1.2.0", - "es-abstract": "^1.22.1" + "call-bind": "^1.0.7", + "define-properties": "^1.2.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -11322,6 +11675,7 @@ "version": "7.2.0", "resolved": 
"https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, "dependencies": { "has-flag": "^4.0.0" }, @@ -11342,13 +11696,13 @@ } }, "node_modules/tar": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.2.0.tgz", - "integrity": "sha512-hctwP0Nb4AB60bj8WQgRYaMOuJYRAPMGiQUAotms5igN8ppfQM+IvjQ5HcKu1MaZh2Wy2KWVTe563Yj8dfc14w==", + "version": "7.4.3", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz", + "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==", "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", - "minipass": "^7.1.0", + "minipass": "^7.1.2", "minizlib": "^3.0.1", "mkdirp": "^3.0.1", "yallist": "^5.0.0" @@ -11357,14 +11711,6 @@ "node": ">=18" } }, - "node_modules/tar/node_modules/minipass": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.0.tgz", - "integrity": "sha512-oGZRv2OT1lO2UF1zUcwdTb3wqUwI0kBGTgt/T7OdSj6M6N5m3o5uPf0AIW6lVxGGoiWUR7e2AwTE+xiwK8WQig==", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/tar/node_modules/mkdirp": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", @@ -11420,6 +11766,17 @@ "real-require": "^0.2.0" } }, + "node_modules/tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dependencies": { + "os-tmpdir": "~1.0.2" + }, + "engines": { + "node": ">=0.6.0" + } + }, "node_modules/tmpl": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", @@ -11511,9 +11868,9 @@ } }, "node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.7.0.tgz", + "integrity": "sha512-gLXCKdN1/j47AiHiOkJN69hJmcbGTHI0ImLmbYLHykhgeN0jVGola9yVjFgzCUklsZQMW55o+dW7IXv3RCXDzA==" }, "node_modules/tsyringe": { "version": "4.8.0", @@ -11580,29 +11937,30 @@ } }, "node_modules/typed-array-buffer": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.0.tgz", - "integrity": "sha512-Y8KTSIglk9OZEr8zywiIHG/kmQ7KWyjseXs1CbSo8vC42w7hg2HgYTxSWwP0+is7bWDc1H+Fo026CpHFwm8tkw==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.2.tgz", + "integrity": "sha512-gEymJYKZtKXzzBzM4jqa9w6Q1Jjm7x2d+sh19AdsD4wqnMPDYyvwpsIc2Q/835kHuo3BEQ7CjelGhfTsoBb2MQ==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", - "get-intrinsic": "^1.2.1", - "is-typed-array": "^1.1.10" + "call-bind": "^1.0.7", + "es-errors": "^1.3.0", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" } }, "node_modules/typed-array-byte-length": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.0.tgz", - "integrity": "sha512-Or/+kvLxNpeQ9DtSydonMxCx+9ZXOswtwJn17SNLvhptaXYDJvkFFP5zbfU/uLmvnBJlI4yrnXRxpdWH/M5tNA==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.1.tgz", + "integrity": 
"sha512-3iMJ9q0ao7WE9tWcaYKIptkNBuOIcZCCT0d4MRvuuH88fEoEH62IuQe0OtraD3ebQEoTRk8XCBoknUNc1Y67pw==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" @@ -11612,16 +11970,17 @@ } }, "node_modules/typed-array-byte-offset": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.0.tgz", - "integrity": "sha512-RD97prjEt9EL8YgAgpOkf3O4IF9lhJFr9g0htQkm0rchFp/Vx7LW5Q8fSXXub7BXAODyUQohRMyOc3faCPd0hg==", + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.2.tgz", + "integrity": "sha512-Ous0vodHa56FviZucS2E63zkgtgrACj7omjwd/8lTEMEPFFyjfixMZ1ZXenpgCFBBt4EC1J2XsyVS2gkG0eTFA==", "dev": true, "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.2", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "has-proto": "^1.0.1", - "is-typed-array": "^1.1.10" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13" }, "engines": { "node": ">= 0.4" @@ -11631,14 +11990,20 @@ } }, "node_modules/typed-array-length": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.4.tgz", - "integrity": "sha512-KjZypGq+I/H7HI5HlOoGHkWUUGq+Q0TPhQurLbyrVrvnKTBgzLhIJ7j6J/XTQOi0d1RjyZ0wdas8bKs2p0x3Ng==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.6.tgz", + "integrity": "sha512-/OxDN6OtAk5KBpGb28T+HZc2M+ADtvRxXrKKbUwtsLgdoxgX13hyy7ek6bFRl5+aBs2yZzB0c4CnQfAtVypW/g==", "dev": true, "dependencies": { - "call-bind": "^1.0.2", + "call-bind": "^1.0.7", "for-each": "^0.3.3", - "is-typed-array": "^1.1.9" + "gopd": "^1.0.1", + "has-proto": "^1.0.3", + "is-typed-array": "^1.1.13", + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" }, "funding": { "url": "https://github.com/sponsors/ljharb" @@ -11659,6 +12024,11 @@ "is-typedarray": "^1.0.0" } }, + "node_modules/uc.micro": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==" + }, "node_modules/unbox-primitive": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.0.2.tgz", @@ -11674,10 +12044,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/underscore": { + "version": "1.13.7", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.13.7.tgz", + "integrity": "sha512-GMXzWtsc57XAtguZgaQViUOzs0KTkk8ojr3/xAxXLITqf/3EMwxC0inyETfDFjH/Krbhuep0HNbbjI9i/q3F3g==" + }, "node_modules/undici-types": { - "version": "5.25.3", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.25.3.tgz", - "integrity": "sha512-Ga1jfYwRn7+cP9v8auvEXN1rX3sWqlayd4HP7OKk4mZWylEmu3KzXDUGrQUN6Ol7qo1gPvB2e5gX6udnyEPgdA==" + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==" }, "node_modules/unified": { "version": "11.0.4", @@ -12246,14 +12621,6 @@ "makeerror": "1.0.12" } }, - "node_modules/wcwidth": { - "version": "1.0.1", - "resolved": 
"https://registry.npmjs.org/wcwidth/-/wcwidth-1.0.1.tgz", - "integrity": "sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==", - "dependencies": { - "defaults": "^1.0.3" - } - }, "node_modules/which": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", @@ -12291,16 +12658,16 @@ "dev": true }, "node_modules/which-typed-array": { - "version": "1.1.13", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.13.tgz", - "integrity": "sha512-P5Nra0qjSncduVPEAr7xhoF5guty49ArDTwzJ/yNuPIbZppyRxFQsRCWrocxIY+CnMVG+qfbU2FmDKyvSGClow==", + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.15.tgz", + "integrity": "sha512-oV0jmFtUky6CXfkqehVvBP/LSWJ2sy4vWMioiENyJLePrBO/yKyV9OyJySfAKosh+RYkIl5zJCNZ8/4JncrpdA==", "dev": true, "dependencies": { - "available-typed-arrays": "^1.0.5", - "call-bind": "^1.0.4", + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.7", "for-each": "^0.3.3", "gopd": "^1.0.1", - "has-tostringtag": "^1.0.0" + "has-tostringtag": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -12310,15 +12677,15 @@ } }, "node_modules/winston": { - "version": "3.13.0", - "resolved": "https://registry.npmjs.org/winston/-/winston-3.13.0.tgz", - "integrity": "sha512-rwidmA1w3SE4j0E5MuIufFhyJPBDG7Nu71RkZor1p2+qHvJSZ9GYDA81AyleQcZbh/+V6HjeBdfnTZJm9rSeQQ==", + "version": "3.14.2", + "resolved": "https://registry.npmjs.org/winston/-/winston-3.14.2.tgz", + "integrity": "sha512-CO8cdpBB2yqzEf8v895L+GNKYJiEq8eKlHU38af3snQBQ+sdAIUepjMSguOIJC7ICbzm0ZI+Af2If4vIJrtmOg==", "dependencies": { "@colors/colors": "^1.6.0", "@dabh/diagnostics": "^2.0.2", "async": "^3.2.3", "is-stream": "^2.0.0", - "logform": "^2.4.0", + "logform": "^2.6.0", "one-time": "^1.0.0", "readable-stream": "^3.4.0", "safe-stable-stringify": "^2.3.1", @@ -12417,6 +12784,11 @@ "integrity": "sha512-huCv9IH9Tcf95zuYCsQraZtWnJvBtLVE0QHMOs8bWyZAFZNDcYjsPq1nEx8jKA9y+Beo9v+7OBPRisQTjinQMw==", "dev": true }, + "node_modules/xmlcreate": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/xmlcreate/-/xmlcreate-2.0.4.tgz", + "integrity": "sha512-nquOebG4sngPmGPICTS5EnxqhKbCmz5Ox5hsszI2T6U5qdrJizBc+0ilYSEjTSzU0yZcmvppztXe/5Al5fUwdg==" + }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", @@ -12432,9 +12804,9 @@ "dev": true }, "node_modules/yaml": { - "version": "2.4.5", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.4.5.tgz", - "integrity": "sha512-aBx2bnqDzVOyNKfsysjA2ms5ZlnjSAW2eG3/L5G/CSujfjLJTJsEw1bGw8kCf04KodQWk1pxlGnZ56CRxiawmg==", + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.5.1.tgz", + "integrity": "sha512-bLQOjaX/ADgQ20isPJRvF0iRUHIxVhYvr53Of7wGcWlO2jvtUlH5m87DsmulFVxRpNLOnI4tB6p/oh8D7kpn9Q==", "bin": { "yaml": "bin.mjs" }, @@ -12479,6 +12851,17 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/yoctocolors-cjs": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/yoctocolors-cjs/-/yoctocolors-cjs-2.1.2.tgz", + "integrity": "sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==", + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/zwitch": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz", diff --git a/package.json b/package.json index bea7eecff..93b8c13d0 100644 --- 
a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@hashgraph/solo", - "version": "0.27.0", + "version": "0.30.1", "description": "An opinionated CLI tool to deploy and manage private Hedera Networks.", "main": "src/index.mjs", "type": "module", @@ -13,18 +13,22 @@ "scripts": { "test": "cross-env NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME=\"Unit Tests\" jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/unit' --testPathIgnorePatterns=\".*/e2e/.*\"", "test-e2e-all": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E All Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e' --testPathIgnorePatterns=\".*/unit/.*\"", - "test-e2e": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e' --testPathIgnorePatterns=\".*/unit/.*\" --testPathIgnorePatterns=\".*/e2e/commands/mirror_node.*\" --testPathIgnorePatterns=\".*/e2e/commands/node.*\" --testPathIgnorePatterns=\".*/e2e/commands/relay.*\"", + "test-e2e-standard": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Standard Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-standard.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-standard' --testPathIgnorePatterns=\".*/unit/.*\" --testPathIgnorePatterns=\".*/e2e/commands/mirror_node.*\" --testPathIgnorePatterns=\".*/e2e/commands/node.*\" --testPathIgnorePatterns=\".*/e2e/commands/separate_node.*\" --testPathIgnorePatterns=\".*/e2e/commands/relay.*\"", "test-e2e-mirror-node": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Mirror Node Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-mirror-node.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-mirror-node' --testRegex=\".*\\/e2e\\/commands\\/mirror_node\\.test\\.mjs\"", - "test-e2e-node-pem-stop-add": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Node PEM Stop Add Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-node-pem-stop-add.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-node-pem-stop-add' --testRegex=\".*\\/e2e\\/commands\\/node_pem_stop_add\\.test\\.mjs\"", - "test-e2e-node-pfx-kill-add": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Node PFX Kill Add Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-node-pfx-kill-add.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-node-pfx-kill-add' --testRegex=\".*\\/e2e\\/commands\\/node_pfx_kill_add\\.test\\.mjs\"", - "test-e2e-node-local-build": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Node Local Custom Build' JEST_JUNIT_OUTPUT_NAME='junit-e2e-node-local-build.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-node-local-build' --testRegex=\".*\\/e2e\\/commands\\/node-local.*\\.test\\.mjs\"", + "test-e2e-node-pem-stop": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Node PEM Stop Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-node-pem-stop.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-node-pem-stop' --testRegex=\".*\\/e2e\\/commands\\/node_pem_stop\\.test\\.mjs\"", + "test-e2e-node-pem-kill": 
"NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Node PEM Kill Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-node-pem-kill.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-node-pem-kill' --testRegex=\".*\\/e2e\\/commands\\/node_pem_kill\\.test\\.mjs\"", + "test-e2e-node-local-build": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Node Local Build Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-node-local-build.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-node-local-build' --testRegex=\".*\\/e2e\\/commands\\/node_local.*\\.test\\.mjs\"", + "test-e2e-node-add": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Node Add Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-node-add.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-node-add' --testRegex=\".*\\/e2e\\/commands\\/node_add.*\\.test\\.mjs\"", + "test-e2e-node-add-separate-commands": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Node Add - Separate commands Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-node-add-separate-commands.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-node-add-separate-commands' --testRegex=\".*\\/e2e\\/commands\\/separate_node_add.*\\.test\\.mjs\"", + "test-e2e-node-update": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Node Update Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-node-update.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-node-update' --testRegex=\".*\\/e2e\\/commands\\/node_update.*\\.test\\.mjs\"", + "test-e2e-node-delete": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Node Delete Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-node-delete.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-node-delete' --testRegex=\".*\\/e2e\\/commands\\/node_delete.*\\.test\\.mjs\"", "test-e2e-relay": "NODE_OPTIONS=--experimental-vm-modules JEST_SUITE_NAME='Jest E2E Relay Tests' JEST_JUNIT_OUTPUT_NAME='junit-e2e-relay.xml' jest --runInBand --detectOpenHandles --forceExit --coverage --coverageDirectory='coverage/e2e-relay' --testRegex=\".*\\/e2e\\/commands\\/relay\\.test\\.mjs\"", "merge-clean": "rm -rf .nyc_output && mkdir .nyc_output && rm -rf coverage/lcov-report && rm -rf coverage/solo && rm coverage/*.*", "merge-e2e": "nyc merge ./coverage/e2e/ .nyc_output/coverage.json", "merge-unit": "nyc merge ./coverage/unit/ .nyc_output/coverage.json", "report-coverage": "npm run merge-clean && npm run merge-unit && npm run merge-e2e && nyc report --reporter=json --reporter=html --reporter=lcov", "solo": "NODE_OPTIONS=--experimental-vm-modules node --no-deprecation solo.mjs", - "check": "remark . --quiet --frail && eslint .", + "check": "rm -rf docs/public/*; remark . --quiet --frail && eslint .; cd docs; jsdoc -c jsdoc.conf.json", "format": "remark . 
--quiet --frail --output && eslint --fix .", "test-setup": "./test/e2e/setup-e2e.sh", "test-coverage": "npm run test && npm run test-setup && npm run test-e2e-all && npm run report-coverage" @@ -37,46 +41,47 @@ "author": "Swirlds Labs", "license": "Apache2.0", "dependencies": { - "@hashgraph/proto": "^2.15.0", - "@hashgraph/sdk": "^2.47.0", - "@kubernetes/client-node": "^0.21.0", - "@listr2/prompt-adapter-enquirer": "^2.0.8", - "@peculiar/x509": "^1.11.0", - "adm-zip": "^0.5.14", + "@hashgraph/sdk": "^2.51.0", + "@kubernetes/client-node": "^0.22.0", + "@listr2/prompt-adapter-enquirer": "^2.0.11", + "@peculiar/x509": "^1.12.2", + "adm-zip": "^0.5.16", "chalk": "^5.3.0", "dot-object": "^2.1.5", "dotenv": "^16.4.5", "enquirer": "^2.4.1", "esm": "^3.2.25", "figlet": "^1.7.0", - "got": "^14.4.1", - "inquirer": "^9.3.1", + "got": "^14.4.2", + "inquirer": "^11.1.0", "ip": "^2.0.1", + "jsdoc": "^4.0.3", "js-base64": "^3.7.7", "js-yaml": "^4.1.0", - "listr2": "^8.2.1", - "semver": "^7.6.2", - "stream-buffers": "^3.0.2", - "tar": "^7.2.0", + "listr2": "^8.2.4", + "semver": "^7.6.3", + "stream-buffers": "^3.0.3", + "tar": "^7.4.3", "uuid": "^10.0.0", - "winston": "^3.13.0", - "yaml": "^2.4.5", + "winston": "^3.14.2", + "yaml": "^2.5.1", "yargs": "^17.7.2" }, "devDependencies": { "@jest/globals": "^29.7.0", "@jest/test-sequencer": "^29.7.0", "cross-env": "^7.0.3", - "eslint": "^8.57.0", + "eslint": "^8.57.1", "eslint-config-standard": "^17.1.0", "eslint-plugin-headers": "^1.1.2", - "eslint-plugin-import": "^2.29.1", + "eslint-plugin-import": "^2.30.0", "eslint-plugin-n": "^16.6.2", - "eslint-plugin-promise": "^6.2.0", + "eslint-plugin-promise": "^6.6.0", "jest": "^29.7.0", "jest-environment-steps": "^1.1.1", + "jest-expect-message": "^1.1.3", "jest-junit": "^16.0.0", - "nyc": "^17.0.0", + "nyc": "^17.1.0", "remark-cli": "^12.0.1", "remark-lint-list-item-indent": "^4.0.0", "remark-lint-unordered-list-marker-style": "^4.0.0", diff --git a/resources/support-zip.sh b/resources/support-zip.sh new file mode 100644 index 000000000..56da3ab76 --- /dev/null +++ b/resources/support-zip.sh @@ -0,0 +1,44 @@ +#!/bin/bash +# This script creates a zip file so that it can be copied out of the pod for research purposes + +readonly HAPI_DIR=/opt/hgcapp/services-hedera/HapiApp2.0 +readonly RESEARCH_ZIP=${HOSTNAME}.zip +readonly ZIP_FULLPATH=${HAPI_DIR}/${RESEARCH_ZIP} +readonly FILE_LIST=${HAPI_DIR}/support-zip-file-list.txt +readonly CONFIG_TXT=config.txt +readonly SETTINGS_TXT=settings.txt +readonly SETTINGS_USED_TXT=settingsUsed.txt +readonly OUTPUT_DIR=output +readonly DATA_DIR=data +readonly ADDRESS_BOOK_DIR=${DATA_DIR}/saved/address_book +readonly CONFIG_DIR=${DATA_DIR}/config +readonly KEYS_DIR=${DATA_DIR}/keys +readonly UPGRADE_DIR=${DATA_DIR}/upgrade +readonly JOURNAL_CTL_LOG=${OUTPUT_DIR}/journalctl.log + +AddToFileList() +{ + if [[ -d "${1}" ]];then + find "${1}" -name "*" -printf '\047%p\047\n' >>${FILE_LIST} + return + fi + + if [[ -f "${1}" ]];then + find . 
-maxdepth 1 -type f -name "${1}" -print >>${FILE_LIST} + else + echo "skipping: ${1}, file or directory not found" + fi +} + +cd ${HAPI_DIR} +echo -n > ${FILE_LIST} +journalctl > ${JOURNAL_CTL_LOG} +AddToFileList ${CONFIG_TXT} +AddToFileList ${SETTINGS_TXT} +AddToFileList ${SETTINGS_USED_TXT} +AddToFileList ${OUTPUT_DIR} +AddToFileList ${ADDRESS_BOOK_DIR} +AddToFileList ${CONFIG_DIR} +AddToFileList ${KEYS_DIR} +AddToFileList ${UPGRADE_DIR} +jar cvfM "${ZIP_FULLPATH}" "@${FILE_LIST}" diff --git a/resources/templates/application.properties b/resources/templates/application.properties index fd3777b1d..ae8714f90 100644 --- a/resources/templates/application.properties +++ b/resources/templates/application.properties @@ -1,2 +1,17 @@ -autoRenew.targetTypes= hedera.config.version=0 +ledger.id=0x01 +netty.mode=TEST +contracts.chainId=298 +hedera.recordStream.logPeriod=1 +balances.exportPeriodSecs=400 +files.maxSizeKb=2048 +hedera.recordStream.compressFilesOnCreation=true +balances.compressOnCreation=true +contracts.maxNumWithHapiSigsAccess=0 +autoRenew.targetTypes= +nodes.gossipFqdnRestricted=false +hedera.profiles.active=TEST +# TODO: this is a workaround until prepareUpgrade freeze will recalculate the weight prior to writing the config.txt +staking.periodMins=1 +nodes.updateAccountIdAllowed=true + diff --git a/resources/templates/bootstrap.properties b/resources/templates/bootstrap.properties index 74d317b61..9e85f2be2 100644 --- a/resources/templates/bootstrap.properties +++ b/resources/templates/bootstrap.properties @@ -1,5 +1,7 @@ +# TODO: bootstrap.properties is scheduled to go away, will need to delete this and related code ledger.id=0x01 netty.mode=DEV +# TODO: if chain ID is passed in as a flag, this is not getting updated contracts.chainId=298 hedera.recordStream.logPeriod=1 balances.exportPeriodSecs=400 diff --git a/resources/templates/config.template b/resources/templates/config.template deleted file mode 100644 index 142933e96..000000000 --- a/resources/templates/config.template +++ /dev/null @@ -1,62 +0,0 @@ -# ====================================================================================================================== -# Address book Format Description -# ====================================================================================================================== -# Address book format varies across versions since it evolved over time. As of July 27, 2023 the below formats were -# relevant for recent versions. Latest version is available in the file: hedera-services/hedera-node/config.txt -# -# - v.0.39.* (or before) format: -# Fields: address, <NODE_ID>, <NODE_NICK_NAME>, <NODE_STAKE_AMOUNT>, <INTERNAL_IP>, <INTERNAL_GOSSIP_PORT>, <EXTERNAL_IP>, <EXTERNAL_GOSSIP_PORT>, <ACCOUNT_ID> -# Example: address, 0, node0, 1, 10.128.0.27, 50111, 35.223.93.31, 30124, 0.0.3 -# -# - v.0.4* format: -# Fields: address, <NODE_ID>, <NODE_NICK_NAME>, <NODE_NAME>, <NODE_STAKE_AMOUNT>, <INTERNAL_IP>, <INTERNAL_GOSSIP_PORT>, <EXTERNAL_IP>, <EXTERNAL_GOSSIP_PORT>, <ACCOUNT_ID> -# Example: address, 0, n0, node0, 1, 10.128.0.27, 50111, 35.223.93.31, 30124, 0.0.3 -# -# - v.0.41* (onward) we need to append the below formatted line with next node ID after the list of "address" lines -#

-# nextNodeId, <NEXT_NODE_ID> -# -# Field descriptions: -# =========================== -# NODE_ID: This increments for each node and starts from 0. -# NEXT_NODE_ID: The id for the next node (i.e. last node ID + 1) -# NODE_NICK_NAME: This is a string (alphanumeric). e.g. node0 -# NODE_NAME: This is a string (alphanumeric). e.g. node0 or n0 -# NODE_STAKE_AMOUNT: A long value. e.g. 1 or a larger number -# INTERNAL_IP: This is the pod IP -# INTERNAL_GOSSIP_PORT: Default gossip port is 50111. So use the exposed port that is mapped to 50111 in container. -# EXTERNAL_IP: This is the service IP -# EXTERNAL_GOSSIP_PORT: Default gossip port is 50111. This is usually same as INTERNAL_GOSSIP_PORT unless mapped differently. -# ACCOUNT_ID: Must start from 0.0.3 -# - -# Account restrictions: -# =========================== -# All ACCOUNT_ID should start from 0.0.3 because of restricted accounts as below: -# - 0.0.0 restricted and not usable -# - 0.0.1 minting account and not usable -# - 0.0.2 treasury account -# -# Default Ports -# =========================== -# We only need to specify the gossip port (INTERNAL_GOSSIP_PORT, EXTERNAL_GOSSIP_PORT). Below are some details on other -# ports that a node may expose: -# - 50111: gossip port -# - 50211: grpc non-tls (for platform services) -# - 50212: grpc tls (for platform services) -# -# IP Address -# =========================== -# When deploying in a kubernetes cluster, we need to use the following IP mapping: -# - INTERNAL_IP: This should be the Pod IP exposing gossip port (i.e. 50111) -# - EXTERNAL_IP: This should be the cluster IP of the service exposing gossip port (i.e. 50111) -# -# -# Example config.txt (for v0.4* onward) -# =========================== -# swirld, 123 -# app, HederaNode.jar -# address, 0, node0, node0, 1, 10.244.0.197, 56789, 10.96.61.84, 50111, 0.0.0 -# address, 1, node1, node1, 1, 10.244.0.198, 56789, 10.96.163.93, 50111, 0.0.1 -# nextNodeId, 2 -# ====================================================================================================================== diff --git a/resources/templates/log4j2.xml b/resources/templates/log4j2.xml index cb70b0a88..1234fc7e0 100644 --- a/resources/templates/log4j2.xml +++ b/resources/templates/log4j2.xml @@ -12,6 +12,7 @@ %d{yyyy-MM-dd HH:mm:ss.SSS} %-5p %-4L %c{1} - %m{nolookups}%n + @@ -35,6 +36,7 @@ %d{yyyy-MM-dd HH:mm:ss.SSS} %-8sn %-5p %-16marker <%t> %c{1}: %msg{nolookups}%n + @@ -91,7 +93,7 @@ - + diff --git a/resources/templates/settings.txt b/resources/templates/settings.txt index 7ecdc3ccf..3b34a834b 100644 --- a/resources/templates/settings.txt +++ b/resources/templates/settings.txt @@ -9,3 +9,6 @@ state.mainClassNameOverride, com.hedera.services.ServicesMain ############################# crypto.enableNewKeyStoreModel, true + +# TODO: remove this? only defaults to true when going from 0.52 to 0.53 +event.migrateEventHashing, false diff --git a/solo-compose/docker-compose.yml b/solo-compose/docker-compose.yml new file mode 100644 index 000000000..25dbb0d0c --- /dev/null +++ b/solo-compose/docker-compose.yml @@ -0,0 +1,54 @@ +services: + # Init Containers + + init-dind-externals: + image: "ghcr.io/hashgraph/runner-images/scaleset-runner:ubuntu-22.04" + platform: linux/amd64 + command: [ "sudo", "sh", "-c", "chmod -R 777 /home/runner/tmpDir && cp -r /home/runner/externals/. 
/home/runner/tmpDir/" ] + privileged: true + volumes: + - dind-externals:/home/runner/tmpDir + depends_on: + - dind + + init-work-directory: + image: "ghcr.io/hashgraph/runner-images/scaleset-runner:ubuntu-22.04" + command: [ "cp", "-r", "/home/runner/.", "/tmp/work/" ] + platform: linux/amd64 + privileged: true + volumes: + - work:/tmp/work + depends_on: + - runner + + # Containers + + runner: + image: "ghcr.io/hashgraph/runner-images/scaleset-runner:ubuntu-22.04" + command: [ "/usr/bin/env", "bash", "/home/runner/setup-runner.sh" ] + platform: linux/amd64 + environment: + - DOCKER_HOST=unix:///var/run/solo-compose.sock + privileged: true + volumes: + - work:/home/runner/_work + - dind-sock:/var/run + - type: bind + source: setup-runner.sh + target: /home/runner/setup-runner.sh + + dind: + image: "docker:20.10-dind" + command: [ "dockerd", "--host=unix:///var/run/solo-compose.sock", "--group=123", "--registry-mirror=https://hub.mirror.solo-compose.lat.ope.eng.hashgraph.io" ] + environment: + - DOCKER_GROUP_GID=123 + privileged: true + volumes: + - work:/home/runner/_work + - dind-sock:/var/run + - dind-externals:/home/runner/externals + +volumes: + work: + dind-sock: + dind-externals: diff --git a/solo-compose/setup-runner.sh b/solo-compose/setup-runner.sh new file mode 100644 index 000000000..cc569fd69 --- /dev/null +++ b/solo-compose/setup-runner.sh @@ -0,0 +1,60 @@ +#! /usr/bin/env bash + +BRANCH_NAME="main" + +# Clone repository +echo "***** Setting up Solo Repository *****" + +git clone https://github.com/hashgraph/solo.git solo +cd ./solo || exit +git checkout "${BRANCH_NAME}" + +# Dependencies Versions +NODE_VERSION="20.17.0" +KIND_VERSION="v0.22.0" +HELM_VERSION="3.15.4" + +echo "***** Setup *****" + +# Setup Corepack +sudo ln -s /home/runner/_work/_tool/node/${NODE_VERSION}/x64/bin/corepack /usr/bin/corepack +echo "Corepack setup completed." + +# Setup NPM +sudo ln -s /home/runner/_work/_tool/node/${NODE_VERSION}/x64/bin/npm /usr/bin/npm +echo "NPM setup completed." + +# Setup NPX +sudo ln -s /home/runner/_work/_tool/node/${NODE_VERSION}/x64/bin/npx /usr/bin/npx +echo "NPX setup completed." + +# Setup Node.js +sudo ln -s /home/runner/_work/_tool/node/${NODE_VERSION}/x64/bin/node /usr/bin/node +echo "Node.js setup completed." + +# Setup Kind +sudo ln -s /home/runner/_work/_tool/kind/${KIND_VERSION}/amd64/kind /usr/bin/kind +echo "Kind setup completed." + +# Setup Kubectl +sudo ln -s /home/runner/_work/_tool/kind/${KIND_VERSION}/amd64/kubectl /usr/bin/kubectl +echo "Kubectl setup completed." + +# Setup Helm +sudo ln -s /home/runner/_work/_tool/helm/${HELM_VERSION}/x64/linux-amd64/helm /usr/bin/helm +echo "Helm setup completed." + +# Install Dependencies +echo "Installing dependencies..." +npm ci +echo "Dependencies installed successfully." + +##################################################### + +# Example +npm run test + +##################################################### + +# Prevent Service from exiting +/usr/bin/env sleep infinity \ No newline at end of file diff --git a/src/commands/account.mjs b/src/commands/account.mjs index c0eec4884..ff0480ba8 100644 --- a/src/commands/account.mjs +++ b/src/commands/account.mjs @@ -14,6 +14,7 @@ * limitations under the License. 
* */ +'use strict' import chalk from 'chalk' import { BaseCommand } from './base.mjs' import { FullstackTestingError, IllegalArgumentError } from '../core/errors.mjs' @@ -22,8 +23,13 @@ import { Listr } from 'listr2' import * as prompts from './prompts.mjs' import { constants } from '../core/index.mjs' import { AccountInfo, HbarUnit, PrivateKey } from '@hashgraph/sdk' +import { FREEZE_ADMIN_ACCOUNT } from '../core/constants.mjs' export class AccountCommand extends BaseCommand { + /** + * @param {{accountManager: AccountManager, logger: Logger, helm: Helm, k8: K8, chartManager: ChartManager, configManager: ConfigManager, depManager: DependencyManager}} opts + * @param {number[][]} [systemAccounts] + */ constructor (opts, systemAccounts = constants.SYSTEM_ACCOUNTS) { super(opts) @@ -34,10 +40,19 @@ export class AccountCommand extends BaseCommand { this.systemAccounts = systemAccounts } + /** + * @returns {Promise} + */ async closeConnections () { await this.accountManager.close() } + /** + * @param {AccountInfo} accountInfo + * @param {string} namespace + * @param {boolean} shouldRetrievePrivateKey + * @returns {Promise<{accountId: string, balance: number, publicKey: string}>} + */ async buildAccountInfo (accountInfo, namespace, shouldRetrievePrivateKey) { if (!accountInfo || !(accountInfo instanceof AccountInfo)) throw new IllegalArgumentError('An instance of AccountInfo is required') @@ -55,6 +70,10 @@ export class AccountCommand extends BaseCommand { return newAccountInfo } + /** + * @param {any} ctx + * @returns {Promise<{accountId: AccountId, privateKey: string, publicKey: string, balance: number}>} + */ async createNewAccount (ctx) { if (ctx.config.ecdsaPrivateKey) { ctx.privateKey = PrivateKey.fromStringECDSA(ctx.config.ecdsaPrivateKey) @@ -68,10 +87,18 @@ export class AccountCommand extends BaseCommand { ctx.privateKey, ctx.config.amount, ctx.config.ecdsaPrivateKey ? 
ctx.config.setAlias : false) } + /** + * @param {any} ctx + * @returns {Promise} + */ async getAccountInfo (ctx) { return this.accountManager.accountInfoQuery(ctx.config.accountId) } + /** + * @param {any} ctx + * @returns {Promise} + */ async updateAccountInfo (ctx) { let amount = ctx.config.amount if (ctx.config.privateKey) { @@ -98,10 +125,19 @@ return true } + /** + * @param {AccountId} toAccountId + * @param {number} amount + * @returns {Promise} + */ async transferAmountFromOperator (toAccountId, amount) { return await this.accountManager.transferAmount(constants.TREASURY_ACCOUNT_ID, toAccountId, amount) } + /** + * @param {Object} argv + * @returns {Promise} + */ async init (argv) { const self = this @@ -147,6 +183,9 @@ fulfilledCount: 0, skippedCount: 0 } + + // do a write transaction to trigger the handler and generate the system accounts to complete genesis + await self.accountManager.transferAmount(constants.TREASURY_ACCOUNT_ID, FREEZE_ADMIN_ACCOUNT, 1) } }, { @@ -216,6 +255,10 @@ return true } + /** + * @param {Object} argv + * @returns {Promise} + */ async create (argv) { const self = this @@ -278,6 +321,10 @@ return true } + /** + * @param {Object} argv + * @returns {Promise} + */ async update (argv) { const self = this @@ -347,6 +394,10 @@ return true } + /** + * @param {Object} argv + * @returns {Promise} + */ async get (argv) { const self = this @@ -402,15 +453,16 @@ /** * Return Yargs command definition for 'node' command - * @param accountCmd an instance of NodeCommand + * @param {AccountCommand} accountCmd an instance of AccountCommand + * @returns {{command: string, desc: string, builder: Function}} */ static getCommandDefinition (accountCmd) { - if (!accountCmd | !(accountCmd instanceof AccountCommand)) { + if (!accountCmd || !(accountCmd instanceof AccountCommand)) { throw new IllegalArgumentError('An instance of AccountCommand is required', accountCmd) } return { command: 'account', - desc: 'Manage Hedera accounts in fullstack testing network', + desc: 'Manage Hedera accounts in solo network', builder: yargs => { return yargs .command({ diff --git a/src/commands/base.mjs b/src/commands/base.mjs index 8deb79718..94c768fa0 100644 --- a/src/commands/base.mjs +++ b/src/commands/base.mjs @@ -20,6 +20,12 @@ import { MissingArgumentError } from '../core/errors.mjs' import { ShellRunner } from '../core/shell_runner.mjs' export class BaseCommand extends ShellRunner { + /** + * @param {string} chartDir + * @param {string} chartRepo + * @param {string} chartReleaseName + * @returns {Promise} + */ async prepareChartPath (chartDir, chartRepo, chartReleaseName) { if (!chartRepo) throw new MissingArgumentError('chart repo name is required') if (!chartReleaseName) throw new MissingArgumentError('chart release name is required') @@ -33,6 +39,10 @@ return `${chartRepo}/${chartReleaseName}` } + /** + * @param {string} valuesFile + * @returns {string} + */ prepareValuesFiles (valuesFile) { let valuesArg = '' if (valuesFile) { @@ -46,6 +56,9 @@ return valuesArg } + /** + * @param {{logger: Logger, helm: Helm, k8: K8, chartManager: ChartManager, configManager: ConfigManager, depManager: 
DependencyManager}} opts + */ constructor (opts) { if (!opts || !opts.logger) throw new Error('An instance of core/Logger is required') if (!opts || !opts.helm) throw new Error('An instance of core/Helm is required') @@ -57,9 +70,97 @@ export class BaseCommand extends ShellRunner { super(opts.logger) this.helm = opts.helm - this.k8 = opts.k8 + this.k8 = /** @type {K8} **/ opts.k8 this.chartManager = opts.chartManager this.configManager = opts.configManager this.depManager = opts.depManager + this._configMaps = new Map() + } + + /** + * Dynamically builds a class with properties from the provided list of flags + * and extra properties, will keep track of which properties are used. Call + * getUnusedConfigs() to get an array of unused properties. + * + * @param {string} configName the name of the configuration + * @param {CommandFlag[]} flags an array of flags + * @param {string[]} [extraProperties] an array of extra properties + * @returns {Object} the instance of the new class + */ + getConfig (configName, flags, extraProperties = []) { + const configManager = this.configManager + + // build the dynamic class that will keep track of which properties are used + const NewConfigClass = class { + constructor () { + // the map to keep track of which properties are used + this.usedConfigs = new Map() + + // add the flags as properties to this class + flags?.forEach(flag => { + this[`_${flag.constName}`] = configManager.getFlag(flag) + Object.defineProperty(this, flag.constName, { + get () { + this.usedConfigs.set(flag.constName, this.usedConfigs.get(flag.constName) + 1 || 1) + return this[`_${flag.constName}`] + } + }) + }) + + // add the extra properties as properties to this class + extraProperties?.forEach(name => { + this[`_${name}`] = '' + Object.defineProperty(this, name, { + get () { + this.usedConfigs.set(name, this.usedConfigs.get(name) + 1 || 1) + return this[`_${name}`] + }, + set (value) { + this[`_${name}`] = value + } + }) + }) + } + + /** + * Get the list of unused configurations that were not accessed + * @returns {string[]} an array of unused configurations + */ + getUnusedConfigs () { + const unusedConfigs = [] + + // add the flag constName to the unusedConfigs array if it was not accessed + flags?.forEach(flag => { + if (!this.usedConfigs.has(flag.constName)) { + unusedConfigs.push(flag.constName) + } + }) + + // add the extra properties to the unusedConfigs array if it was not accessed + extraProperties?.forEach(item => { + if (!this.usedConfigs.has(item)) { + unusedConfigs.push(item) + } + }) + return unusedConfigs + } + } + + const newConfigInstance = new NewConfigClass() + + // add the new instance to the configMaps so that it can be used to get the + // unused configurations using the configName from the BaseCommand + this._configMaps.set(configName, newConfigInstance) + + return newConfigInstance + } + + /** + * Get the list of unused configurations that were not accessed + * @param {string} configName + * @returns {string[]} an array of unused configurations + */ + getUnusedConfigs (configName) { + return this._configMaps.get(configName).getUnusedConfigs() } } diff --git a/src/commands/cluster.mjs b/src/commands/cluster.mjs index 3c4ae033e..95540147e 100644 --- a/src/commands/cluster.mjs +++ b/src/commands/cluster.mjs @@ -14,6 +14,7 @@ * limitations under the License. 
* */ +'use strict' import { ListrEnquirerPromptAdapter } from '@listr2/prompt-adapter-enquirer' import { Listr } from 'listr2' import { FullstackTestingError, IllegalArgumentError } from '../core/errors.mjs' @@ -22,11 +23,15 @@ import { BaseCommand } from './base.mjs' import chalk from 'chalk' import { constants } from '../core/index.mjs' import * as prompts from './prompts.mjs' +import path from 'path' /** * Define the core functionalities of 'cluster' command */ export class ClusterCommand extends BaseCommand { + /** + * @returns {Promise} + */ async showClusterList () { this.logger.showList('Clusters', await this.k8.getClusters()) return true @@ -51,7 +56,7 @@ export class ClusterCommand extends BaseCommand { /** * Show list of installed chart - * @param clusterSetupNamespace + * @param {string} clusterSetupNamespace */ async showInstalledChartList (clusterSetupNamespace) { this.logger.showList('Installed Charts', await this.chartManager.getInstalledCharts(clusterSetupNamespace)) @@ -59,7 +64,7 @@ export class ClusterCommand extends BaseCommand { /** * Setup cluster with shared components - * @param argv + * @param {Object} argv * @returns {Promise} */ async setup (argv) { @@ -154,7 +159,7 @@ export class ClusterCommand extends BaseCommand { /** * Uninstall shared components from the cluster and perform any other necessary cleanups - * @param argv + * @param {Object} argv * @returns {Promise} */ async reset (argv) { @@ -217,7 +222,8 @@ export class ClusterCommand extends BaseCommand { /** * Return Yargs command definition for 'cluster' command - * @param clusterCmd an instance of ClusterCommand + * @param {ClusterCommand} clusterCmd - an instance of ClusterCommand + * @returns {{command: string, desc: string, builder: Function}} */ static getCommandDefinition (clusterCmd) { if (!clusterCmd || !(clusterCmd instanceof ClusterCommand)) { @@ -314,11 +320,11 @@ export class ClusterCommand extends BaseCommand { /** * Prepare values arg for cluster setup command * - * @param chartDir local charts directory (default is empty) - * @param prometheusStackEnabled a bool to denote whether to install prometheus stack - * @param minioEnabled a bool to denote whether to install minio - * @param certManagerEnabled a bool to denote whether to install cert manager - * @param certManagerCrdsEnabled a bool to denote whether to install cert manager CRDs + * @param {string} chartDir - local charts directory (default is empty) + * @param {boolean} prometheusStackEnabled - a bool to denote whether to install prometheus stack + * @param {boolean} minioEnabled - a bool to denote whether to install minio + * @param {boolean} certManagerEnabled - a bool to denote whether to install cert manager + * @param {boolean} certManagerCrdsEnabled - a bool to denote whether to install cert manager CRDs * @returns {string} */ prepareValuesArg (chartDir = flags.chartDirectory.definition.default, @@ -329,7 +335,7 @@ export class ClusterCommand extends BaseCommand { ) { let valuesArg = '' if (chartDir) { - valuesArg = `-f ${chartDir}/fullstack-cluster-setup/values.yaml` + valuesArg = `-f ${path.join(chartDir, 'fullstack-cluster-setup', 'values.yaml')}` } valuesArg += ` --set cloud.prometheusStack.enabled=${prometheusStackEnabled}` @@ -347,13 +353,13 @@ export class ClusterCommand extends BaseCommand { /** * Prepare chart path - * @param chartDir local charts directory (default is empty) + * @param {string} [chartDir] - local charts directory (default is empty) * @returns {Promise} */ async prepareChartPath (chartDir = 
flags.chartDirectory.definition.default) { let chartPath = 'full-stack-testing/fullstack-cluster-setup' if (chartDir) { - chartPath = `${chartDir}/fullstack-cluster-setup` + chartPath = path.join(chartDir, 'fullstack-cluster-setup') await this.helm.dependency('update', chartPath) } diff --git a/src/commands/flags.mjs b/src/commands/flags.mjs index 6955076b4..a9bb67466 100644 --- a/src/commands/flags.mjs +++ b/src/commands/flags.mjs @@ -14,9 +14,11 @@ * limitations under the License. * */ +'use strict' import { constants } from '../core/index.mjs' import * as core from '../core/index.mjs' import * as version from '../../version.mjs' +import path from 'path' /** * @typedef {Object} CommandFlag @@ -93,18 +95,6 @@ export const namespace = { } } -/** @type {CommandFlag} **/ -export const deployMirrorNode = { - constName: 'deployMirrorNode', - name: 'mirror-node', - definition: { - describe: 'Deploy mirror node', - defaultValue: true, - alias: 'm', - type: 'boolean' - } -} - /** @type {CommandFlag} **/ export const deployHederaExplorer = { constName: 'deployHederaExplorer', @@ -240,7 +230,7 @@ export const relayReleaseTag = { name: 'relay-release', definition: { describe: 'Relay release tag to be used (e.g. v0.48.0)', - defaultValue: 'v0.48.1', + defaultValue: 'v0.53.0', type: 'string' } } @@ -309,7 +299,7 @@ export const chainId = { name: 'ledger-id', definition: { describe: 'Ledger ID (a.k.a. Chain ID)', - defaultValue: '298', // Ref: https://github.com/hashgraph/hedera-json-rpc-relay#configuration + defaultValue: constants.HEDERA_CHAIN_ID, // Ref: https://github.com/hashgraph/hedera-json-rpc-relay#configuration alias: 'l', type: 'string' } @@ -361,17 +351,6 @@ export const generateTlsKeys = { } } -/** @type {CommandFlag} **/ -export const keyFormat = { - constName: 'keyFormat', - name: 'key-format', - definition: { - describe: 'Public and Private key file format (pem or pfx)', - defaultValue: 'pem', - type: 'string' - } -} - /** @type {CommandFlag} **/ export const tlsClusterIssuerType = { constName: 'tlsClusterIssuerType', @@ -455,7 +434,7 @@ export const applicationProperties = { name: 'application-properties', definition: { describe: 'application.properties file for node', - defaultValue: `${constants.SOLO_CACHE_DIR}/templates/application.properties`, + defaultValue: path.join(constants.SOLO_CACHE_DIR, 'templates', 'application.properties'), type: 'string' } } @@ -477,7 +456,7 @@ export const apiPermissionProperties = { name: 'api-permission-properties', definition: { describe: 'api-permission.properties file for node', - defaultValue: `${constants.SOLO_CACHE_DIR}/templates/api-permission.properties`, + defaultValue: path.join(constants.SOLO_CACHE_DIR, 'templates', 'api-permission.properties'), type: 'string' } } @@ -488,7 +467,7 @@ export const bootstrapProperties = { name: 'bootstrap-properties', definition: { describe: 'bootstrap.properties file for node', - defaultValue: `${constants.SOLO_CACHE_DIR}/templates/bootstrap.properties`, + defaultValue: path.join(constants.SOLO_CACHE_DIR, 'templates', 'bootstrap.properties'), type: 'string' } } @@ -499,7 +478,7 @@ export const settingTxt = { name: 'settings-txt', definition: { describe: 'settings.txt file for node', - defaultValue: `${constants.SOLO_CACHE_DIR}/templates/settings.txt`, + defaultValue: path.join(constants.SOLO_CACHE_DIR, 'templates', 'settings.txt'), type: 'string' } } @@ -510,7 +489,7 @@ export const app = { name: 'app', definition: { describe: 'Testing app name', - defaultValue: '', + defaultValue: 
constants.HEDERA_APP_NAME, type: 'string' } } @@ -537,13 +516,79 @@ } } +/** @type {CommandFlag} **/ +export const newAccountNumber = { + constName: 'newAccountNumber', + name: 'new-account-number', + definition: { + describe: 'new account number for node update transaction', + defaultValue: '', + type: 'string' + } +} + +/** @type {CommandFlag} **/ +export const newAdminKey = { + constName: 'newAdminKey', + name: 'new-admin-key', + definition: { + describe: 'new admin key for the Hedera account', + defaultValue: '', + type: 'string' + } +} + +/** @type {CommandFlag} **/ +export const gossipPublicKey = { + constName: 'gossipPublicKey', + name: 'gossip-public-key', + definition: { + describe: 'path and file name of the public key for signing gossip in PEM key format to be used', + defaultValue: '', + type: 'string' + } +} + +/** @type {CommandFlag} **/ +export const gossipPrivateKey = { + constName: 'gossipPrivateKey', + name: 'gossip-private-key', + definition: { + describe: 'path and file name of the private key for signing gossip in PEM key format to be used', + defaultValue: '', + type: 'string' + } +} + +/** @type {CommandFlag} **/ +export const tlsPublicKey = { + constName: 'tlsPublicKey', + name: 'tls-public-key', + definition: { + describe: 'path and file name of the public TLS key to be used', + defaultValue: '', + type: 'string' + } +} + +/** @type {CommandFlag} **/ +export const tlsPrivateKey = { + constName: 'tlsPrivateKey', + name: 'tls-private-key', + definition: { + describe: 'path and file name of the private TLS key to be used', + defaultValue: '', + type: 'string' + } +} + /** @type {CommandFlag} **/ export const log4j2Xml = { constName: 'log4j2Xml', name: 'log4j2-xml', definition: { describe: 'log4j2.xml file for node', - defaultValue: `${constants.SOLO_CACHE_DIR}/templates/log4j2.xml`, + defaultValue: path.join(constants.SOLO_CACHE_DIR, 'templates', 'log4j2.xml'), type: 'string' } } @@ -614,6 +659,112 @@ } } +/** @type {CommandFlag} **/ +export const nodeID = { + constName: 'nodeId', + name: 'node-id', + definition: { + describe: 'Node id (e.g. node99)', + type: 'string' + } +} + +/** @type {CommandFlag} **/ +export const gossipEndpoints = { + constName: 'gossipEndpoints', + name: 'gossip-endpoints', + definition: { + describe: 'Comma separated gossip endpoints of the node (e.g. 
first one is internal, second one is external)', + type: 'string' + } +} + +/** @type {CommandFlag} **/ +export const grpcEndpoints = { + constName: 'grpcEndpoints', + name: 'grpc-endpoints', + definition: { + describe: 'Comma separated gRPC endpoints of the node (at most 8)', + type: 'string' + } +} + +/** @type {CommandFlag} **/ +export const endpointType = { + constName: 'endpointType', + name: 'endpoint-type', + definition: { + describe: 'Endpoint type (IP or FQDN)', + defaultValue: constants.ENDPOINT_TYPE_FQDN, + type: 'string' + } +} + +/** @type {CommandFlag} **/ +export const persistentVolumeClaims = { + constName: 'persistentVolumeClaims', + name: 'pvcs', + definition: { + describe: 'Enable persistent volume claims to store data outside the pod, required for node add', + defaultValue: false, + type: 'boolean' + } +} + +/** @type {CommandFlag} **/ +export const debugNodeId = { + constName: 'debugNodeId', + name: 'debug-nodeid', + definition: { + describe: 'Enable default jvm debug port (5005) for the given node id', + defaultValue: '', + type: 'string' + } +} + +/** @type {CommandFlag} **/ +export const outputDir = { + constName: 'outputDir', + name: 'output-dir', + definition: { + describe: 'Path to the directory where the command context will be saved to', + defaultValue: '', + type: 'string' + } +} + +/** @type {CommandFlag} **/ +export const inputDir = { + constName: 'inputDir', + name: 'input-dir', + definition: { + describe: 'Path to the directory where the command context will be loaded from', + defaultValue: '', + type: 'string' + } +} + +/** @type {CommandFlag} **/ +export const adminKey = { + constName: 'adminKey', + name: 'admin-key', + definition: { + describe: 'Admin key', + defaultValue: constants.GENESIS_KEY, + type: 'string' + } +} + +/** @type {CommandFlag} **/ +export const mirrorNodeVersion = { + constName: 'mirrorNodeVersion', + name: 'mirror-node-version', + definition: { + describe: 'Mirror node chart version', + defaultValue: '', + type: 'string' + } +} /** @type {CommandFlag[]} **/ export const allFlags = [ accountId, @@ -636,24 +787,32 @@ export const allFlags = [ deployHederaExplorer, deployJsonRpcRelay, deployMinio, - deployMirrorNode, deployPrometheusStack, devMode, ecdsaPrivateKey, enableHederaExplorerTls, enablePrometheusSvcMonitor, + endpointType, fstChartVersion, generateGossipKeys, generateTlsKeys, + gossipEndpoints, + gossipPrivateKey, + gossipPublicKey, + grpcEndpoints, hederaExplorerTlsHostName, hederaExplorerTlsLoadBalancerIp, - keyFormat, + debugNodeId, localBuildPath, log4j2Xml, namespace, + newAccountNumber, + newAdminKey, + nodeID, nodeIDs, operatorId, operatorKey, + persistentVolumeClaims, privateKey, profileFile, profileName, @@ -663,10 +822,24 @@ export const allFlags = [ setAlias, settingTxt, tlsClusterIssuerType, + tlsPrivateKey, + tlsPublicKey, updateAccountKeys, - valuesFile + valuesFile, + mirrorNodeVersion ] +/** + * Resets the definition.disablePrompt for all flags + */ +export function resetDisabledPrompts () { + allFlags.forEach(f => { + if (f.definition.disablePrompt) { + delete f.definition.disablePrompt + } + }) +} + export const allFlagsMap = new Map(allFlags.map(f => [f.name, f])) export const nodeConfigFileFlags = new Map([ diff --git a/src/commands/index.mjs b/src/commands/index.mjs index abb7942e1..a181b6404 100644 --- a/src/commands/index.mjs +++ b/src/commands/index.mjs @@ -14,6 +14,7 @@ * limitations under the License. 
 * */
+'use strict'
 import { ClusterCommand } from './cluster.mjs'
 import { InitCommand } from './init.mjs'
 import { MirrorNodeCommand } from './mirror_node.mjs'
@@ -23,9 +24,10 @@ import { RelayCommand } from './relay.mjs'
 import { AccountCommand } from './account.mjs'
 import * as flags from './flags.mjs'
-/*
+/**
  * Return a list of Yargs command builder to be exposed through CLI
- * @param opts it is an Options object containing logger
+ * @param {Object} opts - an Options object containing the logger
+ * @returns {Array} an array of Yargs command builders
  */
 function Initialize (opts) {
   const initCmd = new InitCommand(opts)
diff --git a/src/commands/init.mjs b/src/commands/init.mjs
index f46221487..e52d12215 100644
--- a/src/commands/init.mjs
+++ b/src/commands/init.mjs
@@ -14,6 +14,7 @@
  * limitations under the License.
  *
  */
+'use strict'
 import { Listr } from 'listr2'
 import path from 'path'
 import { BaseCommand } from './base.mjs'
@@ -30,7 +31,8 @@ import chalk from 'chalk'
 export class InitCommand extends BaseCommand {
   /**
    * Setup home directories
-   * @param dirs a list of directories that need to be created in sequence
+   * @param {string[]} dirs a list of directories that need to be created in sequence
+   * @returns {string[]}
    */
   setupHomeDirectory (dirs = [
     constants.SOLO_HOME_DIR,
@@ -57,6 +59,7 @@ export class InitCommand extends BaseCommand {
   /**
    * Executes the init CLI command
+   * @param {Object} argv
    * @returns {Promise}
    */
   async init (argv) {
@@ -141,7 +144,8 @@ export class InitCommand extends BaseCommand {
   /**
    * Return Yargs command definition for 'init' command
-   * @param initCmd an instance of InitCommand
+   * @param {InitCommand} initCmd - an instance of InitCommand
+   * @returns An object representing the Yargs command definition
    */
   static getCommandDefinition (initCmd) {
     if (!initCmd || !(initCmd instanceof InitCommand)) {
@@ -157,7 +161,6 @@ export class InitCommand extends BaseCommand {
         flags.chartDirectory,
         flags.clusterSetupNamespace,
         flags.fstChartVersion,
-        flags.keyFormat,
         flags.namespace,
         flags.nodeIDs,
         flags.profileFile,
diff --git a/src/commands/mirror_node.mjs b/src/commands/mirror_node.mjs
index ce0a3c233..498fc4d0a 100644
--- a/src/commands/mirror_node.mjs
+++ b/src/commands/mirror_node.mjs
@@ -17,12 +17,18 @@ import { ListrEnquirerPromptAdapter } from '@listr2/prompt-adapter-enquirer'
 import { Listr } from 'listr2'
 import { FullstackTestingError, IllegalArgumentError, MissingArgumentError } from '../core/errors.mjs'
-import { Templates, constants } from '../core/index.mjs'
+import { constants } from '../core/index.mjs'
 import { BaseCommand } from './base.mjs'
 import * as flags from './flags.mjs'
 import * as prompts from './prompts.mjs'
+import { getFileContents, getEnvValue } from '../core/helpers.mjs'

 export class MirrorNodeCommand extends BaseCommand {
+  /**
+   * @param {{accountManager: AccountManager, profileManager: ProfileManager, logger: Logger, helm: Helm, k8: K8,
+   * chartManager: ChartManager, configManager: ConfigManager, depManager: DependencyManager,
+   * downloader: PackageDownloader}} opts
+   */
   constructor (opts) {
     super(opts)
     if (!opts || !opts.accountManager) throw new IllegalArgumentError('An instance of core/AccountManager is required', opts.accountManager)
@@ -32,22 +38,104 @@ export class MirrorNodeCommand extends BaseCommand {
     this.profileManager = opts.profileManager
   }

-  async prepareValuesArg (valuesFile, deployHederaExplorer) {
+  /**
+   * @returns {string}
+   */
+  static get DEPLOY_CONFIGS_NAME () {
+    return 'deployConfigs'
+  }
+
+  /**
+   *
@returns {CommandFlag[]} + */ + static get DEPLOY_FLAGS_LIST () { + return [ + flags.chartDirectory, + flags.deployHederaExplorer, + flags.enableHederaExplorerTls, + flags.fstChartVersion, + flags.hederaExplorerTlsHostName, + flags.hederaExplorerTlsLoadBalancerIp, + flags.namespace, + flags.profileFile, + flags.profileName, + flags.tlsClusterIssuerType, + flags.valuesFile, + flags.mirrorNodeVersion + ] + } + + /** + * @param {string} tlsClusterIssuerType + * @param {boolean} enableHederaExplorerTls + * @param {string} namespace + * @param {string} hederaExplorerTlsLoadBalancerIp + * @param {string} hederaExplorerTlsHostName + * @returns {string} + */ + getTlsValueArguments (tlsClusterIssuerType, enableHederaExplorerTls, namespace, hederaExplorerTlsLoadBalancerIp, hederaExplorerTlsHostName) { let valuesArg = '' - if (valuesFile) { - valuesArg += this.prepareValuesFiles(valuesFile) + + if (enableHederaExplorerTls) { + if (!['acme-staging', 'acme-prod', 'self-signed'].includes(tlsClusterIssuerType)) { + throw new Error(`Invalid TLS cluster issuer type: ${tlsClusterIssuerType}, must be one of: "acme-staging", "acme-prod", or "self-signed"`) + } + + valuesArg += ' --set hedera-explorer.ingress.enabled=true' + valuesArg += ' --set cloud.haproxyIngressController.enabled=true' + valuesArg += ` --set global.ingressClassName=${namespace}-hedera-explorer-ingress-class` + valuesArg += ` --set-json 'hedera-explorer.ingress.hosts[0]={"host":"${hederaExplorerTlsHostName}","paths":[{"path":"/","pathType":"Prefix"}]}'` + + if (hederaExplorerTlsLoadBalancerIp !== '') { + valuesArg += ` --set haproxy-ingress.controller.service.loadBalancerIP=${hederaExplorerTlsLoadBalancerIp}` + } + + if (tlsClusterIssuerType === 'self-signed') { + valuesArg += ' --set cloud.selfSignedClusterIssuer.enabled=true' + } else { + valuesArg += ' --set cloud.acmeClusterIssuer.enabled=true' + valuesArg += ` --set hedera-explorer.certClusterIssuerType=${tlsClusterIssuerType}` + } } + return valuesArg + } + + /** + * @param {Object} config + * @returns {Promise} + */ + async prepareValuesArg (config) { + let valuesArg = '' + const profileName = this.configManager.getFlag(flags.profileName) const profileValuesFile = await this.profileManager.prepareValuesForMirrorNodeChart(profileName) if (profileValuesFile) { valuesArg += this.prepareValuesFiles(profileValuesFile) } - valuesArg += ` --set hedera-mirror-node.enabled=true --set hedera-explorer.enabled=${deployHederaExplorer}` + if (config.enableHederaExplorerTls) { + valuesArg += this.getTlsValueArguments(config.tlsClusterIssuerType, config.enableHederaExplorerTls, config.namespace, + config.hederaExplorerTlsLoadBalancerIp, config.hederaExplorerTlsHostName) + } + + if (config.mirrorNodeVersion) { + valuesArg += ` --set global.image.tag=${config.mirrorNodeVersion}` + } + + valuesArg += ` --set hedera-mirror-node.enabled=true --set hedera-explorer.enabled=${config.deployHederaExplorer}` + + if (config.valuesFile) { + valuesArg += this.prepareValuesFiles(config.valuesFile) + } + return valuesArg } + /** + * @param {Object} argv + * @returns {Promise} + */ async deploy (argv) { const self = this @@ -56,29 +144,55 @@ export class MirrorNodeCommand extends BaseCommand { title: 'Initialize', task: async (ctx, task) => { self.configManager.update(argv) - await prompts.execute(task, self.configManager, [ + + // disable the prompts that we don't want to prompt the user for + prompts.disablePrompts([ + flags.chartDirectory, flags.deployHederaExplorer, - flags.namespace, - flags.profileFile, - 
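
For reviewers who want to see the output of `getTlsValueArguments` rather than its string concatenation: below is a standalone restatement of the self-signed branch, with an illustrative namespace and hostname (this is a sketch, not captured output).

```js
// Inputs are illustrative; the string building mirrors getTlsValueArguments above.
const namespace = 'solo-e2e'
const hostName = 'explorer.solo.local'

let valuesArg = ''
valuesArg += ' --set hedera-explorer.ingress.enabled=true'
valuesArg += ' --set cloud.haproxyIngressController.enabled=true'
valuesArg += ` --set global.ingressClassName=${namespace}-hedera-explorer-ingress-class`
valuesArg += ` --set-json 'hedera-explorer.ingress.hosts[0]={"host":"${hostName}","paths":[{"path":"/","pathType":"Prefix"}]}'`
valuesArg += ' --set cloud.selfSignedClusterIssuer.enabled=true' // tlsClusterIssuerType === 'self-signed'

console.log(valuesArg) // appended to the helm arguments for the deployment chart
```
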
flags.profileName + flags.enableHederaExplorerTls, + flags.fstChartVersion, + flags.hederaExplorerTlsHostName, + flags.hederaExplorerTlsLoadBalancerIp, + flags.tlsClusterIssuerType, + flags.valuesFile, + flags.mirrorNodeVersion ]) - ctx.config = { - chartDir: self.configManager.getFlag(flags.chartDirectory), - deployHederaExplorer: self.configManager.getFlag(flags.deployHederaExplorer), - fstChartVersion: this.configManager.getFlag(flags.fstChartVersion), - namespace: self.configManager.getFlag(flags.namespace) - } - - ctx.config.chartPath = await self.prepareChartPath(ctx.config.chartDir, + await prompts.execute(task, self.configManager, MirrorNodeCommand.DEPLOY_FLAGS_LIST) + + /** + * @typedef {Object} MirrorNodeDeployConfigClass + * -- flags -- + * @property {string} chartDirectory + * @property {boolean} deployHederaExplorer + * @property {string} enableHederaExplorerTls + * @property {string} fstChartVersion + * @property {string} hederaExplorerTlsHostName + * @property {string} hederaExplorerTlsLoadBalancerIp + * @property {string} namespace + * @property {string} profileFile + * @property {string} profileName + * @property {string} tlsClusterIssuerType + * @property {string} valuesFile + * -- extra args -- + * @property {string} chartPath + * @property {string} valuesArg + * @property {string} mirrorNodeVersion + * -- methods -- + * @property {getUnusedConfigs} getUnusedConfigs + */ + /** + * @callback getUnusedConfigs + * @returns {string[]} + */ + + ctx.config = /** @type {MirrorNodeDeployConfigClass} **/ this.getConfig(MirrorNodeCommand.DEPLOY_CONFIGS_NAME, MirrorNodeCommand.DEPLOY_FLAGS_LIST, + ['chartPath', 'valuesArg']) + + ctx.config.chartPath = await self.prepareChartPath(ctx.config.chartDirectory, constants.FULLSTACK_TESTING_CHART, constants.FULLSTACK_DEPLOYMENT_CHART) - ctx.config.stagingDir = Templates.renderStagingDir(self.configManager, flags) - - ctx.config.valuesArg = await self.prepareValuesArg( - ctx.config.valuesFile, - ctx.config.deployHederaExplorer - ) + ctx.config.valuesArg = await self.prepareValuesArg(ctx.config) if (!await self.k8.hasNamespace(ctx.config.namespace)) { throw new FullstackTestingError(`namespace ${ctx.config.namespace} does not exist`) @@ -124,35 +238,35 @@ export class MirrorNodeCommand extends BaseCommand { const subTasks = [ { title: 'Check Postgres DB', - task: async (ctx, _) => self.k8.waitForPodReady([ + task: async (ctx, _) => await self.k8.waitForPodReady([ 'app.kubernetes.io/component=postgresql', 'app.kubernetes.io/name=postgres' ], 1, 300, 2000) }, { title: 'Check REST API', - task: async (ctx, _) => self.k8.waitForPodReady([ + task: async (ctx, _) => await self.k8.waitForPodReady([ 'app.kubernetes.io/component=rest', 'app.kubernetes.io/name=rest' ], 1, 300, 2000) }, { title: 'Check GRPC', - task: async (ctx, _) => self.k8.waitForPodReady([ + task: async (ctx, _) => await self.k8.waitForPodReady([ 'app.kubernetes.io/component=grpc', 'app.kubernetes.io/name=grpc' ], 1, 300, 2000) }, { title: 'Check Monitor', - task: async (ctx, _) => self.k8.waitForPodReady([ + task: async (ctx, _) => await self.k8.waitForPodReady([ 'app.kubernetes.io/component=monitor', 'app.kubernetes.io/name=monitor' ], 1, 300, 2000) }, { title: 'Check Importer', - task: async (ctx, _) => self.k8.waitForPodReady([ + task: async (ctx, _) => await self.k8.waitForPodReady([ 'app.kubernetes.io/component=importer', 'app.kubernetes.io/name=importer' ], 1, 300, 2000) @@ -160,7 +274,7 @@ export class MirrorNodeCommand extends BaseCommand { { title: 'Check Hedera 
Explorer', skip: (ctx, _) => !ctx.config.deployHederaExplorer, - task: async (ctx, _) => self.k8.waitForPodReady([ + task: async (ctx, _) => await self.k8.waitForPodReady([ 'app.kubernetes.io/component=hedera-explorer', 'app.kubernetes.io/name=hedera-explorer' ], 1, 300, 2000) @@ -172,6 +286,56 @@ export class MirrorNodeCommand extends BaseCommand { rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION }) } + }, + { + title: 'Seed DB data', + task: async (ctx, parentTask) => { + const subTasks = [ + { + title: 'Insert data in public.file_data', + task: async (ctx, _) => { + const namespace = self.configManager.getFlag(flags.namespace) + + const feesFileIdNum = 111 + const exchangeRatesFileIdNum = 112 + const timestamp = Date.now() + + const fees = await getFileContents(this.accountManager, namespace, feesFileIdNum) + const exchangeRates = await getFileContents(this.accountManager, namespace, exchangeRatesFileIdNum) + + const importFeesQuery = `INSERT INTO public.file_data(file_data, consensus_timestamp, entity_id, transaction_type) VALUES (decode('${fees}', 'hex'), ${timestamp + '000000'}, ${feesFileIdNum}, 17);` + const importExchangeRatesQuery = `INSERT INTO public.file_data(file_data, consensus_timestamp, entity_id, transaction_type) VALUES (decode('${exchangeRates}', 'hex'), ${ + timestamp + '000001' + }, ${exchangeRatesFileIdNum}, 17);` + const sqlQuery = [importFeesQuery, importExchangeRatesQuery].join('\n') + + const pods = await this.k8.getPodsByLabel(['app.kubernetes.io/name=postgres']) + if (pods.length === 0) { + throw new FullstackTestingError('postgres pod not found') + } + const postgresPodName = pods[0].metadata.name + const postgresContainerName = 'postgresql' + const mirrorEnvVars = await self.k8.execContainer(postgresPodName, postgresContainerName, '/bin/bash -c printenv') + const mirrorEnvVarsArray = mirrorEnvVars.split('\n') + const HEDERA_MIRROR_IMPORTER_DB_OWNER = getEnvValue(mirrorEnvVarsArray, 'HEDERA_MIRROR_IMPORTER_DB_OWNER') + const HEDERA_MIRROR_IMPORTER_DB_OWNERPASSWORD = getEnvValue(mirrorEnvVarsArray, 'HEDERA_MIRROR_IMPORTER_DB_OWNERPASSWORD') + const HEDERA_MIRROR_IMPORTER_DB_NAME = getEnvValue(mirrorEnvVarsArray, 'HEDERA_MIRROR_IMPORTER_DB_NAME') + + await self.k8.execContainer(postgresPodName, postgresContainerName, [ + 'psql', + `postgresql://${HEDERA_MIRROR_IMPORTER_DB_OWNER}:${HEDERA_MIRROR_IMPORTER_DB_OWNERPASSWORD}@localhost:5432/${HEDERA_MIRROR_IMPORTER_DB_NAME}`, + '-c', + sqlQuery + ]) + } + } + ] + + return parentTask.newListr(subTasks, { + concurrent: false, + rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION + }) + } } ], { concurrent: false, @@ -190,6 +354,10 @@ export class MirrorNodeCommand extends BaseCommand { return true } + /** + * @param {Object} argv + * @returns {Promise} + */ async destroy (argv) { const self = this @@ -215,16 +383,14 @@ export class MirrorNodeCommand extends BaseCommand { ]) ctx.config = { - chartDir: self.configManager.getFlag(flags.chartDirectory), + chartDirectory: self.configManager.getFlag(flags.chartDirectory), fstChartVersion: this.configManager.getFlag(flags.fstChartVersion), namespace: self.configManager.getFlag(flags.namespace) } - ctx.config.chartPath = await self.prepareChartPath(ctx.config.chartDir, + ctx.config.chartPath = await self.prepareChartPath(ctx.config.chartDirectory, constants.FULLSTACK_TESTING_CHART, constants.FULLSTACK_DEPLOYMENT_CHART) - ctx.config.stagingDir = Templates.renderStagingDir(self.configManager, flags) - ctx.config.valuesArg = ' --set hedera-mirror-node.enabled=false 
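
Two details in the seeding task above are worth making explicit: `${timestamp + '000000'}` is number-to-string concatenation (producing a nanosecond-style consensus timestamp, not addition), and `getEnvValue` is imported from helpers.mjs without its body appearing in this diff. The sketch below shows the concatenation and an *assumed* `getEnvValue`; verify against the real helper.

```js
// Assumed behaviour of helpers.getEnvValue (body not in this diff):
// find `NAME=value` in the printenv output and return the value.
function getEnvValue (envVarArray, name) {
  const match = envVarArray.find(line => line.startsWith(`${name}=`))
  return match ? match.slice(name.length + 1) : null
}

const timestamp = Date.now() // e.g. 1714000000000
// String concatenation, not arithmetic: '1714000000000000000'
console.log(`${timestamp + '000000'}`)

const env = ['HEDERA_MIRROR_IMPORTER_DB_NAME=mirror_node']
console.log(getEnvValue(env, 'HEDERA_MIRROR_IMPORTER_DB_NAME')) // 'mirror_node'
```
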
--set hedera-explorer.enabled=false' if (!await self.k8.hasNamespace(ctx.config.namespace)) { @@ -281,7 +447,8 @@ export class MirrorNodeCommand extends BaseCommand { /** * Return Yargs command definition for 'mirror-mirror-node' command - * @param mirrorNodeCmd an instance of NodeCommand + * @param {MirrorNodeCommand} mirrorNodeCmd an instance of MirrorNodeCommand + * @returns {{command: string, desc: string, builder: Function}} */ static getCommandDefinition (mirrorNodeCmd) { if (!mirrorNodeCmd || !(mirrorNodeCmd instanceof MirrorNodeCommand)) { @@ -289,18 +456,13 @@ export class MirrorNodeCommand extends BaseCommand { } return { command: 'mirror-node', - desc: 'Manage Hedera Mirror Node in fullstack testing network', + desc: 'Manage Hedera Mirror Node in solo network', builder: yargs => { return yargs .command({ command: 'deploy', desc: 'Deploy mirror-node and its components', - builder: y => flags.setCommandFlags(y, - flags.deployHederaExplorer, - flags.namespace, - flags.profileFile, - flags.profileName - ), + builder: y => flags.setCommandFlags(y, ...MirrorNodeCommand.DEPLOY_FLAGS_LIST), handler: argv => { mirrorNodeCmd.logger.debug('==== Running \'mirror-node deploy\' ===') mirrorNodeCmd.logger.debug(argv) @@ -318,7 +480,9 @@ export class MirrorNodeCommand extends BaseCommand { command: 'destroy', desc: 'Destroy mirror-node components and database', builder: y => flags.setCommandFlags(y, + flags.chartDirectory, flags.force, + flags.fstChartVersion, flags.namespace ), handler: argv => { diff --git a/src/commands/network.mjs b/src/commands/network.mjs index 81b4d9917..d00048ba4 100644 --- a/src/commands/network.mjs +++ b/src/commands/network.mjs @@ -14,66 +14,97 @@ * limitations under the License. * */ +'use strict' import { ListrEnquirerPromptAdapter } from '@listr2/prompt-adapter-enquirer' import chalk from 'chalk' import { Listr } from 'listr2' import { FullstackTestingError, IllegalArgumentError, MissingArgumentError } from '../core/errors.mjs' import { BaseCommand } from './base.mjs' import * as flags from './flags.mjs' -import { constants } from '../core/index.mjs' +import { constants, Templates } from '../core/index.mjs' import * as prompts from './prompts.mjs' import * as helpers from '../core/helpers.mjs' +import path from 'path' +import { addDebugOptions, validatePath } from '../core/helpers.mjs' +import fs from 'fs' export class NetworkCommand extends BaseCommand { + /** + * @param {{profileManager: ProfileManager, logger: Logger, helm: Helm, k8: K8, chartManager: ChartManager, + * configManager: ConfigManager, depManager: DependencyManager, downloader: PackageDownloader}} opts + */ constructor (opts) { super(opts) + if (!opts || !opts.k8) throw new Error('An instance of core/K8 is required') + if (!opts || !opts.keyManager) throw new IllegalArgumentError('An instance of core/KeyManager is required', opts.keyManager) + if (!opts || !opts.platformInstaller) throw new IllegalArgumentError('An instance of core/PlatformInstaller is required', opts.platformInstaller) if (!opts || !opts.profileManager) throw new MissingArgumentError('An instance of core/ProfileManager is required', opts.downloader) + this.k8 = opts.k8 + this.keyManager = opts.keyManager + this.platformInstaller = opts.platformInstaller this.profileManager = opts.profileManager } - getTlsValueArguments (tlsClusterIssuerType, enableHederaExplorerTls, namespace, - hederaExplorerTlsLoadBalancerIp, hederaExplorerTlsHostName) { - let valuesArg = '' - - if (enableHederaExplorerTls) { - if (!['acme-staging', 'acme-prod', 
'self-signed'].includes(tlsClusterIssuerType)) { - throw new Error(`Invalid TLS cluster issuer type: ${tlsClusterIssuerType}, must be one of: "acme-staging", "acme-prod", or "self-signed"`) - } - - valuesArg += ' --set hedera-explorer.ingress.enabled=true' - valuesArg += ' --set cloud.haproxyIngressController.enabled=true' - valuesArg += ` --set global.ingressClassName=${namespace}-hedera-explorer-ingress-class` - valuesArg += ` --set-json 'hedera-explorer.ingress.hosts[0]={"host":"${hederaExplorerTlsHostName}","paths":[{"path":"/","pathType":"Prefix"}]}'` - - if (hederaExplorerTlsLoadBalancerIp !== '') { - valuesArg += ` --set haproxy-ingress.controller.service.loadBalancerIP=${hederaExplorerTlsLoadBalancerIp}` - } - - if (tlsClusterIssuerType === 'self-signed') { - valuesArg += ' --set cloud.selfSignedClusterIssuer.enabled=true' - } else { - valuesArg += ' --set cloud.acmeClusterIssuer.enabled=true' - valuesArg += ` --set hedera-explorer.certClusterIssuerType=${tlsClusterIssuerType}` - } - } + /** + * @returns {string} + */ + static get DEPLOY_CONFIGS_NAME () { + return 'deployConfigs' + } - return valuesArg + /** + * @returns {CommandFlag[]} + */ + static get DEPLOY_FLAGS_LIST () { + return [ + flags.apiPermissionProperties, + flags.app, + flags.applicationEnv, + flags.applicationProperties, + flags.bootstrapProperties, + flags.cacheDir, + flags.chainId, + flags.chartDirectory, + flags.enablePrometheusSvcMonitor, + flags.fstChartVersion, + flags.debugNodeId, + flags.log4j2Xml, + flags.namespace, + flags.nodeIDs, + flags.persistentVolumeClaims, + flags.profileFile, + flags.profileName, + flags.releaseTag, + flags.settingTxt, + flags.valuesFile + ] } + /** + * @param {Object} config + * @returns {Promise} + */ async prepareValuesArg (config = {}) { let valuesArg = '' - if (config.chartDir) { - valuesArg = `-f ${config.chartDir}/fullstack-deployment/values.yaml` + if (config.chartDirectory) { + valuesArg = `-f ${path.join(config.chartDirectory, 'fullstack-deployment', 'values.yaml')}` } - if (config.valuesFile) { - valuesArg += this.prepareValuesFiles(config.valuesFile) + if (config.app !== constants.HEDERA_APP_NAME) { + const index = config.nodeIds.length + for (let i = 0; i < index; i++) { + valuesArg += ` --set "hedera.nodes[${i}].root.extraEnv[0].name=JAVA_MAIN_CLASS"` + valuesArg += ` --set "hedera.nodes[${i}].root.extraEnv[0].value=com.swirlds.platform.Browser"` + } + valuesArg = addDebugOptions(valuesArg, config.debugNodeId, 1) + } else { + valuesArg = addDebugOptions(valuesArg, config.debugNodeId) } const profileName = this.configManager.getFlag(flags.profileName) - this.profileValuesFile = await this.profileManager.prepareValuesForFstChart(profileName, config.applicationEnv) + this.profileValuesFile = await this.profileManager.prepareValuesForFstChart(profileName) if (this.profileValuesFile) { valuesArg += this.prepareValuesFiles(this.profileValuesFile) } @@ -82,61 +113,119 @@ export class NetworkCommand extends BaseCommand { valuesArg += ' --set "hedera-mirror-node.enabled=false" --set "hedera-explorer.enabled=false"' valuesArg += ` --set "telemetry.prometheus.svcMonitor.enabled=${config.enablePrometheusSvcMonitor}"` - if (config.enableHederaExplorerTls) { - valuesArg += this.getTlsValueArguments(config.tlsClusterIssuerType, config.enableHederaExplorerTls, config.namespace, - config.hederaExplorerTlsLoadBalancerIp, config.hederaExplorerTlsHostName) - } - if (config.releaseTag) { const rootImage = helpers.getRootImageRepository(config.releaseTag) valuesArg += ` --set 
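
`addDebugOptions(valuesArg, config.debugNodeId, 1)` is called with an explicit index in the non-default-app branch because slot 0 of `extraEnv` is already occupied by `JAVA_MAIN_CLASS`. The helper's body lives in core/helpers.mjs and is not shown in this diff, so the following is only a plausible sketch, consistent with the `--debug-nodeid` flag's description ("default jvm debug port (5005)").

```js
// Hypothetical sketch of helpers.addDebugOptions; verify against the real
// helper. Appends a JDWP agent env var for the node being debugged.
function addDebugOptions (valuesArg, debugNodeId, index = 0) {
  if (!debugNodeId) return valuesArg
  const n = parseInt(debugNodeId.replace(/\D+/g, ''), 10) - 1 // 'node1' -> nodes[0]
  valuesArg += ` --set "hedera.nodes[${n}].root.extraEnv[${index}].name=JAVA_OPTS"`
  valuesArg += ` --set "hedera.nodes[${n}].root.extraEnv[${index}].value=` +
    '-agentlib:jdwp=transport=dt_socket\\,server=y\\,suspend=y\\,address=*:5005"'
  return valuesArg
}

console.log(addDebugOptions('', 'node2', 1))
```

Whatever the exact env var, the index collision is the design point: the second slot must be used whenever slot 0 is already taken.
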
"defaults.root.image.repository=${rootImage}"` } + valuesArg += ` --set "defaults.volumeClaims.enabled=${config.persistentVolumeClaims}"` + + if (config.valuesFile) { + valuesArg += this.prepareValuesFiles(config.valuesFile) + } + this.logger.debug('Prepared helm chart values', { valuesArg }) return valuesArg } + /** + * @param task + * @param {Object} argv + * @returns {Promise} + */ async prepareConfig (task, argv) { - const flagList = [ - flags.releaseTag, // we need it to determine which version of root image(Java17 or Java21) we should use - flags.namespace, - flags.nodeIDs, - flags.chartDirectory, - flags.valuesFile, - flags.tlsClusterIssuerType, - flags.enableHederaExplorerTls, - flags.hederaExplorerTlsHostName, - flags.enablePrometheusSvcMonitor, - flags.profileFile, - flags.profileName - ] - this.configManager.update(argv) this.logger.debug('Loaded cached config', { config: this.configManager.config }) - await prompts.execute(task, this.configManager, flagList) + + // disable the prompts that we don't want to prompt the user for + prompts.disablePrompts([ + flags.apiPermissionProperties, + flags.app, + flags.applicationEnv, + flags.applicationProperties, + flags.bootstrapProperties, + flags.cacheDir, + flags.chainId, + flags.debugNodeId, + flags.log4j2Xml, + flags.persistentVolumeClaims, + flags.profileName, + flags.profileFile, + flags.settingTxt + ]) + + await prompts.execute(task, this.configManager, NetworkCommand.DEPLOY_FLAGS_LIST) + + /** + * @typedef {Object} NetworkDeployConfigClass + * -- flags -- + * @property {string} applicationEnv + * @property {string} cacheDir + * @property {string} chartDirectory + * @property {boolean} enablePrometheusSvcMonitor + * @property {string} fstChartVersion + * @property {string} namespace + * @property {string} nodeIDs + * @property {string} persistentVolumeClaims + * @property {string} profileFile + * @property {string} profileName + * @property {string} releaseTag + * -- extra args -- + * @property {string} chartPath + * @property {string} keysDir + * @property {string[]} nodeIds + * @property {string} stagingDir + * @property {string} stagingKeysDir + * @property {string} valuesArg + * -- methods -- + * @property {getUnusedConfigs} getUnusedConfigs + */ + /** + * @callback getUnusedConfigs + * @returns {string[]} + */ // create a config object for subsequent steps - const config = { - releaseTag: this.configManager.getFlag(flags.releaseTag), - namespace: this.configManager.getFlag(flags.namespace), - nodeIds: helpers.parseNodeIds(this.configManager.getFlag(flags.nodeIDs)), - chartDir: this.configManager.getFlag(flags.chartDirectory), - fstChartVersion: this.configManager.getFlag(flags.fstChartVersion), - valuesFile: this.configManager.getFlag(flags.valuesFile), - tlsClusterIssuerType: this.configManager.getFlag(flags.tlsClusterIssuerType), - enableHederaExplorerTls: this.configManager.getFlag(flags.enableHederaExplorerTls), - hederaExplorerTlsHostName: this.configManager.getFlag(flags.hederaExplorerTlsHostName), - enablePrometheusSvcMonitor: this.configManager.getFlag(flags.enablePrometheusSvcMonitor), - applicationEnv: this.configManager.getFlag(flags.applicationEnv) - } + const config = /** @type {NetworkDeployConfigClass} **/ this.getConfig(NetworkCommand.DEPLOY_CONFIGS_NAME, NetworkCommand.DEPLOY_FLAGS_LIST, + [ + 'chartPath', + 'keysDir', + 'nodeIds', + 'stagingDir', + 'stagingKeysDir', + 'valuesArg' + ]) + + config.nodeIds = helpers.parseNodeIds(config.nodeIDs) // compute values - config.hederaExplorerTlsLoadBalancerIp = 
argv.hederaExplorerTlsLoadBalancerIp - config.chartPath = await this.prepareChartPath(config.chartDir, + config.chartPath = await this.prepareChartPath(config.chartDirectory, constants.FULLSTACK_TESTING_CHART, constants.FULLSTACK_DEPLOYMENT_CHART) config.valuesArg = await this.prepareValuesArg(config) + // compute other config parameters + config.keysDir = path.join(validatePath(config.cacheDir), 'keys') + config.stagingDir = Templates.renderStagingDir( + config.cacheDir, + config.releaseTag + ) + config.stagingKeysDir = path.join(validatePath(config.stagingDir), 'keys') + + if (!await this.k8.hasNamespace(config.namespace)) { + await this.k8.createNamespace(config.namespace) + } + + // prepare staging keys directory + if (!fs.existsSync(config.stagingKeysDir)) { + fs.mkdirSync(config.stagingKeysDir, { recursive: true }) + } + + // create cached keys dir if it does not exist yet + if (!fs.existsSync(config.keysDir)) { + fs.mkdirSync(config.keysDir) + } + this.logger.debug('Prepared config', { config, cachedConfig: this.configManager.config @@ -146,8 +235,8 @@ export class NetworkCommand extends BaseCommand { /** * Run helm install and deploy network components - * @param argv - * @return {Promise} + * @param {Object} argv + * @returns {Promise} */ async deploy (argv) { const self = this @@ -156,22 +245,67 @@ export class NetworkCommand extends BaseCommand { { title: 'Initialize', task: async (ctx, task) => { - ctx.config = await self.prepareConfig(task, argv) + ctx.config = /** @type {NetworkDeployConfigClass} **/ await self.prepareConfig(task, argv) + } + }, + { + title: 'Prepare staging directory', + task: async (ctx, parentTask) => { + const subTasks = [ + { + title: 'Copy Gossip keys to staging', + task: async (ctx, _) => { + const config = /** @type {NetworkDeployConfigClass} **/ ctx.config + + await this.keyManager.copyGossipKeysToStaging(config.keysDir, config.stagingKeysDir, config.nodeIds) + } + }, + { + title: 'Copy gRPC TLS keys to staging', + task: async (ctx, _) => { + const config = /** @type {NetworkDeployConfigClass} **/ ctx.config + for (const nodeId of config.nodeIds) { + const tlsKeyFiles = self.keyManager.prepareTLSKeyFilePaths(nodeId, config.keysDir) + await self.keyManager.copyNodeKeysToStaging(tlsKeyFiles, config.stagingKeysDir) + } + } + } + ] + + return parentTask.newListr(subTasks, { + concurrent: false, + rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION + }) + } + }, + { + title: 'Copy node keys to secrets', + task: async (ctx, parentTask) => { + const config = /** @type {NetworkDeployConfigClass} **/ ctx.config + + const subTasks = self.platformInstaller.copyNodeKeys(config.stagingDir, config.nodeIds) + + // set up the sub-tasks + return parentTask.newListr(subTasks, { + concurrent: true, + rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION + }) } }, { title: `Install chart '${constants.FULLSTACK_DEPLOYMENT_CHART}'`, task: async (ctx, _) => { - if (await self.chartManager.isChartInstalled(ctx.config.namespace, constants.FULLSTACK_DEPLOYMENT_CHART)) { - await self.chartManager.uninstall(ctx.config.namespace, constants.FULLSTACK_DEPLOYMENT_CHART) + const config = /** @type {NetworkDeployConfigClass} **/ ctx.config + if (await self.chartManager.isChartInstalled(config.namespace, constants.FULLSTACK_DEPLOYMENT_CHART)) { + await self.chartManager.uninstall(config.namespace, constants.FULLSTACK_DEPLOYMENT_CHART) } await this.chartManager.install( - ctx.config.namespace, + config.namespace, constants.FULLSTACK_DEPLOYMENT_CHART, - ctx.config.chartPath, - 
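
`Templates.renderStagingDir` now takes `(cacheDir, releaseTag)` instead of the config manager, which implies a purely path-derived staging location. The layout below is an assumption to check against Templates; the mkdir steps mirror `prepareConfig` above.

```js
import path from 'path'
import fs from 'fs'
import os from 'os'

// Assumed derivation (verify against Templates.renderStagingDir):
function renderStagingDir (cacheDir, releaseTag) {
  return path.resolve(cacheDir, releaseTag, 'staging')
}

const cacheDir = path.join(os.homedir(), '.solo', 'cache') // illustrative
const keysDir = path.join(cacheDir, 'keys')
const stagingDir = renderStagingDir(cacheDir, 'v0.49.0')
const stagingKeysDir = path.join(stagingDir, 'keys')

// Same effect as the prepareConfig steps: both key directories must exist
// before gossip/TLS keys are copied into staging and then into k8s secrets.
for (const dir of [keysDir, stagingKeysDir]) fs.mkdirSync(dir, { recursive: true })
console.log({ keysDir, stagingDir, stagingKeysDir })
```
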
ctx.config.fstChartVersion, - ctx.config.valuesArg) + config.chartPath, + config.fstChartVersion, + config.valuesArg) } }, { @@ -179,13 +313,14 @@ export class NetworkCommand extends BaseCommand { task: async (ctx, task) => { const subTasks = [] + const config = /** @type {NetworkDeployConfigClass} **/ ctx.config // nodes - for (const nodeId of ctx.config.nodeIds) { + for (const nodeId of config.nodeIds) { subTasks.push({ title: `Check Node: ${chalk.yellow(nodeId)}`, - task: () => - self.k8.waitForPods([constants.POD_PHASE_RUNNING], [ + task: async () => + await self.k8.waitForPods([constants.POD_PHASE_RUNNING], [ 'fullstack.hedera.com/type=network-node', `fullstack.hedera.com/node-name=${nodeId}` ], 1, 60 * 15, 1000) // timeout 15 minutes @@ -206,24 +341,25 @@ export class NetworkCommand extends BaseCommand { task: async (ctx, task) => { const subTasks = [] + const config = /** @type {NetworkDeployConfigClass} **/ ctx.config // HAProxy - for (const nodeId of ctx.config.nodeIds) { + for (const nodeId of config.nodeIds) { subTasks.push({ title: `Check HAProxy for: ${chalk.yellow(nodeId)}`, - task: () => - self.k8.waitForPods([constants.POD_PHASE_RUNNING], [ + task: async () => + await self.k8.waitForPods([constants.POD_PHASE_RUNNING], [ 'fullstack.hedera.com/type=haproxy' ], 1, 60 * 15, 1000) // timeout 15 minutes }) } // Envoy Proxy - for (const nodeId of ctx.config.nodeIds) { + for (const nodeId of config.nodeIds) { subTasks.push({ title: `Check Envoy Proxy for: ${chalk.yellow(nodeId)}`, - task: () => - self.k8.waitForPods([constants.POD_PHASE_RUNNING], [ + task: async () => + await self.k8.waitForPods([constants.POD_PHASE_RUNNING], [ 'fullstack.hedera.com/type=envoy-proxy' ], 1, 60 * 15, 1000) // timeout 15 minutes }) @@ -247,8 +383,8 @@ export class NetworkCommand extends BaseCommand { // minio subTasks.push({ title: 'Check MinIO', - task: () => - self.k8.waitForPodReady([ + task: async () => + await self.k8.waitForPodReady([ 'v1.min.io/tenant=minio' ], 1, 60 * 5, 1000) // timeout 5 minutes }) @@ -278,8 +414,8 @@ export class NetworkCommand extends BaseCommand { /** * Run helm uninstall and destroy network components - * @param argv - * @return {Promise} + * @param {Object} argv + * @returns {Promise} */ async destroy (argv) { const self = this @@ -362,8 +498,8 @@ export class NetworkCommand extends BaseCommand { /** * Run helm upgrade to refresh network components with new settings - * @param argv - * @return {Promise} + * @param {Object} argv + * @returns {Promise} */ async refresh (argv) { const self = this @@ -378,12 +514,13 @@ export class NetworkCommand extends BaseCommand { { title: `Upgrade chart '${constants.FULLSTACK_DEPLOYMENT_CHART}'`, task: async (ctx, _) => { + const config = ctx.config await this.chartManager.upgrade( - ctx.config.namespace, + config.namespace, constants.FULLSTACK_DEPLOYMENT_CHART, - ctx.config.chartPath, - ctx.config.valuesArg, - ctx.config.fstChartVersion + config.chartPath, + config.valuesArg, + config.fstChartVersion ) } }, @@ -409,34 +546,23 @@ export class NetworkCommand extends BaseCommand { return true } + /** + * @param {NetworkCommand} networkCmd + * @returns {{command: string, desc: string, builder: Function}} + */ static getCommandDefinition (networkCmd) { if (!networkCmd || !(networkCmd instanceof NetworkCommand)) { throw new IllegalArgumentError('An instance of NetworkCommand is required', networkCmd) } return { command: 'network', - desc: 'Manage fullstack testing network deployment', + desc: 'Manage solo network deployment', builder: yargs 
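
A note on the polling arguments repeated above: reading the call sites together with their inline comments, `waitForPods(phases, labels, podCount, maxAttempts, delay)` is invoked as `1, 60 * 15, 1000`, i.e. up to 900 one-second polls for the 15-minute checks, and `1, 60 * 5, 1000` for the 5-minute MinIO check. The parameter naming here is inferred from those comments, not from core/K8 itself.

```js
// Inferred time budget of the readiness loops above (assumed parameter order):
const cases = [
  { what: 'network node / proxies', maxAttempts: 60 * 15, delayMs: 1000 },
  { what: 'MinIO tenant', maxAttempts: 60 * 5, delayMs: 1000 }
]
for (const { what, maxAttempts, delayMs } of cases) {
  console.log(`${what}: up to ${maxAttempts} polls, ~${(maxAttempts * delayMs) / 60000} min worst case`)
}
```
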
=> { return yargs .command({ command: 'deploy', - desc: 'Deploy fullstack testing network', - builder: y => flags.setCommandFlags(y, - flags.releaseTag, - flags.namespace, - flags.nodeIDs, - flags.chartDirectory, - flags.valuesFile, - flags.tlsClusterIssuerType, - flags.enableHederaExplorerTls, - flags.hederaExplorerTlsLoadBalancerIp, - flags.hederaExplorerTlsHostName, - flags.enablePrometheusSvcMonitor, - flags.fstChartVersion, - flags.profileFile, - flags.profileName, - flags.applicationEnv - ), + desc: 'Deploy solo network', + builder: y => flags.setCommandFlags(y, ...NetworkCommand.DEPLOY_FLAGS_LIST), handler: argv => { networkCmd.logger.debug('==== Running \'network deploy\' ===') networkCmd.logger.debug(argv) @@ -453,7 +579,7 @@ export class NetworkCommand extends BaseCommand { }) .command({ command: 'destroy', - desc: 'Destroy fullstack testing network', + desc: 'Destroy solo network', builder: y => flags.setCommandFlags(y, flags.deletePvcs, flags.deleteSecrets, @@ -476,20 +602,8 @@ export class NetworkCommand extends BaseCommand { }) .command({ command: 'refresh', - desc: 'Refresh fullstack testing network deployment', - builder: y => flags.setCommandFlags(y, - flags.namespace, - flags.chartDirectory, - flags.valuesFile, - flags.deployMirrorNode, - flags.deployHederaExplorer, - flags.tlsClusterIssuerType, - flags.enableHederaExplorerTls, - flags.hederaExplorerTlsLoadBalancerIp, - flags.hederaExplorerTlsHostName, - flags.enablePrometheusSvcMonitor, - flags.applicationEnv - ), + desc: 'Refresh solo network deployment', + builder: y => flags.setCommandFlags(y, ...NetworkCommand.DEPLOY_FLAGS_LIST), handler: argv => { networkCmd.logger.debug('==== Running \'chart upgrade\' ===') networkCmd.logger.debug(argv) diff --git a/src/commands/node.mjs b/src/commands/node.mjs index 2443ed61d..4216c8716 100644 --- a/src/commands/node.mjs +++ b/src/commands/node.mjs @@ -14,32 +14,62 @@ * limitations under the License. 
* */ +'use strict' +import * as x509 from '@peculiar/x509' import chalk from 'chalk' import * as fs from 'fs' -import { readFile, writeFile } from 'fs/promises' import { Listr } from 'listr2' import path from 'path' import { FullstackTestingError, IllegalArgumentError } from '../core/errors.mjs' import * as helpers from '../core/helpers.mjs' -import { getNodeLogs, getTmpDir, sleep, validatePath } from '../core/helpers.mjs' -import { constants, Templates } from '../core/index.mjs' +import { + addDebugOptions, + getNodeAccountMap, + getNodeLogs, + renameAndCopyFile, + sleep, + validatePath +} from '../core/helpers.mjs' +import { constants, Templates, Zippy } from '../core/index.mjs' import { BaseCommand } from './base.mjs' import * as flags from './flags.mjs' import * as prompts from './prompts.mjs' + import { + AccountBalanceQuery, AccountId, - FileContentsQuery, - FileId, + AccountUpdateTransaction, + FileAppendTransaction, + FileUpdateTransaction, FreezeTransaction, FreezeType, + PrivateKey, + NodeCreateTransaction, + NodeUpdateTransaction, + NodeDeleteTransaction, + ServiceEndpoint, Timestamp } from '@hashgraph/sdk' import * as crypto from 'crypto' +import { + DEFAULT_NETWORK_NODE_NAME, + FREEZE_ADMIN_ACCOUNT, + HEDERA_NODE_DEFAULT_STAKE_AMOUNT, + TREASURY_ACCOUNT_ID, + LOCAL_HOST +} from '../core/constants.mjs' +import { NodeStatusCodes, NodeStatusEnums } from '../core/enumerations.mjs' /** * Defines the core functionalities of 'node' command */ export class NodeCommand extends BaseCommand { + /** + * @param {{logger: Logger, helm: Helm, k8: K8, chartManager: ChartManager, configManager: ConfigManager, + * depManager: DependencyManager, keytoolDepManager: KeytoolDependencyManager, downloader: PackageDownloader, + * platformInstaller: PlatformInstaller, keyManager: KeyManager, accountManager: AccountManager, + * profileManager: ProfileManager}} opts + */ constructor (opts) { super(opts) @@ -48,15 +78,204 @@ export class NodeCommand extends BaseCommand { if (!opts || !opts.keyManager) throw new IllegalArgumentError('An instance of core/KeyManager is required', opts.keyManager) if (!opts || !opts.accountManager) throw new IllegalArgumentError('An instance of core/AccountManager is required', opts.accountManager) if (!opts || !opts.keytoolDepManager) throw new IllegalArgumentError('An instance of KeytoolDependencyManager is required', opts.keytoolDepManager) + if (!opts || !opts.profileManager) throw new IllegalArgumentError('An instance of ProfileManager is required', opts.profileManager) this.downloader = opts.downloader this.platformInstaller = opts.platformInstaller this.keyManager = opts.keyManager this.accountManager = opts.accountManager this.keytoolDepManager = opts.keytoolDepManager + this.profileManager = opts.profileManager this._portForwards = [] } + /** + * @returns {string} + */ + static get SETUP_CONFIGS_NAME () { + return 'setupConfigs' + } + + /** + * @returns {CommandFlag[]} + */ + static get SETUP_FLAGS_LIST () { + return [ + flags.app, + flags.appConfig, + flags.cacheDir, + flags.devMode, + flags.localBuildPath, + flags.namespace, + flags.nodeIDs, + flags.releaseTag + ] + } + + /** + * @returns {string} + */ + static get KEYS_CONFIGS_NAME () { + return 'keysConfigs' + } + + /** + * @returns {CommandFlag[]} + */ + static get KEYS_FLAGS_LIST () { + return [ + flags.cacheDir, + flags.devMode, + flags.generateGossipKeys, + flags.generateTlsKeys, + flags.nodeIDs + ] + } + + /** + * @returns {string} + */ + static get REFRESH_CONFIGS_NAME () { + return 'refreshConfigs' + } + + 
/** + * @returns {CommandFlag[]} + */ + static get REFRESH_FLAGS_LIST () { + return [ + flags.app, + flags.cacheDir, + flags.devMode, + flags.localBuildPath, + flags.namespace, + flags.nodeIDs, + flags.releaseTag + ] + } + + /** + * @returns {string} + */ + static get ADD_CONFIGS_NAME () { + return 'addConfigs' + } + + /** + * @returns {CommandFlag[]} + */ + static get COMMON_ADD_FLAGS_LIST () { + return [ + flags.app, + flags.cacheDir, + flags.chainId, + flags.chartDirectory, + flags.devMode, + flags.debugNodeId, + flags.endpointType, + flags.fstChartVersion, + flags.generateGossipKeys, + flags.generateTlsKeys, + flags.gossipEndpoints, + flags.grpcEndpoints, + flags.localBuildPath, + flags.namespace, + flags.releaseTag + ] + } + + /** + * @returns {CommandFlag[]} + */ + static get ADD_FLAGS_LIST () { + const commonFlags = NodeCommand.COMMON_ADD_FLAGS_LIST + return [ + ...commonFlags, + flags.adminKey + ] + } + + /** + * @returns {CommandFlag[]} + */ + static get ADD_PREPARE_FLAGS_LIST () { + const commonFlags = NodeCommand.COMMON_ADD_FLAGS_LIST + return [ + ...commonFlags, + flags.adminKey, + flags.outputDir + ] + } + + /** + * @returns {CommandFlag[]} + */ + static get ADD_SUBMIT_TRANSACTIONS_FLAGS_LIST () { + const commonFlags = NodeCommand.COMMON_ADD_FLAGS_LIST + return [ + ...commonFlags, + flags.inputDir + ] + } + + /** + * @returns {CommandFlag[]} + */ + static get ADD_EXECUTE_FLAGS_LIST () { + const commonFlags = NodeCommand.COMMON_ADD_FLAGS_LIST + return [ + ...commonFlags, + flags.inputDir + ] + } + + static get DELETE_CONFIGS_NAME () { + return 'deleteConfigs' + } + + static get DELETE_FLAGS_LIST () { + return [ + flags.app, + flags.cacheDir, + flags.chartDirectory, + flags.devMode, + flags.debugNodeId, + flags.endpointType, + flags.localBuildPath, + flags.namespace, + flags.nodeID, + flags.releaseTag + ] + } + + static get UPDATE_CONFIGS_NAME () { + return 'updateConfigs' + } + + static get UPDATE_FLAGS_LIST () { + return [ + flags.app, + flags.cacheDir, + flags.chartDirectory, + flags.devMode, + flags.debugNodeId, + flags.endpointType, + flags.fstChartVersion, + flags.gossipEndpoints, + flags.gossipPrivateKey, + flags.gossipPublicKey, + flags.grpcEndpoints, + flags.localBuildPath, + flags.namespace, + flags.newAccountNumber, + flags.newAdminKey, + flags.nodeID, + flags.releaseTag, + flags.tlsPrivateKey, + flags.tlsPublicKey + ] + } + /** * stops and closes the port forwards * @returns {Promise} @@ -72,6 +291,60 @@ export class NodeCommand extends BaseCommand { this._portForwards = [] } + /** + * @param {string} namespace + * @param {string} accountId + * @param {string} nodeId + * @returns {Promise} + */ + async addStake (namespace, accountId, nodeId) { + try { + await this.accountManager.loadNodeClient(namespace) + const client = this.accountManager._nodeClient + const treasuryKey = await this.accountManager.getTreasuryAccountKeys(namespace) + const treasuryPrivateKey = PrivateKey.fromStringED25519(treasuryKey.privateKey) + client.setOperator(TREASURY_ACCOUNT_ID, treasuryPrivateKey) + + // get some initial balance + await this.accountManager.transferAmount(constants.TREASURY_ACCOUNT_ID, accountId, HEDERA_NODE_DEFAULT_STAKE_AMOUNT + 1) + + // check balance + const balance = await new AccountBalanceQuery() + .setAccountId(accountId) + .execute(client) + this.logger.debug(`Account ${accountId} balance: ${balance.hbars}`) + + // Create the transaction + const transaction = await new AccountUpdateTransaction() + .setAccountId(accountId) + 
.setStakedNodeId(Templates.nodeNumberFromNodeId(nodeId) - 1) + .freezeWith(client) + + // Sign the transaction with the account's private key + const signTx = await transaction.sign(treasuryPrivateKey) + + // Submit the transaction to a Hedera network + const txResponse = await signTx.execute(client) + + // Request the receipt of the transaction + const receipt = await txResponse.getReceipt(client) + + // Get the transaction status + const transactionStatus = receipt.status + this.logger.debug(`The transaction consensus status is ${transactionStatus.toString()}`) + } catch (e) { + throw new FullstackTestingError(`Error in adding stake: ${e.message}`, e) + } + } + + /** + * Check if the network node pod is running + * @param {string} namespace + * @param {string} nodeId + * @param {number} [maxAttempts] + * @param {number} [delay] + * @returns {Promise} + */ async checkNetworkNodePod (namespace, nodeId, maxAttempts = 60, delay = 2000) { nodeId = nodeId.trim() const podName = Templates.renderNetworkPodName(nodeId) @@ -88,69 +361,125 @@ export class NodeCommand extends BaseCommand { } } - async checkNetworkNodeState (nodeId, maxAttempt = 100, status = 'ACTIVE', logfile = 'output/hgcaa.log') { + /** + * @param {string} namespace + * @param {string} nodeId + * @param {TaskWrapper} task + * @param {string} title + * @param {number} index + * @param {number} [status] + * @param {number} [maxAttempts] + * @param {number} [delay] + * @param {number} [timeout] + * @returns {Promise} + */ + async checkNetworkNodeActiveness (namespace, nodeId, task, title, index, + status = NodeStatusCodes.ACTIVE, maxAttempts = 120, delay = 1_000, timeout = 1_000) { nodeId = nodeId.trim() const podName = Templates.renderNetworkPodName(nodeId) - const logfilePath = `${constants.HEDERA_HAPI_PATH}/${logfile}` + const podPort = 9_999 + const localPort = 19_000 + index + task.title = `${title} - status ${chalk.yellow('STARTING')}, attempt ${chalk.blueBright(`0/${maxAttempts}`)}` + + const srv = await this.k8.portForward(podName, localPort, podPort) + let attempt = 0 - let isActive = false + let success = false + while (attempt < maxAttempts) { + const controller = new AbortController() + + const timeoutId = setTimeout(() => { + task.title = `${title} - status ${chalk.yellow('TIMEOUT')}, attempt ${chalk.blueBright(`${attempt}/${maxAttempts}`)}` + controller.abort() + }, timeout) - this.logger.debug(`Checking if node ${nodeId} is ${status}...`) - // check log file is accessible - let logFileAccessible = false - while (attempt++ < maxAttempt) { try { - if (await this.k8.hasFile(podName, constants.ROOT_CONTAINER, logfilePath)) { - logFileAccessible = true - break + const url = `http://${LOCAL_HOST}:${localPort}/metrics` + const response = await fetch(url, { signal: controller.signal }) + + if (!response.ok) { + task.title = `${title} - status ${chalk.yellow('UNKNOWN')}, attempt ${chalk.blueBright(`${attempt}/${maxAttempts}`)}` + clearTimeout(timeoutId) + throw new Error() // Guard } - } catch (e) { - } // ignore errors - await sleep(1000) - } + const text = await response.text() + const statusLine = text + .split('\n') + .find(line => line.startsWith('platform_PlatformStatus')) - if (!logFileAccessible) { - throw new FullstackTestingError(`Logs are not accessible: ${logfilePath}`) - } + if (!statusLine) { + task.title = `${title} - status ${chalk.yellow('STARTING')}, attempt: ${chalk.blueBright(`${attempt}/${maxAttempts}`)}` + clearTimeout(timeoutId) + throw new Error() // Guard + } - attempt = 0 - while (attempt < maxAttempt) 
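
One review point in `addStake`: `setStakedNodeId(Templates.nodeNumberFromNodeId(nodeId) - 1)` encodes the off-by-one between one-based node names and zero-based consensus node ids. Assuming `nodeNumberFromNodeId` simply extracts the trailing number (its body is not shown in this diff), the mapping is:

```js
// Assumed behaviour of Templates.nodeNumberFromNodeId: 'node1' -> 1
function nodeNumberFromNodeId (nodeId) {
  return parseInt(nodeId.replace(/\D+/g, ''), 10)
}

for (const nodeId of ['node1', 'node2', 'node99']) {
  console.log(`${nodeId} -> setStakedNodeId(${nodeNumberFromNodeId(nodeId) - 1})`)
}
// node1 -> 0, node2 -> 1, node99 -> 98
```
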
{ - try { - const output = await this.k8.execContainer(podName, constants.ROOT_CONTAINER, ['tail', '-100', logfilePath]) - if (output && output.indexOf('Terminating Netty') < 0 && // make sure we are not at the beginning of a restart - (output.indexOf(`Now current platform status = ${status}`) > 0 || - output.indexOf(`Platform Status Change ${status}`) > 0 || - output.indexOf(`is ${status}`) > 0)) { // 'is ACTIVE' is for newer versions, first seen in v0.49.0 - this.logger.debug(`Node ${nodeId} is ${status} [ attempt: ${attempt}/${maxAttempt}]`) - isActive = true + const statusNumber = parseInt(statusLine.split(' ').pop()) + + if (statusNumber === status) { + task.title = `${title} - status ${chalk.green(NodeStatusEnums[status])}, attempt: ${chalk.blueBright(`${attempt}/${maxAttempts}`)}` + success = true + clearTimeout(timeoutId) break + } else if (statusNumber === NodeStatusCodes.CATASTROPHIC_FAILURE) { + task.title = `${title} - status ${chalk.red('CATASTROPHIC_FAILURE')}, attempt: ${chalk.blueBright(`${attempt}/${maxAttempts}`)}` + break + } else if (statusNumber) { + task.title = `${title} - status ${chalk.yellow(NodeStatusEnums[statusNumber])}, attempt: ${chalk.blueBright(`${attempt}/${maxAttempts}`)}` } - this.logger.debug(`Node ${nodeId} is not ${status} yet. Trying again... [ attempt: ${attempt}/${maxAttempt} ]`) - } catch (e) { - this.logger.warn(`error in checking if node ${nodeId} is ${status}: ${e.message}. Trying again... [ attempt: ${attempt}/${maxAttempt} ]`) - - // ls the HAPI path for debugging - await this.k8.execContainer(podName, constants.ROOT_CONTAINER, `ls -la ${constants.HEDERA_HAPI_PATH}`) + clearTimeout(timeoutId) + } catch {} // Catch all guard and fetch errors - // ls the output directory for debugging - await this.k8.execContainer(podName, constants.ROOT_CONTAINER, `ls -la ${constants.HEDERA_HAPI_PATH}/output`) - } - attempt += 1 - await sleep(1000) + attempt++ + clearTimeout(timeoutId) + await sleep(delay) } - this.logger.info(`!> -- Node ${nodeId} is ${status} -- } + */ + checkNodeActivenessTask (ctx, task, nodeIds, status = NodeStatusCodes.ACTIVE) { + const { config: { namespace } } = ctx + + const subTasks = nodeIds.map((nodeId, i) => { + const reminder = ('debugNodeId' in ctx.config && ctx.config.debugNodeId === nodeId) ? 'Please attach JVM debugger now.' : '' + const title = `Check network pod: ${chalk.yellow(nodeId)} ${chalk.red(reminder)}` + + const subTask = async (ctx, task) => { + ctx.config.podNames[nodeId] = await this.checkNetworkNodeActiveness(namespace, nodeId, task, title, i, status) + } + + return { title, task: subTask } + }) + + return task.newListr(subTasks, { + concurrent: true, + rendererOptions: { + collapseSubtasks: false + } + }) } /** * Return task for checking for all network node pods + * @param {any} ctx + * @param {TaskWrapper} task + * @param {string[]} nodeIds + * @returns {*} */ taskCheckNetworkNodePods (ctx, task, nodeIds) { if (!ctx.config) { @@ -179,168 +508,293 @@ export class NodeCommand extends BaseCommand { } /** - * Return a list of subtasks to generate gossip keys - * - * WARNING: These tasks MUST run in sequence. 
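
The rewritten activeness check polls the node's Prometheus metrics endpoint (port 9999, port-forwarded locally) instead of tailing hgcaa.log. For reference, the parsing above expects a line of the shape below; the numeric value shown for ACTIVE is a placeholder, since the real mapping lives in NodeStatusCodes/NodeStatusEnums (core/enumerations.mjs), which this diff does not include.

```js
// Standalone restatement of the status extraction in checkNetworkNodeActiveness.
const sampleMetrics = [
  '# TYPE platform_PlatformStatus gauge',
  'platform_PlatformStatus 3' // hypothetical: assume 3 maps to ACTIVE in NodeStatusCodes
].join('\n')

const statusLine = sampleMetrics.split('\n').find(line => line.startsWith('platform_PlatformStatus'))
const statusNumber = parseInt(statusLine.split(' ').pop())
console.log(statusNumber) // compared against NodeStatusCodes.ACTIVE / CATASTROPHIC_FAILURE
```
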
- * - * @param keyFormat key format (pem | pfx) - * @param nodeIds node ids - * @param keysDir keys directory - * @param curDate current date - * @param allNodeIds includes the nodeIds to get new keys as well as existing nodeIds that will be included in the public.pfx file - * @return a list of subtasks - * @private + * Return task for checking for all network node pods + * @param {any} ctx + * @param {TaskWrapper} task + * @param {string[]} nodeIds + * @returns {*} */ - _nodeGossipKeysTaskList (keyFormat, nodeIds, keysDir, curDate = new Date(), allNodeIds = null) { - allNodeIds = allNodeIds || nodeIds - if (!Array.isArray(nodeIds) || !nodeIds.every((nodeId) => typeof nodeId === 'string')) { - throw new IllegalArgumentError('nodeIds must be an array of strings') + checkPodRunningTask (ctx, task, nodeIds) { + const subTasks = [] + for (const nodeId of nodeIds) { + subTasks.push({ + title: `Check Node: ${chalk.yellow(nodeId)}`, + task: async () => + await this.k8.waitForPods([constants.POD_PHASE_RUNNING], [ + 'fullstack.hedera.com/type=network-node', + `fullstack.hedera.com/node-name=${nodeId}` + ], 1, 60 * 15, 1000) // timeout 15 minutes + }) } - const self = this + + // set up the sub-tasks + return task.newListr(subTasks, { + concurrent: false, // no need to run concurrently since if one node is up, the rest should be up by then + rendererOptions: { + collapseSubtasks: false + } + }) + } + + /** + * Return task for setup network nodes + * @param {any} ctx + * @param {TaskWrapper} task + * @param {string[]} nodeIds + * @returns {*} + */ + setupNodesTask (ctx, task, nodeIds) { + const subTasks = [] + for (const nodeId of nodeIds) { + const podName = ctx.config.podNames[nodeId] + subTasks.push({ + title: `Node: ${chalk.yellow(nodeId)}`, + task: () => + this.platformInstaller.taskSetup(podName) + }) + } + + // set up the sub-tasks + return task.newListr(subTasks, { + concurrent: true, + rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION + }) + } + + /** + * Return task for start network node hedera service + * @param {TaskWrapper} task + * @param {string[]} podNames + * @param {string[]} nodeIds + * @returns {*} + */ + startNetworkNodesTask (task, podNames, nodeIds) { const subTasks = [] + // ctx.config.allNodeIds = ctx.config.existingNodeIds + this.startNodes(podNames, nodeIds, subTasks) - switch (keyFormat) { - case constants.KEY_FORMAT_PFX: { - const tmpDir = getTmpDir() - const keytool = self.keytoolDepManager.getKeytool() - - subTasks.push({ - title: `Check keytool exists (Version: ${self.keytoolDepManager.getKeytoolVersion()})`, - task: async () => self.keytoolDepManager.checkVersion(true) - - }) - - subTasks.push({ - title: 'Backup old files', - task: () => helpers.backupOldPfxKeys(nodeIds, keysDir, curDate) - }) - - for (const nodeId of nodeIds) { - subTasks.push({ - title: `Generate ${Templates.renderGossipPfxPrivateKeyFile(nodeId)} for node: ${chalk.yellow(nodeId)}`, - task: async () => { - const privatePfxFile = await self.keyManager.generatePrivatePfxKeys(keytool, nodeId, keysDir, tmpDir) - const output = await keytool.list(`-storetype pkcs12 -storepass password -keystore ${privatePfxFile}`) - if (!output.includes('Your keystore contains 3 entries')) { - throw new FullstackTestingError(`malformed private pfx file: ${privatePfxFile}`) - } - } - }) - } + // set up the sub-tasks + return task.newListr(subTasks, { + concurrent: true, + rendererOptions: { + collapseSubtasks: false, + timer: constants.LISTR_DEFAULT_RENDERER_TIMER_OPTION + } + }) + } - subTasks.push({ - title: 
`Generate ${constants.PUBLIC_PFX} file`, - task: async () => { - const publicPfxFile = await self.keyManager.updatePublicPfxKey(self.keytoolDepManager.getKeytool(), allNodeIds, keysDir, tmpDir) - const output = await keytool.list(`-storetype pkcs12 -storepass password -keystore ${publicPfxFile}`) - if (!output.includes(`Your keystore contains ${allNodeIds.length * 3} entries`)) { - throw new FullstackTestingError(`malformed public.pfx file: ${publicPfxFile}`) - } - } - }) + /** + * Return task for check if node proxies are ready + * @param {any} ctx + * @param {TaskWrapper} task + * @param {string[]} nodeIds + * @returns {*} + */ + checkNodesProxiesTask (ctx, task, nodeIds) { + const subTasks = [] + for (const nodeId of nodeIds) { + subTasks.push({ + title: `Check proxy for node: ${chalk.yellow(nodeId)}`, + task: async () => await this.k8.waitForPodReady( + [`app=haproxy-${nodeId}`, 'fullstack.hedera.com/type=haproxy'], + 1, 300, 2000) + }) + } - subTasks.push({ - title: 'Clean up temp files', - task: async () => { - if (fs.existsSync(tmpDir)) { - fs.rmSync(tmpDir, { recursive: true }) - } - } - }) - break + // set up the sub-tasks + return task.newListr(subTasks, { + concurrent: false, + rendererOptions: { + collapseSubtasks: false } + }) + } - case constants.KEY_FORMAT_PEM: { - subTasks.push({ - title: 'Backup old files', - task: () => helpers.backupOldPemKeys(nodeIds, keysDir, curDate) - } - ) + /** + * Transfer some hbar to the node for staking purpose + * @param existingNodeIds + * @return {Promise} + */ + async checkStakingTask (existingNodeIds) { + const accountMap = getNodeAccountMap(existingNodeIds) + for (const nodeId of existingNodeIds) { + const accountId = accountMap.get(nodeId) + await this.accountManager.transferAmount(constants.TREASURY_ACCOUNT_ID, accountId, 1) + } + } - for (const nodeId of nodeIds) { - subTasks.push({ - title: `Gossip ${keyFormat} key for node: ${chalk.yellow(nodeId)}`, - task: async () => { - const signingKey = await this.keyManager.generateSigningKey(nodeId) - const signingKeyFiles = await this.keyManager.storeSigningKey(nodeId, signingKey, keysDir) - this.logger.debug(`generated Gossip signing keys for node ${nodeId}`, { keyFiles: signingKeyFiles }) + /** + * Task for repairing staging directory + * @param ctx + * @param task + * @param keysDir + * @param stagingKeysDir + * @param nodeIds + * @return return task for reparing staging directory + */ + prepareStagingTask (ctx, task, keysDir, stagingKeysDir, nodeIds) { + const subTasks = [ + { + title: 'Copy Gossip keys to staging', + task: async (ctx, _) => { + // const config = /** @type {NodeDeleteConfigClass} **/ ctx.config - const agreementKey = await this.keyManager.generateAgreementKey(nodeId, signingKey) - const agreementKeyFiles = await this.keyManager.storeAgreementKey(nodeId, agreementKey, keysDir) - this.logger.debug(`generated Gossip agreement keys for node ${nodeId}`, { keyFiles: agreementKeyFiles }) - } - }) + await this.keyManager.copyGossipKeysToStaging(keysDir, stagingKeysDir, nodeIds) + } + }, + { + title: 'Copy gRPC TLS keys to staging', + task: async (ctx, _) => { + // const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + for (const nodeId of nodeIds) { + const tlsKeyFiles = this.keyManager.prepareTLSKeyFilePaths(nodeId, keysDir) + await this.keyManager.copyNodeKeysToStaging(tlsKeyFiles, stagingKeysDir) + } } - - break } + ] + return task.newListr(subTasks, { + concurrent: false, + rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION + }) + } - default: - throw new 
FullstackTestingError(`unsupported key-format: ${keyFormat}`) - } + /** + * Return task for copy node key to staging directory + * @param ctx + * @param task + */ + copyNodeKeyTask (ctx, task) { + const subTasks = this.platformInstaller.copyNodeKeys(ctx.config.stagingDir, ctx.config.allNodeIds) - return subTasks + // set up the sub-tasks + return task.newListr(subTasks, { + concurrent: true, + rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION + }) } /** - * Return a list of subtasks to generate gRPC TLS keys - * - * WARNING: These tasks should run in sequence - * - * @param nodeIds node ids - * @param keysDir keys directory - * @param curDate current date - * @return return a list of subtasks - * @private + * Prepare parameter and update the network node chart + * @param ctx */ - _nodeTlsKeyTaskList (nodeIds, keysDir, curDate = new Date()) { - // check if nodeIds is an array of strings - if (!Array.isArray(nodeIds) || !nodeIds.every((nodeId) => typeof nodeId === 'string')) { - throw new FullstackTestingError('nodeIds must be an array of strings') + async chartUpdateTask (ctx) { + const config = ctx.config + + if (!config.serviceMap) { + config.serviceMap = await this.accountManager.getNodeServiceMap(config.namespace) + } + + const index = config.existingNodeIds.length + const nodeId = Templates.nodeNumberFromNodeId(config.nodeId) - 1 + + let valuesArg = '' + for (let i = 0; i < index; i++) { + if ((config.newAccountNumber && i !== nodeId) || !config.newAccountNumber) { // for the case of updating node + valuesArg += ` --set "hedera.nodes[${i}].accountId=${config.serviceMap.get(config.existingNodeIds[i]).accountId}" --set "hedera.nodes[${i}].name=${config.existingNodeIds[i]}"` + } else { + // use new account number for this node id + valuesArg += ` --set "hedera.nodes[${i}].accountId=${config.newAccountNumber}" --set "hedera.nodes[${i}].name=${config.existingNodeIds[i]}"` + } } - const self = this - const nodeKeyFiles = new Map() - const subTasks = [] - subTasks.push({ - title: 'Backup old files', - task: () => helpers.backupOldTlsKeys(nodeIds, keysDir, curDate) + // for the case of adding new node + if (ctx.newNode && ctx.newNode.accountId) { + valuesArg += ` --set "hedera.nodes[${index}].accountId=${ctx.newNode.accountId}" --set "hedera.nodes[${index}].name=${ctx.newNode.name}"` } + this.profileValuesFile = await this.profileManager.prepareValuesForNodeAdd( + path.join(config.stagingDir, 'config.txt'), + path.join(config.stagingDir, 'templates', 'application.properties')) + if (this.profileValuesFile) { + valuesArg += this.prepareValuesFiles(this.profileValuesFile) + } + + valuesArg = addDebugOptions(valuesArg, config.debugNodeId) + + await this.chartManager.upgrade( + config.namespace, + constants.FULLSTACK_DEPLOYMENT_CHART, + config.chartPath, + valuesArg, + config.fstChartVersion ) + } - for (const nodeId of nodeIds) { - subTasks.push({ - title: `TLS key for node: ${chalk.yellow(nodeId)}`, - task: async () => { - const tlsKey = await self.keyManager.generateGrpcTLSKey(nodeId) - const tlsKeyFiles = await self.keyManager.storeTLSKey(nodeId, tlsKey, keysDir) - nodeKeyFiles.set(nodeId, { - tlsKeyFiles - }) - } - }) + /** + * Update account manager and transfer hbar for staking purpose + * @param config + */ + async triggerStakeCalculation (config) { + this.logger.info('sleep 60 seconds for the handler to be able to trigger the network node stake weight recalculate') + await sleep(60000) + const accountMap = getNodeAccountMap(config.allNodeIds) + + if (config.newAccountNumber) { + // 
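
To visualize the helm values `chartUpdateTask` assembles: one `accountId`/`name` pair per existing node, with the node being updated swapped to the new account number. The sample account ids below follow the usual node-account numbering but are illustrative only.

```js
// Standalone restatement of the loop in chartUpdateTask, with sample data.
const existingNodeIds = ['node1', 'node2', 'node3']
const serviceMap = new Map(existingNodeIds.map((id, i) => [id, { accountId: `0.0.${3 + i}` }]))
const updatedNodeIndex = 1 // zero-based index of the node being updated (node2)
const newAccountNumber = '0.0.100'

let valuesArg = ''
existingNodeIds.forEach((id, i) => {
  const accountId = i === updatedNodeIndex ? newAccountNumber : serviceMap.get(id).accountId
  valuesArg += ` --set "hedera.nodes[${i}].accountId=${accountId}" --set "hedera.nodes[${i}].name=${id}"`
})
console.log(valuesArg)
// --set "hedera.nodes[0].accountId=0.0.3" ... --set "hedera.nodes[1].accountId=0.0.100" ...
```
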
+ /** + * Update the account manager and transfer hbar for staking purposes + * @param config + */ + async triggerStakeCalculation (config) { + this.logger.info('sleep 60 seconds for the handler to be able to trigger the network node stake weight recalculation') + await sleep(60000) + const accountMap = getNodeAccountMap(config.allNodeIds) + + if (config.newAccountNumber) { + // update the map with the new account id + accountMap.set(config.nodeId, config.newAccountNumber) + + // update _nodeClient with the new service map since one of the account numbers has changed + await this.accountManager.refreshNodeClient(config.namespace) } - return subTasks + // send some write transactions to invoke the handler that will trigger the stake weight recalculation + for (const nodeId of config.allNodeIds) { + const accountId = accountMap.get(nodeId) + config.nodeClient.setOperator(TREASURY_ACCOUNT_ID, config.treasuryKey) + await this.accountManager.transferAmount(constants.TREASURY_ACCOUNT_ID, accountId, 1) + } } - async _copyNodeKeys (nodeKey, destDir) { - for (const keyFile of [nodeKey.privateKeyFile, nodeKey.certificateFile]) { - if (!fs.existsSync(keyFile)) { - throw new FullstackTestingError(`file (${keyFile}) is missing`) - } + /** + * Identify existing network nodes and check if they are running + * @param {any} ctx + * @param {TaskWrapper} task + * @param config + */ + async identifyExistingNetworkNodes (ctx, task, config) { + config.existingNodeIds = [] + config.serviceMap = await this.accountManager.getNodeServiceMap( + config.namespace) + for (/** @type {NetworkNodeServices} **/ const networkNodeServices of config.serviceMap.values()) { + config.existingNodeIds.push(networkNodeServices.nodeName) + } + config.allNodeIds = [...config.existingNodeIds] + return this.taskCheckNetworkNodePods(ctx, task, config.existingNodeIds) + } + + /** + * Download generated config files and key files from an existing network node + * @param config + */ + async downloadNodeGeneratedFiles (config) { + const node1FullyQualifiedPodName = Templates.renderNetworkPodName(config.existingNodeIds[0]) + + // copy the config.txt file from the node1 upgrade directory + await this.k8.copyFrom(node1FullyQualifiedPodName, constants.ROOT_CONTAINER, `${constants.HEDERA_HAPI_PATH}/data/upgrade/current/config.txt`, config.stagingDir) + + // if directory data/upgrade/current/data/keys does not exist then use data/upgrade/current + let keyDir = `${constants.HEDERA_HAPI_PATH}/data/upgrade/current/data/keys` + if (!await this.k8.hasDir(node1FullyQualifiedPodName, constants.ROOT_CONTAINER, keyDir)) { + keyDir = `${constants.HEDERA_HAPI_PATH}/data/upgrade/current` + } + const signedKeyFiles = (await this.k8.listDir(node1FullyQualifiedPodName, constants.ROOT_CONTAINER, keyDir)).filter(file => file.name.startsWith(constants.SIGNING_KEY_PREFIX)) + await this.k8.execContainer(node1FullyQualifiedPodName, constants.ROOT_CONTAINER, ['bash', '-c', `mkdir -p ${constants.HEDERA_HAPI_PATH}/data/keys_backup && cp -r ${keyDir} ${constants.HEDERA_HAPI_PATH}/data/keys_backup/`]) + for (const signedKeyFile of signedKeyFiles) { + await this.k8.copyFrom(node1FullyQualifiedPodName, constants.ROOT_CONTAINER, `${keyDir}/${signedKeyFile.name}`, `${config.keysDir}`) + } - const fileName = path.basename(keyFile) - fs.cpSync(keyFile, `${destDir}/${fileName}`) + if (await this.k8.hasFile(node1FullyQualifiedPodName, constants.ROOT_CONTAINER, `${constants.HEDERA_HAPI_PATH}/data/upgrade/current/application.properties`)) { + await this.k8.copyFrom(node1FullyQualifiedPodName, constants.ROOT_CONTAINER, `${constants.HEDERA_HAPI_PATH}/data/upgrade/current/application.properties`, `${config.stagingDir}/templates`) } }
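The subtle step in `downloadNodeGeneratedFiles` above is the key-directory fallback: signing keys may live under `data/upgrade/current/data/keys` or directly under `data/upgrade/current`. A condensed sketch of that lookup, reusing the `k8.hasDir`/`k8.listDir` helper signatures visible above (the wrapper function itself is hypothetical):

```js
// Resolve the directory holding the signing keys inside the node container,
// then keep only the files whose names carry the signing-key prefix.
async function listSignedKeyFiles (k8, podName, container, hapiPath, signingKeyPrefix) {
  // prefer data/upgrade/current/data/keys; fall back to data/upgrade/current
  let keyDir = `${hapiPath}/data/upgrade/current/data/keys`
  if (!await k8.hasDir(podName, container, keyDir)) {
    keyDir = `${hapiPath}/data/upgrade/current`
  }
  const files = await k8.listDir(podName, container, keyDir)
  return { keyDir, files: files.filter(f => f.name.startsWith(signingKeyPrefix)) }
}
```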
- async initializeSetup (config, configManager, k8) { + async initializeSetup (config, k8) { // compute other config parameters - config.releasePrefix = Templates.prepareReleasePrefix(config.releaseTag) - config.buildZipFile = `${config.cacheDir}/${config.releasePrefix}/build-${config.releaseTag}.zip` config.keysDir = path.join(validatePath(config.cacheDir), 'keys') - config.stagingDir = Templates.renderStagingDir(configManager, flags) + config.stagingDir = Templates.renderStagingDir( + config.cacheDir, + config.releaseTag + ) config.stagingKeysDir = path.join(validatePath(config.stagingDir), 'keys') if (!await k8.hasNamespace(config.namespace)) { @@ -358,7 +812,14 @@ export class NodeCommand extends BaseCommand { } } - uploadPlatformSoftware (ctx, task, localBuildPath) { + /** + * @param {string[]} nodeIds + * @param {Object} podNames + * @param {TaskWrapper} task + * @param {string} localBuildPath + * @returns {Listr<*, *, *>} + */ + uploadPlatformSoftware (nodeIds, podNames, task, localBuildPath) { const self = this const subTasks = [] @@ -377,8 +838,8 @@ export class NodeCommand extends BaseCommand { } let localDataLibBuildPath - for (const nodeId of ctx.config.nodeIds) { - const podName = ctx.config.podNames[nodeId] + for (const nodeId of nodeIds) { + const podName = podNames[nodeId] if (buildPathMap.has(nodeId)) { localDataLibBuildPath = buildPathMap.get(nodeId) } else { @@ -410,16 +871,39 @@ export class NodeCommand extends BaseCommand { }) } - fetchPlatformSoftware (ctx, task, platformInstaller) { - const config = ctx.config + /** + * @param {string[]} nodeIds + * @param {Object} podNames + * @param {string} releaseTag + * @param {TaskWrapper} task + * @param {string} localBuildPath + * @returns {Listr<*, *, *>} + */ + fetchLocalOrReleasedPlatformSoftware (nodeIds, podNames, releaseTag, task, localBuildPath) { + const self = this + if (localBuildPath !== '') { + return self.uploadPlatformSoftware(nodeIds, podNames, task, localBuildPath) + } else { + return self.fetchPlatformSoftware(nodeIds, podNames, releaseTag, task, self.platformInstaller) + } + } + /** + * @param {string[]} nodeIds + * @param {Object} podNames + * @param {string} releaseTag + * @param {TaskWrapper} task + * @param {PlatformInstaller} platformInstaller + * @returns {Listr} + */ + fetchPlatformSoftware (nodeIds, podNames, releaseTag, task, platformInstaller) { const subTasks = [] - for (const nodeId of ctx.config.nodeIds) { - const podName = ctx.config.podNames[nodeId] + for (const nodeId of nodeIds) { + const podName = podNames[nodeId] subTasks.push({ - title: `Update node: ${chalk.yellow(nodeId)}`, - task: () => - platformInstaller.fetchPlatform(podName, config.releaseTag) + title: `Update node: ${chalk.yellow(nodeId)} [ platformVersion = ${releaseTag} ]`, + task: async () => + await platformInstaller.fetchPlatform(podName, releaseTag) }) } @@ -432,40 +916,182 @@ export class NodeCommand extends BaseCommand { }) } - async setup (argv) { - const self = this - + async loadPermCertificate (certFullPath) { + const certPem = fs.readFileSync(certFullPath).toString() + const decodedDers = x509.PemConverter.decode(certPem) + if (!decodedDers || decodedDers.length === 0) { + throw new FullstackTestingError('unable to load PEM certificate: ' + certFullPath) + } + return (new Uint8Array(decodedDers[0])) + }
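`loadPermCertificate` above leans on `x509.PemConverter.decode`, which yields one DER `ArrayBuffer` per PEM block in the input. A minimal self-contained sketch of the same PEM-to-DER conversion, assuming `x509` is the `@peculiar/x509` package imported by this file:

```js
import fs from 'fs'
import * as x509 from '@peculiar/x509'

// Read a PEM file and return the DER bytes of its first certificate block.
function pemFileToDer (certFullPath) {
  const certPem = fs.readFileSync(certFullPath).toString()
  const decodedDers = x509.PemConverter.decode(certPem) // one ArrayBuffer per PEM block
  if (!decodedDers || decodedDers.length === 0) {
    throw new Error(`unable to load PEM certificate: ${certFullPath}`)
  }
  return new Uint8Array(decodedDers[0])
}
```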
+ + async prepareUpgradeZip (stagingDir) { + // we build a mock upgrade.zip file as we really don't need to upgrade the network + // also the platform zip file is ~80MB in size requiring a lot of transactions since the max + // transaction size is 6KB and in practice we need to send the file as 4KB chunks. + // Note however that in DAB phase-2, we won't need to trigger this fake upgrade process + const zipper = new Zippy(this.logger) + const upgradeConfigDir = path.join(stagingDir, 'mock-upgrade', 'data', 'config') + if (!fs.existsSync(upgradeConfigDir)) { + fs.mkdirSync(upgradeConfigDir, { recursive: true }) + } + + // bump field hedera.config.version + const fileBytes = fs.readFileSync(path.join(stagingDir, 'templates', 'application.properties')) + const lines = fileBytes.toString().split('\n') + const newLines = [] + for (let line of lines) { + line = line.trim() + const parts = line.split('=') + if (parts.length === 2) { + if (parts[0] === 'hedera.config.version') { + let version = parseInt(parts[1]) + line = `hedera.config.version=${++version}` + } + newLines.push(line) + } + } + fs.writeFileSync(path.join(upgradeConfigDir, 'application.properties'), newLines.join('\n')) + + return await zipper.zip(path.join(stagingDir, 'mock-upgrade'), path.join(stagingDir, 'mock-upgrade.zip')) + } + + /** + * @param {string} upgradeZipFile + * @param nodeClient + * @returns {Promise<string>} the sha384 hash of the uploaded zip file + */ + async uploadUpgradeZip (upgradeZipFile, nodeClient) { + // get byte value of the zip file + const zipBytes = fs.readFileSync(upgradeZipFile) + const zipHash = crypto.createHash('sha384').update(zipBytes).digest('hex') + this.logger.debug(`loaded upgrade zip file [ zipHash = ${zipHash} zipBytes.length = ${zipBytes.length}, zipPath = ${upgradeZipFile}]`) + + // create a file upload transaction to upload file to the network + try { + let start = 0 + + while (start < zipBytes.length) { + const zipBytesChunk = new Uint8Array(zipBytes.subarray(start, start + constants.UPGRADE_FILE_CHUNK_SIZE)) + let fileTransaction = null + + if (start === 0) { + fileTransaction = new FileUpdateTransaction() + .setFileId(constants.UPGRADE_FILE_ID) + .setContents(zipBytesChunk) + } else { + fileTransaction = new FileAppendTransaction() + .setFileId(constants.UPGRADE_FILE_ID) + .setContents(zipBytesChunk) + } + const resp = await fileTransaction.execute(nodeClient) + const receipt = await resp.getReceipt(nodeClient) + this.logger.debug(`updated file ${constants.UPGRADE_FILE_ID} [chunkSize= ${zipBytesChunk.length}, txReceipt = ${receipt.toString()}]`) + + start += constants.UPGRADE_FILE_CHUNK_SIZE + } + + return zipHash + } catch (e) { + throw new FullstackTestingError(`failed to upload build.zip file: ${e.message}`, e) + } + } + + /** + * @param {string} endpointType + * @param {string[]} endpoints + * @param {number} defaultPort + * @returns {ServiceEndpoint[]} + */ + prepareEndpoints (endpointType, endpoints, defaultPort) { + const ret = /** @type {ServiceEndpoint[]} **/[] + for (const endpoint of endpoints) { + const parts = endpoint.split(':') + + let url = '' + let port = defaultPort + + if (parts.length === 2) { + url = parts[0].trim() + port = parts[1].trim() + } else if (parts.length === 1) { + url = parts[0] + } else { + throw new FullstackTestingError(`incorrect endpoint format. 
expected url:port, found ${endpoint}`) + } + + if (endpointType.toUpperCase() === constants.ENDPOINT_TYPE_IP) { + ret.push(new ServiceEndpoint({ + port, + ipAddressV4: helpers.parseIpAddressToUint8Array(url) + })) + } else { + ret.push(new ServiceEndpoint({ + port, + domainName: url + })) + } + } + + return ret + } + + // List of Commands + /** + * @param {Object} argv + * @returns {Promise} + */ + async setup (argv) { + const self = this + const tasks = new Listr([ { title: 'Initialize', task: async (ctx, task) => { self.configManager.update(argv) - await prompts.execute(task, self.configManager, [ - flags.cacheDir, - flags.chainId, - flags.generateGossipKeys, - flags.generateTlsKeys, - flags.keyFormat, - flags.namespace, - flags.nodeIDs, - flags.releaseTag - ]) - const config = { - cacheDir: self.configManager.getFlag(flags.cacheDir), - chainId: self.configManager.getFlag(flags.chainId), - curDate: new Date(), - devMode: self.configManager.getFlag(flags.devMode), - force: self.configManager.getFlag(flags.force), - generateGossipKeys: self.configManager.getFlag(flags.generateGossipKeys), - generateTlsKeys: self.configManager.getFlag(flags.generateTlsKeys), - keyFormat: self.configManager.getFlag(flags.keyFormat), - namespace: self.configManager.getFlag(flags.namespace), - nodeIds: helpers.parseNodeIds(self.configManager.getFlag(flags.nodeIDs)), - releaseTag: self.configManager.getFlag(flags.releaseTag) - } + // disable the prompts that we don't want to prompt the user for + prompts.disablePrompts([ + flags.app, + flags.appConfig, + flags.devMode, + flags.localBuildPath + ]) - await self.initializeSetup(config, self.configManager, self.k8) + await prompts.execute(task, self.configManager, NodeCommand.SETUP_FLAGS_LIST) + + /** + * @typedef {Object} NodeSetupConfigClass + * -- flags -- + * @property {string} app + * @property {string} appConfig + * @property {string} cacheDir + * @property {boolean} devMode + * @property {string} localBuildPath + * @property {string} namespace + * @property {string} nodeIDs + * @property {string} releaseTag + * -- extra args -- + * @property {string[]} nodeIds + * @property {string[]} podNames + * -- methods -- + * @property {getUnusedConfigs} getUnusedConfigs + */ + /** + * @callback getUnusedConfigs + * @returns {string[]} + */ + + // create a config object for subsequent steps + const config = /** @type {NodeSetupConfigClass} **/ this.getConfig(NodeCommand.SETUP_CONFIGS_NAME, NodeCommand.SETUP_FLAGS_LIST, + [ + 'nodeIds', + 'podNames' + ]) + + config.nodeIds = helpers.parseNodeIds(config.nodeIDs) + + await self.initializeSetup(config, self.k8) // set config in the context for later tasks to use ctx.config = config @@ -477,137 +1103,18 @@ export class NodeCommand extends BaseCommand { title: 'Identify network pods', task: (ctx, task) => self.taskCheckNetworkNodePods(ctx, task, ctx.config.nodeIds) }, - { - title: 'Generate Gossip keys', - task: async (ctx, parentTask) => { - const config = ctx.config - const subTasks = self._nodeGossipKeysTaskList(config.keyFormat, config.nodeIds, config.keysDir, config.curDate) - // set up the sub-tasks - return parentTask.newListr(subTasks, { - concurrent: false, - rendererOptions: { - collapseSubtasks: false, - timer: constants.LISTR_DEFAULT_RENDERER_TIMER_OPTION - } - }) - }, - skip: (ctx, _) => !ctx.config.generateGossipKeys - }, - { - title: 'Generate gRPC TLS keys', - task: async (ctx, parentTask) => { - const config = ctx.config - const subTasks = self._nodeTlsKeyTaskList(config.nodeIds, config.keysDir, 
config.curDate) - // set up the sub-tasks - return parentTask.newListr(subTasks, { - concurrent: false, - rendererOptions: { - collapseSubtasks: false, - timer: constants.LISTR_DEFAULT_RENDERER_TIMER_OPTION - } - }) - }, - skip: (ctx, _) => !ctx.config.generateTlsKeys - }, - { - title: 'Prepare staging directory', - task: async (ctx, parentTask) => { - const config = ctx.config - const subTasks = [ - { - title: 'Copy configuration files', - task: () => { - for (const flag of flags.nodeConfigFileFlags.values()) { - const filePath = self.configManager.getFlag(flag) - if (!filePath) { - throw new FullstackTestingError(`Configuration file path is missing for: ${flag.name}`) - } - - const fileName = path.basename(filePath) - const destPath = `${config.stagingDir}/templates/${fileName}` - self.logger.debug(`Copying configuration file to staging: ${filePath} -> ${destPath}`) - - fs.cpSync(filePath, destPath, { force: true }) - } - } - }, - { - title: 'Copy Gossip keys to staging', - task: async (ctx, _) => { - const config = ctx.config - - await this.copyGossipKeysToStaging(config, ctx.config.nodeIds) - } - }, - { - title: 'Copy gRPC TLS keys to staging', - task: async (ctx, _) => { - const config = ctx.config - for (const nodeId of ctx.config.nodeIds) { - const tlsKeyFiles = self.keyManager.prepareTLSKeyFilePaths(nodeId, config.keysDir) - await self._copyNodeKeys(tlsKeyFiles, config.stagingKeysDir) - } - } - }, - { - title: 'Prepare config.txt for the network', - task: async (ctx, _) => { - const config = ctx.config - const configTxtPath = `${config.stagingDir}/config.txt` - const template = `${constants.RESOURCES_DIR}/templates/config.template` - const appName = self.configManager.getFlag(flags.app) - await self.platformInstaller.prepareConfigTxt(config.nodeIds, configTxtPath, config.releaseTag, config.chainId, template, appName || undefined) - } - } - ] - - return parentTask.newListr(subTasks, { - concurrent: false, - rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION - }) - } - }, { title: 'Fetch platform software into network nodes', task: async (ctx, task) => { - const localBuildPath = self.configManager.getFlag(flags.localBuildPath) - if (localBuildPath !== '') { - return self.uploadPlatformSoftware(ctx, task, localBuildPath) - } else { - return self.fetchPlatformSoftware(ctx, task, self.platformInstaller) - } + const config = /** @type {NodeSetupConfigClass} **/ ctx.config + return self.fetchLocalOrReleasedPlatformSoftware(config.nodeIds, config.podNames, config.releaseTag, task, config.localBuildPath) } }, { title: 'Setup network nodes', task: async (ctx, parentTask) => { - const config = ctx.config - - const subTasks = [] - for (const nodeId of config.nodeIds) { - const podName = config.podNames[nodeId] - subTasks.push({ - title: `Node: ${chalk.yellow(nodeId)}`, - task: () => - self.platformInstaller.taskInstall(podName, config.buildZipFile, config.stagingDir, config.nodeIds, config.keyFormat, config.force) - }) - } - - // set up the sub-tasks - return parentTask.newListr(subTasks, { - concurrent: true, - rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION - }) - } - }, - { - title: 'Finalize', - task: (ctx, _) => { - // reset flags so that keys are not regenerated later - self.configManager.setFlag(flags.generateGossipKeys, false) - self.configManager.setFlag(flags.generateTlsKeys, false) - self.configManager.persist() + return this.setupNodesTask(ctx, parentTask, ctx.config.nodeIds) } } ], { @@ -624,6 +1131,10 @@ export class NodeCommand extends BaseCommand { return true } 
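The chunked upload in `uploadUpgradeZip` is easy to get wrong: the end offset passed to `subarray` must advance together with `start`, otherwise every chunk after the first is empty. A minimal sketch of the pattern with the Hedera SDK file transactions shown in the source (`chunkSize` and `fileId` are illustrative parameters):

```js
import { FileUpdateTransaction, FileAppendTransaction } from '@hashgraph/sdk'

// Upload `bytes` to `fileId` in chunks: the first chunk replaces the file
// contents (FileUpdate), subsequent chunks are appended (FileAppend).
async function uploadInChunks (nodeClient, fileId, bytes, chunkSize) {
  for (let start = 0; start < bytes.length; start += chunkSize) {
    // the end offset advances with start; a fixed end would produce
    // nothing but empty chunks after the first iteration
    const chunk = new Uint8Array(bytes.subarray(start, start + chunkSize))
    const tx = start === 0
      ? new FileUpdateTransaction().setFileId(fileId).setContents(chunk)
      : new FileAppendTransaction().setFileId(fileId).setContents(chunk)
    const resp = await tx.execute(nodeClient)
    await resp.getReceipt(nodeClient) // wait for consensus before the next chunk
  }
}
```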
+ /** + * @param {Object} argv + * @returns {Promise} + */ async start (argv) { const self = this @@ -638,12 +1149,17 @@ export class NodeCommand extends BaseCommand { ]) ctx.config = { + app: self.configManager.getFlag(flags.app), cacheDir: self.configManager.getFlag(flags.cacheDir), + debugNodeId: self.configManager.getFlag(flags.debugNodeId), namespace: self.configManager.getFlag(flags.namespace), nodeIds: helpers.parseNodeIds(self.configManager.getFlag(flags.nodeIDs)) } - ctx.config.stagingDir = Templates.renderStagingDir(self.configManager, flags) + ctx.config.stagingDir = Templates.renderStagingDir( + self.configManager.getFlag(flags.cacheDir), + self.configManager.getFlag(flags.releaseTag) + ) if (!await self.k8.hasNamespace(ctx.config.namespace)) { throw new FullstackTestingError(`namespace ${ctx.config.namespace} does not exist`) @@ -651,74 +1167,61 @@ export class NodeCommand extends BaseCommand { } }, { - title: 'Identify network pods', - task: (ctx, task) => self.taskCheckNetworkNodePods(ctx, task, ctx.config.nodeIds) + title: 'Identify existing network nodes', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + return this.identifyExistingNetworkNodes(ctx, task, config) + } }, { title: 'Starting nodes', task: (ctx, task) => { - const subTasks = [] - self.startNodes(ctx.config, ctx.config.nodeIds, subTasks) - - // set up the sub-tasks - return task.newListr(subTasks, { - concurrent: true, - rendererOptions: { - collapseSubtasks: false, - timer: constants.LISTR_DEFAULT_RENDERER_TIMER_OPTION - } - }) + return this.startNetworkNodesTask(task, ctx.config.podNames, ctx.config.nodeIds) } }, + { + title: 'Enable port forwarding for JVM debugger', + task: async (ctx, _) => { + await this.enableJVMPortForwarding(ctx.config.debugNodeId) + }, + skip: (ctx, _) => !ctx.config.debugNodeId + }, { title: 'Check nodes are ACTIVE', task: (ctx, task) => { - const subTasks = [] - for (const nodeId of ctx.config.nodeIds) { - if (self.configManager.getFlag(flags.app) !== '') { - subTasks.push({ - title: `Check node: ${chalk.yellow(nodeId)}`, - task: () => self.checkNetworkNodeState(nodeId, 100, 'ACTIVE', 'output/swirlds.log') - }) - } else { - subTasks.push({ - title: `Check node: ${chalk.yellow(nodeId)}`, - task: () => self.checkNetworkNodeState(nodeId) - }) - } - } - - // set up the sub-tasks - return task.newListr(subTasks, { - concurrent: false, - rendererOptions: { - collapseSubtasks: false - } - }) + return this.checkNodeActivenessTask(ctx, task, ctx.config.nodeIds) } }, { title: 'Check node proxies are ACTIVE', task: async (ctx, parentTask) => { - const subTasks = [] - for (const nodeId of ctx.config.nodeIds) { - subTasks.push({ - title: `Check proxy for node: ${chalk.yellow(nodeId)}`, - task: async () => await self.k8.waitForPodReady( - [`app=haproxy-${nodeId}`, 'fullstack.hedera.com/type=haproxy'], - 1, 300, 2000) + return self.checkNodesProxiesTask(ctx, parentTask, ctx.config.nodeIds) + }, + skip: (ctx, _) => self.configManager.getFlag(flags.app) !== '' && self.configManager.getFlag(flags.app) !== constants.HEDERA_APP_NAME + }, + { + title: 'Add node stakes', + task: (ctx, task) => { + if (ctx.config.app === '' || ctx.config.app === constants.HEDERA_APP_NAME) { + const subTasks = [] + const accountMap = getNodeAccountMap(ctx.config.nodeIds) + for (const nodeId of ctx.config.nodeIds) { + const accountId = accountMap.get(nodeId) + subTasks.push({ + title: `Adding stake for node: ${chalk.yellow(nodeId)}`, + task: async () => await 
self.addStake(ctx.config.namespace, accountId, nodeId) + }) + } + + // set up the sub-tasks + return task.newListr(subTasks, { + concurrent: false, + rendererOptions: { + collapseSubtasks: false + } }) } - - // set up the sub-tasks - return parentTask.newListr(subTasks, { - concurrent: true, - rendererOptions: { - collapseSubtasks: false - } - }) - }, - skip: (ctx, _) => self.configManager.getFlag(flags.app) !== '' + } }], { concurrent: false, rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION @@ -736,6 +1239,10 @@ export class NodeCommand extends BaseCommand { return true } + /** + * @param {Object} argv + * @returns {Promise} + */ async stop (argv) { const self = this @@ -771,7 +1278,7 @@ export class NodeCommand extends BaseCommand { const podName = ctx.config.podNames[nodeId] subTasks.push({ title: `Stop node: ${chalk.yellow(nodeId)}`, - task: () => self.k8.execContainer(podName, constants.ROOT_CONTAINER, 'systemctl stop network-node') + task: async () => await self.k8.execContainer(podName, constants.ROOT_CONTAINER, 'systemctl stop network-node') }) } @@ -799,6 +1306,10 @@ export class NodeCommand extends BaseCommand { return true } + /** + * @param {Object} argv + * @returns {Promise} + */ async keys (argv) { const self = this const tasks = new Listr([ @@ -806,24 +1317,45 @@ export class NodeCommand extends BaseCommand { title: 'Initialize', task: async (ctx, task) => { self.configManager.update(argv) - await prompts.execute(task, self.configManager, [ - flags.cacheDir, - flags.generateGossipKeys, - flags.generateTlsKeys, - flags.keyFormat, - flags.nodeIDs + + // disable the prompts that we don't want to prompt the user for + prompts.disablePrompts([ + flags.devMode ]) - const config = { - cacheDir: self.configManager.getFlag(flags.cacheDir), - curDate: new Date(), - devMode: self.configManager.getFlag(flags.devMode), - generateGossipKeys: self.configManager.getFlag(flags.generateGossipKeys), - generateTlsKeys: self.configManager.getFlag(flags.generateTlsKeys), - keyFormat: self.configManager.getFlag(flags.keyFormat), - keysDir: path.join(self.configManager.getFlag(flags.cacheDir), 'keys'), - nodeIds: helpers.parseNodeIds(self.configManager.getFlag(flags.nodeIDs)) - } + await prompts.execute(task, self.configManager, NodeCommand.KEYS_FLAGS_LIST) + + /** + * @typedef {Object} NodeKeysConfigClass + * -- flags -- + * @property {string} cacheDir + * @property {boolean} devMode + * @property {boolean} generateGossipKeys + * @property {boolean} generateTlsKeys + * @property {string} nodeIDs + * -- extra args -- + * @property {Date} curDate + * @property {string} keysDir + * @property {string[]} nodeIds + * -- methods -- + * @property {getUnusedConfigs} getUnusedConfigs + */ + /** + * @callback getUnusedConfigs + * @returns {string[]} + */ + + // create a config object for subsequent steps + const config = /** @type {NodeKeysConfigClass} **/ this.getConfig(NodeCommand.KEYS_CONFIGS_NAME, NodeCommand.KEYS_FLAGS_LIST, + [ + 'curDate', + 'keysDir', + 'nodeIds' + ]) + + config.curDate = new Date() + config.nodeIds = helpers.parseNodeIds(config.nodeIDs) + config.keysDir = path.join(self.configManager.getFlag(flags.cacheDir), 'keys') if (!fs.existsSync(config.keysDir)) { fs.mkdirSync(config.keysDir) @@ -836,7 +1368,7 @@ export class NodeCommand extends BaseCommand { title: 'Generate gossip keys', task: async (ctx, parentTask) => { const config = ctx.config - const subTasks = self._nodeGossipKeysTaskList(config.keyFormat, config.nodeIds, config.keysDir, config.curDate) + const subTasks = 
self.keyManager.taskGenerateGossipKeys(self.keytoolDepManager, config.nodeIds, config.keysDir, config.curDate) // set up the sub-tasks return parentTask.newListr(subTasks, { concurrent: false, @@ -852,7 +1384,7 @@ export class NodeCommand extends BaseCommand { title: 'Generate gRPC TLS keys', task: async (ctx, parentTask) => { const config = ctx.config - const subTasks = self._nodeTlsKeyTaskList(config.nodeIds, config.keysDir, config.curDate) + const subTasks = self.keyManager.taskGenerateTLSKeys(config.nodeIds, config.keysDir, config.curDate) // set up the sub-tasks return parentTask.newListr(subTasks, { concurrent: true, @@ -884,6 +1416,10 @@ export class NodeCommand extends BaseCommand { return true } + /** + * @param {Object} argv + * @returns {Promise} + */ async refresh (argv) { const self = this @@ -892,31 +1428,48 @@ export class NodeCommand extends BaseCommand { title: 'Initialize', task: async (ctx, task) => { self.configManager.update(argv) - await prompts.execute(task, self.configManager, [ - flags.cacheDir, - flags.keyFormat, - flags.namespace, - flags.nodeIDs, - flags.releaseTag + // disable the prompts that we don't want to prompt the user for + prompts.disablePrompts([ + flags.app, + flags.devMode, + flags.localBuildPath ]) - const config = { - cacheDir: self.configManager.getFlag(flags.cacheDir), - curDate: new Date(), - devMode: self.configManager.getFlag(flags.devMode), - force: self.configManager.getFlag(flags.force), - keyFormat: self.configManager.getFlag(flags.keyFormat), - namespace: self.configManager.getFlag(flags.namespace), - nodeIds: helpers.parseNodeIds(self.configManager.getFlag(flags.nodeIDs)), - releaseTag: self.configManager.getFlag(flags.releaseTag) - } - - await self.initializeSetup(config, self.configManager, self.k8) - - // set config in the context for later tasks to use - ctx.config = config - - self.logger.debug('Initialized config', { config }) + await prompts.execute(task, self.configManager, NodeCommand.REFRESH_FLAGS_LIST) + + /** + * @typedef {Object} NodeRefreshConfigClass + * -- flags -- + * @property {string} app + * @property {string} cacheDir + * @property {boolean} devMode + * @property {string} localBuildPath + * @property {string} namespace + * @property {string} nodeIDs + * @property {string} releaseTag + * -- extra args -- + * @property {string[]} nodeIds + * @property {Object} podNames + * -- methods -- + * @property {getUnusedConfigs} getUnusedConfigs + */ + /** + * @callback getUnusedConfigs + * @returns {string[]} + */ + + // create a config object for subsequent steps + ctx.config = /** @type {NodeRefreshConfigClass} **/ this.getConfig(NodeCommand.REFRESH_CONFIGS_NAME, NodeCommand.REFRESH_FLAGS_LIST, + [ + 'nodeIds', + 'podNames' + ]) + + ctx.config.nodeIds = helpers.parseNodeIds(ctx.config.nodeIDs) + + await self.initializeSetup(ctx.config, self.k8) + + self.logger.debug('Initialized config', ctx.config) } }, { @@ -926,112 +1479,51 @@ export class NodeCommand extends BaseCommand { { title: 'Dump network nodes saved state', task: - async (ctx, task) => { - const subTasks = [] - for (const nodeId of ctx.config.nodeIds) { - const podName = ctx.config.podNames[nodeId] - subTasks.push({ - title: `Node: ${chalk.yellow(nodeId)}`, - task: async () => - await self.k8.execContainer(podName, constants.ROOT_CONTAINER, ['bash', '-c', `rm -rf ${constants.HEDERA_HAPI_PATH}/data/saved/*`]) - }) - } - - // set up the sub-tasks - return task.newListr(subTasks, { - concurrent: true, - rendererOptions: { - collapseSubtasks: false - } + async (ctx, 
task) => { + const config = /** @type {NodeRefreshConfigClass} **/ ctx.config + const subTasks = [] + for (const nodeId of config.nodeIds) { + const podName = config.podNames[nodeId] + subTasks.push({ + title: `Node: ${chalk.yellow(nodeId)}`, + task: async () => + await self.k8.execContainer(podName, constants.ROOT_CONTAINER, ['bash', '-c', `rm -rf ${constants.HEDERA_HAPI_PATH}/data/saved/*`]) }) } + + // set up the sub-tasks + return task.newListr(subTasks, { + concurrent: true, + rendererOptions: { + collapseSubtasks: false + } + }) + } }, { title: 'Fetch platform software into network nodes', task: - async (ctx, task) => { - return self.fetchPlatformSoftware(ctx, task, self.platformInstaller) - } + async (ctx, task) => { + const config = /** @type {NodeRefreshConfigClass} **/ ctx.config + return self.fetchLocalOrReleasedPlatformSoftware(config.nodeIds, config.podNames, config.releaseTag, task, config.localBuildPath) + } }, { title: 'Setup network nodes', task: async (ctx, parentTask) => { - const config = ctx.config - - const subTasks = [] - const nodeList = [] - const networkNodeServicesMap = await self.accountManager.getNodeServiceMap(ctx.config.namespace) - for (const networkNodeServices of networkNodeServicesMap.values()) { - nodeList.push(networkNodeServices.nodeName) - } - - for (const nodeId of config.nodeIds) { - const podName = config.podNames[nodeId] - subTasks.push({ - title: `Node: ${chalk.yellow(nodeId)}`, - task: () => - self.platformInstaller.taskInstall(podName, config.buildZipFile, - config.stagingDir, nodeList, config.keyFormat, config.force) - }) - } - - // set up the sub-tasks - return parentTask.newListr(subTasks, { - concurrent: true, - rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION - }) - } - }, - { - title: 'Finalize', - task: (ctx, _) => { - // reset flags so that keys are not regenerated later - self.configManager.setFlag(flags.generateGossipKeys, false) - self.configManager.setFlag(flags.generateTlsKeys, false) - self.configManager.persist() + return this.setupNodesTask(ctx, parentTask, ctx.config.nodeIds) } }, { title: 'Starting nodes', task: (ctx, task) => { - const subTasks = [] - self.startNodes(ctx.config, ctx.config.nodeIds, subTasks) - - // set up the sub-tasks - return task.newListr(subTasks, { - concurrent: true, - rendererOptions: { - collapseSubtasks: false, - timer: constants.LISTR_DEFAULT_RENDERER_TIMER_OPTION - } - }) + return this.startNetworkNodesTask(task, ctx.config.podNames, ctx.config.nodeIds) } }, { title: 'Check nodes are ACTIVE', task: (ctx, task) => { - const subTasks = [] - for (const nodeId of ctx.config.nodeIds) { - if (self.configManager.getFlag(flags.app) !== '') { - subTasks.push({ - title: `Check node: ${chalk.yellow(nodeId)}`, - task: () => self.checkNetworkNodeState(nodeId, 100, 'ACTIVE', 'output/swirlds.log') - }) - } else { - subTasks.push({ - title: `Check node: ${chalk.yellow(nodeId)}`, - task: () => self.checkNetworkNodeState(nodeId) - }) - } - } - - // set up the sub-tasks - return task.newListr(subTasks, { - concurrent: false, - rendererOptions: { - collapseSubtasks: false - } - }) + return this.checkNodeActivenessTask(ctx, task, ctx.config.nodeIds) } }, { @@ -1039,25 +1531,9 @@ export class NodeCommand extends BaseCommand { // this is more reliable than checking the nodes logs for ACTIVE, as the // logs will have a lot of white noise from being behind task: async (ctx, task) => { - const subTasks = [] - for (const nodeId of ctx.config.nodeIds) { - subTasks.push({ - title: `Check proxy for node: 
${chalk.yellow(nodeId)}`, - task: async () => await self.k8.waitForPodReady( - [`app=haproxy-${nodeId}`, 'fullstack.hedera.com/type=haproxy'], - 1, 300, 2000) - }) - } - - // set up the sub-tasks - return task.newListr(subTasks, { - concurrent: false, - rendererOptions: { - collapseSubtasks: false - } - }) + return this.checkNodesProxiesTask(ctx, task, ctx.config.nodeIds) }, - skip: (ctx, _) => self.configManager.getFlag(flags.app) !== '' + skip: (ctx, _) => ctx.config.app !== '' }], { concurrent: false, rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION @@ -1072,6 +1548,10 @@ export class NodeCommand extends BaseCommand { return true } + /** + * @param {Object} argv + * @returns {Promise} + */ async logs (argv) { const self = this @@ -1112,74 +1592,161 @@ export class NodeCommand extends BaseCommand { return true } - async add (argv) { + addInitializeTask (argv) { const self = this - const tasks = new Listr([ - { - title: 'Initialize', - task: async (ctx, task) => { - self.configManager.update(argv) - await prompts.execute(task, self.configManager, [ - flags.cacheDir, - flags.chainId, - flags.generateGossipKeys, - flags.generateTlsKeys, - flags.keyFormat, - flags.namespace, - flags.nodeIDs, - flags.releaseTag + return { + title: 'Initialize', + task: async (ctx, task) => { + self.configManager.update(argv) + + // disable the prompts that we don't want to prompt the user for + prompts.disablePrompts([ + flags.adminKey, + flags.app, + flags.chainId, + flags.chartDirectory, + flags.outputDir, + flags.devMode, + flags.debugNodeId, + flags.endpointType, + flags.force, + flags.fstChartVersion, + flags.localBuildPath, + flags.gossipEndpoints, + flags.grpcEndpoints + ]) + + await prompts.execute(task, self.configManager, NodeCommand.ADD_FLAGS_LIST) + + /** + * @typedef {Object} NodeAddConfigClass + * -- flags -- + * @property {string} app + * @property {string} cacheDir + * @property {string} chainId + * @property {string} chartDirectory + * @property {boolean} devMode + * @property {string} debugNodeId + * @property {string} endpointType + * @property {string} fstChartVersion + * @property {boolean} generateGossipKeys + * @property {boolean} generateTlsKeys + * @property {string} gossipEndpoints + * @property {string} grpcEndpoints + * @property {string} localBuildPath + * @property {string} namespace + * @property {string} nodeId + * @property {string} releaseTag + * -- extra args -- + * @property {PrivateKey} adminKey + * @property {string[]} allNodeIds + * @property {string} chartPath + * @property {Date} curDate + * @property {string[]} existingNodeIds + * @property {string} freezeAdminPrivateKey + * @property {string} keysDir + * @property {string} lastStateZipPath + * @property {Object} nodeClient + * @property {Object} podNames + * @property {Map} serviceMap + * @property {PrivateKey} treasuryKey + * @property {string} stagingDir + * @property {string} stagingKeysDir + * -- methods -- + * @property {getUnusedConfigs} getUnusedConfigs + */ + /** + * @callback getUnusedConfigs + * @returns {string[]} + */ + + // create a config object for subsequent steps + const config = /** @type {NodeAddConfigClass} **/ this.getConfig(NodeCommand.ADD_CONFIGS_NAME, NodeCommand.ADD_FLAGS_LIST, + [ + 'allNodeIds', + 'chartPath', + 'curDate', + 'existingNodeIds', + 'freezeAdminPrivateKey', + 'keysDir', + 'lastStateZipPath', + 'nodeClient', + 'podNames', + 'serviceMap', + 'stagingDir', + 'stagingKeysDir', + 'treasuryKey' ]) - const config = { - cacheDir: self.configManager.getFlag(flags.cacheDir), - 
chainId: self.configManager.getFlag(flags.chainId), - chartDir: self.configManager.getFlag(flags.chartDirectory), - curDate: new Date(), - devMode: self.configManager.getFlag(flags.devMode), - existingNodeIds: [], - force: self.configManager.getFlag(flags.force), - fstChartVersion: self.configManager.getFlag(flags.fstChartVersion), - generateGossipKeys: self.configManager.getFlag(flags.generateGossipKeys), - generateTlsKeys: self.configManager.getFlag(flags.generateTlsKeys), - keyFormat: self.configManager.getFlag(flags.keyFormat), - namespace: self.configManager.getFlag(flags.namespace), - nodeIds: helpers.parseNodeIds(self.configManager.getFlag(flags.nodeIDs)), - releaseTag: self.configManager.getFlag(flags.releaseTag) - } + ctx.adminKey = argv[flags.adminKey.name] ? PrivateKey.fromStringED25519(argv[flags.adminKey.name]) : PrivateKey.fromStringED25519(constants.GENESIS_KEY) + config.curDate = new Date() + config.existingNodeIds = [] - await self.initializeSetup(config, self.configManager, self.k8) + if (config.keyFormat !== constants.KEY_FORMAT_PEM) { + throw new FullstackTestingError('key type cannot be PFX') + } - // set config in the context for later tasks to use - ctx.config = config + await self.initializeSetup(config, self.k8) - ctx.config.chartPath = await self.prepareChartPath(ctx.config.chartDir, - constants.FULLSTACK_TESTING_CHART, constants.FULLSTACK_DEPLOYMENT_CHART) + // set config in the context for later tasks to use + ctx.config = config - // initialize Node Client with existing network nodes prior to adding the new node which isn't functioning, yet - await this.accountManager.loadNodeClient(ctx.config.namespace) + ctx.config.chartPath = await self.prepareChartPath(ctx.config.chartDirectory, + constants.FULLSTACK_TESTING_CHART, constants.FULLSTACK_DEPLOYMENT_CHART) - self.logger.debug('Initialized config', { config }) - } - }, + // initialize Node Client with existing network nodes prior to adding the new node which isn't functioning, yet + ctx.config.nodeClient = await this.accountManager.loadNodeClient(ctx.config.namespace) + + const accountKeys = await this.accountManager.getAccountKeysFromSecret(FREEZE_ADMIN_ACCOUNT, config.namespace) + config.freezeAdminPrivateKey = accountKeys.privateKey + + const treasuryAccount = await this.accountManager.getTreasuryAccountKeys(config.namespace) + const treasuryAccountPrivateKey = treasuryAccount.privateKey + config.treasuryKey = PrivateKey.fromStringED25519(treasuryAccountPrivateKey) + + config.serviceMap = await self.accountManager.getNodeServiceMap( + config.namespace) + + self.logger.debug('Initialized config', { config }) + } + } + } + + getIdentifyExistingNetworkNodesTask (argv) { + return { + title: 'Identify existing network nodes', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + return this.identifyExistingNetworkNodes(ctx, task, config) + } + } + } + + getAddPrepareTasks (argv) { + const self = this + + return [ + self.addInitializeTask(argv), { - title: 'Identify existing network nodes', + title: 'Check that PVCs are enabled', task: async (ctx, task) => { - ctx.config.serviceMap = await self.accountManager.getNodeServiceMap( - ctx.config.namespace) - for (/** @type {NetworkNodeServices} **/ const networkNodeServices of ctx.config.serviceMap.values()) { - ctx.config.existingNodeIds.push(networkNodeServices.nodeName) + if (!self.configManager.getFlag(flags.persistentVolumeClaims)) { + throw new FullstackTestingError('PVCs are not enabled. 
Please enable PVCs before adding a node') } } }, self.getIdentifyExistingNetworkNodesTask(argv), { title: 'Determine new node account number', task: (ctx, task) => { const config = /** @type {NodeAddConfigClass} **/ ctx.config const values = { hedera: { nodes: [] } } - let maxNum - for (/** @type {NetworkNodeServices} **/ const networkNodeServices of ctx.config.serviceMap.values()) { + let maxNum = 0 + + let lastNodeName = DEFAULT_NETWORK_NODE_NAME + + for (/** @type {NetworkNodeServices} **/ const networkNodeServices of config.serviceMap.values()) { values.hedera.nodes.push({ accountId: networkNodeServices.accountId, name: networkNodeServices.nodeName @@ -1187,60 +1754,29 @@ export class NodeCommand extends BaseCommand { maxNum = maxNum > AccountId.fromString(networkNodeServices.accountId).num ? maxNum : AccountId.fromString(networkNodeServices.accountId).num - } - for (const nodeId of ctx.config.nodeIds) { - const accountId = AccountId.fromString(values.hedera.nodes[0].accountId) - accountId.num = ++maxNum - values.hedera.nodes.push({ - accountId: accountId.toString(), - name: nodeId - }) + lastNodeName = networkNodeServices.nodeName } - let valuesArg = '' - let index = 0 - for (const node of values.hedera.nodes) { - valuesArg += ` --set "hedera.nodes[${index}].accountId=${node.accountId}" --set "hedera.nodes[${index}].name=${node.name}"` - index++ + const lastNodeNumberMatch = lastNodeName.match(/\d+$/) + if (lastNodeNumberMatch) { + const incremented = parseInt(lastNodeNumberMatch[0]) + 1 + lastNodeName = lastNodeName.replace(/\d+$/, incremented.toString()) } - await self.chartManager.upgrade( - ctx.config.namespace, - constants.FULLSTACK_DEPLOYMENT_CHART, - ctx.config.chartPath, - valuesArg, - ctx.config.fstChartVersion - ) - ctx.config.allNodeIds = [...ctx.config.existingNodeIds, ...ctx.config.nodeIds] - } - }, - { - title: 'Check new network node pod is running', - task: async (ctx, task) => { - const subTasks = [] - for (const nodeId of ctx.config.nodeIds) { - subTasks.push({ - title: `Check new network pod: ${chalk.yellow(nodeId)}`, - task: async (ctx) => { - ctx.config.podNames[nodeId] = await this.checkNetworkNodePod(ctx.config.namespace, nodeId) - } - }) + ctx.maxNum = maxNum + ctx.newNode = { + accountId: `${constants.HEDERA_NODE_ACCOUNT_ID_START.realm}.${constants.HEDERA_NODE_ACCOUNT_ID_START.shard}.${++maxNum}`, + name: lastNodeName } - - // setup the sub-tasks - return task.newListr(subTasks, { - concurrent: true, - rendererOptions: { - collapseSubtasks: false - } - }) + config.nodeId = lastNodeName + config.allNodeIds.push(lastNodeName) } }, { - title: 'Generate Gossip keys', + title: 'Generate Gossip key', task: async (ctx, parentTask) => { - const config = ctx.config - const subTasks = self._nodeGossipKeysTaskList(config.keyFormat, config.nodeIds, config.keysDir, config.curDate, config.allNodeIds) + const config = /** @type {NodeAddConfigClass} **/ ctx.config + const subTasks = self.keyManager.taskGenerateGossipKeys(self.keytoolDepManager, [config.nodeId], config.keysDir, config.curDate, config.allNodeIds) // set up the sub-tasks return parentTask.newListr(subTasks, { concurrent: false, @@ -1253,10 +1789,10 @@ export class NodeCommand extends BaseCommand { skip: (ctx, _) => !ctx.config.generateGossipKeys }, { - title: 'Generate gRPC TLS keys', + title: 'Generate gRPC TLS key', task: async (ctx, parentTask) => { 
- const config = ctx.config - const subTasks = self._nodeTlsKeyTaskList(config.nodeIds, config.keysDir, config.curDate) + const config = /** @type {NodeAddConfigClass} **/ ctx.config + const subTasks = self.keyManager.taskGenerateTLSKeys([config.nodeId], config.keysDir, config.curDate) // set up the sub-tasks return parentTask.newListr(subTasks, { concurrent: false, @@ -1269,330 +1805,575 @@ export class NodeCommand extends BaseCommand { skip: (ctx, _) => !ctx.config.generateTlsKeys }, { - title: 'Prepare staging directory', - task: async (ctx, parentTask) => { - const config = ctx.config - const subTasks = [ - { - title: 'Copy configuration files', - task: () => { - for (const flag of flags.nodeConfigFileFlags.values()) { - const filePath = self.configManager.getFlag(flag) - if (!filePath) { - throw new FullstackTestingError(`Configuration file path is missing for: ${flag.name}`) - } - - const fileName = path.basename(filePath) - const destPath = `${config.stagingDir}/templates/${fileName}` - self.logger.debug(`Copying configuration file to staging: ${filePath} -> ${destPath}`) - - fs.cpSync(filePath, destPath, { force: true }) - } - } - }, - { - title: 'Copy Gossip keys to staging', - task: async (ctx, _) => { - const config = ctx.config - - await this.copyGossipKeysToStaging(config, ctx.config.allNodeIds) - } - }, - { - title: 'Copy gRPC TLS keys to staging', - task: async (ctx, _) => { - const config = ctx.config - for (const nodeId of ctx.config.allNodeIds) { - const tlsKeyFiles = self.keyManager.prepareTLSKeyFilePaths(nodeId, config.keysDir) - await self._copyNodeKeys(tlsKeyFiles, config.stagingKeysDir) - } - } - }, - { - title: 'Prepare config.txt for the network', - task: async (ctx, _) => { - const config = ctx.config - const configTxtPath = `${config.stagingDir}/config.txt` - const template = `${constants.RESOURCES_DIR}/templates/config.template` - await self.platformInstaller.prepareConfigTxt(config.allNodeIds, configTxtPath, config.releaseTag, config.chainId, template) - } - } - ] - - return parentTask.newListr(subTasks, { - concurrent: false, - rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION - }) + title: 'Load signing key certificate', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + const signingCertFile = Templates.renderGossipPemPublicKeyFile(constants.SIGNING_KEY_PREFIX, config.nodeId) + const signingCertFullPath = path.join(config.keysDir, signingCertFile) + ctx.signingCertDer = await this.loadPermCertificate(signingCertFullPath) } }, { - title: 'Fetch platform software into network nodes', - task: - async (ctx, task) => { - return self.fetchPlatformSoftware(ctx, task, self.platformInstaller) - } - }, - { - title: 'Freeze network nodes', - task: - async (ctx, task) => { - await this.freezeNetworkNodes(ctx.config) - } + title: 'Compute mTLS certificate hash', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + const tlsCertFile = Templates.renderTLSPemPublicKeyFile(config.nodeId) + const tlsCertFullPath = path.join(config.keysDir, tlsCertFile) + const tlsCertDer = await this.loadPermCertificate(tlsCertFullPath) + ctx.tlsCertHash = crypto.createHash('sha384').update(tlsCertDer).digest() + } }, { - title: 'Check nodes are frozen', + title: 'Prepare gossip endpoints', task: (ctx, task) => { - const subTasks = [] - for (const nodeId of ctx.config.existingNodeIds) { - subTasks.push({ - title: `Check node: ${chalk.yellow(nodeId)}`, - task: () => self.checkNetworkNodeState(nodeId, 
100, 'FREEZE_COMPLETE') - }) - } - - // set up the sub-tasks - return task.newListr(subTasks, { - concurrent: false, - rendererOptions: { - collapseSubtasks: false + const config = /** @type {NodeAddConfigClass} **/ ctx.config + let endpoints = [] + if (!config.gossipEndpoints) { + if (config.endpointType !== constants.ENDPOINT_TYPE_FQDN) { + throw new FullstackTestingError(`--gossip-endpoints must be set if --endpoint-type is: ${constants.ENDPOINT_TYPE_IP}`) } - }) - } - }, - { - title: 'Setup network nodes', - task: async (ctx, parentTask) => { - const config = ctx.config - - // modify application.properties to trick Hedera Services into receiving an updated address book - await self.bumpHederaConfigVersion(`${config.stagingDir}/templates/application.properties`) - const subTasks = [] - for (const nodeId of config.allNodeIds) { - const podName = config.podNames[nodeId] - subTasks.push({ - title: `Node: ${chalk.yellow(nodeId)}`, - task: () => - self.platformInstaller.taskInstall(podName, config.buildZipFile, config.stagingDir, config.allNodeIds, config.keyFormat, config.force) - }) + endpoints = [ + `${Templates.renderFullyQualifiedNetworkPodName(config.namespace, config.nodeId)}:${constants.HEDERA_NODE_INTERNAL_GOSSIP_PORT}`, + `${Templates.renderFullyQualifiedNetworkSvcName(config.namespace, config.nodeId)}:${constants.HEDERA_NODE_EXTERNAL_GOSSIP_PORT}` + ] + } else { + endpoints = helpers.splitFlagInput(config.gossipEndpoints) } - // set up the sub-tasks - return parentTask.newListr(subTasks, { - concurrent: true, - rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION - }) + ctx.gossipEndpoints = this.prepareEndpoints(config.endpointType, endpoints, constants.HEDERA_NODE_INTERNAL_GOSSIP_PORT) } }, { - title: 'Starting nodes', + title: 'Prepare grpc service endpoints', task: (ctx, task) => { - const subTasks = [] - self.startNodes(ctx.config, ctx.config.allNodeIds, subTasks) + const config = /** @type {NodeAddConfigClass} **/ ctx.config + let endpoints = [] - // set up the sub-tasks - return task.newListr(subTasks, { - concurrent: true, - rendererOptions: { - collapseSubtasks: false, - timer: constants.LISTR_DEFAULT_RENDERER_TIMER_OPTION + if (!config.grpcEndpoints) { + if (config.endpointType !== constants.ENDPOINT_TYPE_FQDN) { + throw new FullstackTestingError(`--grpc-endpoints must be set if --endpoint-type is: ${constants.ENDPOINT_TYPE_IP}`) } - }) - } - }, - { - title: 'Check nodes are ACTIVE', - task: (ctx, task) => { - const subTasks = [] - for (const nodeId of ctx.config.allNodeIds) { - subTasks.push({ - title: `Check node: ${chalk.yellow(nodeId)}`, - task: () => self.checkNetworkNodeState(nodeId, 200) - }) + + endpoints = [ + `${Templates.renderFullyQualifiedNetworkSvcName(config.namespace, config.nodeId)}:${constants.HEDERA_NODE_EXTERNAL_GOSSIP_PORT}` + ] + } else { + endpoints = helpers.splitFlagInput(config.grpcEndpoints) } - // set up the sub-tasks - return task.newListr(subTasks, { - concurrent: false, - rendererOptions: { - collapseSubtasks: false - } - }) + ctx.grpcServiceEndpoints = this.prepareEndpoints(config.endpointType, endpoints, constants.HEDERA_NODE_EXTERNAL_GOSSIP_PORT) } }, { - title: 'Check node proxies are ACTIVE', - // this is more reliable than checking the nodes logs for ACTIVE, as the - // logs will have a lot of white noise from being behind + title: 'Prepare upgrade zip file for node upgrade process', task: async (ctx, task) => { - const subTasks = [] - for (const nodeId of ctx.config.nodeIds) { - subTasks.push({ - title: `Check proxy for node: 
${chalk.yellow(nodeId)}`, - task: async () => await self.k8.waitForPodReady( - [`app=haproxy-${nodeId}`, 'fullstack.hedera.com/type=haproxy'], - 1, 300, 2000) - }) - } - - // set up the sub-tasks - return task.newListr(subTasks, { - concurrent: false, - rendererOptions: { - collapseSubtasks: false - } - }) + const config = /** @type {NodeAddConfigClass} **/ ctx.config + ctx.upgradeZipFile = await this.prepareUpgradeZip(config.stagingDir) + ctx.upgradeZipHash = await this.uploadUpgradeZip(ctx.upgradeZipFile, config.nodeClient) } }, { - title: 'Finalize', - task: (ctx, _) => { - // reset flags so that keys are not regenerated later - self.configManager.setFlag(flags.generateGossipKeys, false) - self.configManager.setFlag(flags.generateTlsKeys, false) - self.configManager.persist() + title: 'Check existing nodes staked amount', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + await this.checkStakingTask(config.existingNodeIds) } } - ], { - concurrent: false, - rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION - }) - - try { - await tasks.run() - } catch (e) { - throw new FullstackTestingError(`Error in setting up nodes: ${e.message}`, e) - } finally { - await self.close() - } - - return true + ] } - async freezeNetworkNodes (config) { - await this.accountManager.loadNodeClient(config.namespace) - const client = this.accountManager._nodeClient - - try { - // fetch special file - const fileId = FileId.fromString('0.0.150') - const fileQuery = new FileContentsQuery().setFileId(fileId) - const addressBookBytes = await fileQuery.execute(client) - const fileHash = crypto.createHash('sha384').update(addressBookBytes).digest('hex') - - const prepareUpgradeTx = await new FreezeTransaction() - .setFreezeType(FreezeType.PrepareUpgrade) - .setFileId(fileId) - .setFileHash(fileHash) - .freezeWith(client) - .execute(client) - - const prepareUpgradeReceipt = await prepareUpgradeTx.getReceipt(client) - - this.logger.debug( - `Upgrade prepared with transaction id: ${prepareUpgradeTx.transactionId.toString()}`, - prepareUpgradeReceipt.status.toString() - ) - - const futureDate = new Date() - this.logger.debug(`Current time: ${futureDate}`) - - futureDate.setTime(futureDate.getTime() + 20000) // 20 seconds in the future - this.logger.debug(`Freeze time: ${futureDate}`) - - const freezeUpgradeTx = await new FreezeTransaction() - .setFreezeType(FreezeType.FreezeUpgrade) - .setStartTimestamp(Timestamp.fromDate(futureDate)) - .setFileId(fileId) - .setFileHash(fileHash) - .freezeWith(client) - .execute(client) + saveContextDataTask (argv) { + return { + title: 'Save context data', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + const outputDir = argv[flags.outputDir.name] + if (!outputDir) { + throw new FullstackTestingError(`Path to export context data not specified. 
Please set a value for --${flags.outputDir.name}`) + } - const freezeUpgradeReceipt = await freezeUpgradeTx.getReceipt(client) + if (!fs.existsSync(outputDir)) { + fs.mkdirSync(outputDir, { recursive: true }) + } + const exportedFields = [ + 'tlsCertHash', + 'upgradeZipHash', + 'newNode' + ] + const exportedCtx = {} + + exportedCtx.signingCertDer = ctx.signingCertDer.toString() + exportedCtx.gossipEndpoints = ctx.gossipEndpoints.map(ep => `${ep.getDomainName}:${ep.getPort}`) + exportedCtx.grpcServiceEndpoints = ctx.grpcServiceEndpoints.map(ep => `${ep.getDomainName}:${ep.getPort}`) + exportedCtx.adminKey = ctx.adminKey.toString() + exportedCtx.existingNodeIds = config.existingNodeIds + + for (const prop of exportedFields) { + exportedCtx[prop] = ctx[prop] + } - this.logger.debug(`Upgrade frozen with transaction id: ${freezeUpgradeTx.transactionId.toString()}`, - freezeUpgradeReceipt.status.toString()) - } catch (e) { - this.logger.error(`Error in freeze upgrade: ${e.message}`, e) - throw new FullstackTestingError(`Error in freeze upgrade: ${e.message}`, e) + fs.writeFileSync(path.join(outputDir, 'ctx.json'), JSON.stringify(exportedCtx)) + } } } - startNodes (config, nodeIds, subTasks) { - for (const nodeId of nodeIds) { - const podName = config.podNames[nodeId] - subTasks.push({ - title: `Start node: ${chalk.yellow(nodeId)}`, - task: async () => { - await this.k8.execContainer(podName, constants.ROOT_CONTAINER, ['bash', '-c', `rm -rf ${constants.HEDERA_HAPI_PATH}/output/*`]) - await this.k8.execContainer(podName, constants.ROOT_CONTAINER, ['systemctl', 'restart', 'network-node']) + loadContextDataTask (argv) { + return { + title: 'Load context data', + task: async (ctx, task) => { + if (argv.importCtxData) { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + const inputDir = argv[flags.inputDir.name] + if (!inputDir) { + throw new FullstackTestingError(`Path to context data not specified. 
Please set a value for --${flags.inputDir.name}`) + } + const ctxData = JSON.parse(fs.readFileSync(path.join(inputDir, 'ctx.json'))) + + ctx.signingCertDer = new Uint8Array(ctxData.signingCertDer.split(',')) + ctx.gossipEndpoints = this.prepareEndpoints(ctx.config.endpointType, ctxData.gossipEndpoints, constants.HEDERA_NODE_INTERNAL_GOSSIP_PORT) + ctx.grpcServiceEndpoints = this.prepareEndpoints(ctx.config.endpointType, ctxData.grpcServiceEndpoints, constants.HEDERA_NODE_EXTERNAL_GOSSIP_PORT) + ctx.adminKey = PrivateKey.fromStringED25519(ctxData.adminKey) + config.nodeId = ctxData.newNode.name + config.existingNodeIds = ctxData.existingNodeIds + config.allNodeIds = [...config.existingNodeIds, ctxData.newNode.name] + + const fieldsToImport = [ + 'tlsCertHash', + 'upgradeZipHash', + 'newNode' + ] + + for (const prop of fieldsToImport) { + ctx[prop] = ctxData[prop] + } } - }) + } } } - async copyGossipKeysToStaging (config, nodeIds) { - // copy gossip keys to the staging - for (const nodeId of nodeIds) { - switch (config.keyFormat) { - case constants.KEY_FORMAT_PEM: { - const signingKeyFiles = this.keyManager.prepareNodeKeyFilePaths(nodeId, config.keysDir, constants.SIGNING_KEY_PREFIX) - await this._copyNodeKeys(signingKeyFiles, config.stagingKeysDir) - - // generate missing agreement keys - const agreementKeyFiles = this.keyManager.prepareNodeKeyFilePaths(nodeId, config.keysDir, constants.AGREEMENT_KEY_PREFIX) - await this._copyNodeKeys(agreementKeyFiles, config.stagingKeysDir) - break + getAddTransactionTasks (argv) { + return [ + { + title: 'Send node create transaction', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + + try { + const nodeCreateTx = await new NodeCreateTransaction() + .setAccountId(ctx.newNode.accountId) + .setGossipEndpoints(ctx.gossipEndpoints) + .setServiceEndpoints(ctx.grpcServiceEndpoints) + .setGossipCaCertificate(ctx.signingCertDer) + .setCertificateHash(ctx.tlsCertHash) + .setAdminKey(ctx.adminKey.publicKey) + .freezeWith(config.nodeClient) + const signedTx = await nodeCreateTx.sign(ctx.adminKey) + const txResp = await signedTx.execute(config.nodeClient) + const nodeCreateReceipt = await txResp.getReceipt(config.nodeClient) + this.logger.debug(`NodeCreateReceipt: ${nodeCreateReceipt.toString()}`) + } catch (e) { + this.logger.error(`Error adding node to network: ${e.message}`, e) + throw new FullstackTestingError(`Error adding node to network: ${e.message}`, e) + } } - - case constants.KEY_FORMAT_PFX: { - const privateKeyFile = Templates.renderGossipPfxPrivateKeyFile(nodeId) - fs.cpSync(`${config.keysDir}/${privateKeyFile}`, `${config.stagingKeysDir}/${privateKeyFile}`) - fs.cpSync(`${config.keysDir}/${constants.PUBLIC_PFX}`, `${config.stagingKeysDir}/${constants.PUBLIC_PFX}`) - break + }, + { + title: 'Send prepare upgrade transaction', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + await this.prepareUpgradeNetworkNodes(config.freezeAdminPrivateKey, ctx.upgradeZipHash, config.nodeClient) + } + }, + { + title: 'Send freeze upgrade transaction', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + await this.freezeUpgradeNetworkNodes(config.freezeAdminPrivateKey, ctx.upgradeZipHash, config.nodeClient) } - - default: - throw new FullstackTestingError(`Unsupported key-format ${config.keyFormat}`) } - } + + ] } - /** - * Return Yargs command definition for 'node' command - * @param nodeCmd an instance of NodeCommand - */ + 
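`saveContextDataTask` and `loadContextDataTask` round-trip the signing certificate through `ctx.json` by serializing the `Uint8Array` as a comma-separated string; a tiny sketch of why that pair of conversions is lossless:

```js
// save side: Uint8Array#toString() yields a comma-joined byte list, e.g. "48,130,1,10"
const signingCertDer = new Uint8Array([48, 130, 1, 10])
const exported = JSON.stringify({ signingCertDer: signingCertDer.toString() })

// load side: split(',') gives numeric strings, which the Uint8Array
// constructor coerces back to bytes
const restored = new Uint8Array(JSON.parse(exported).signingCertDer.split(','))
console.log(restored) // Uint8Array(4) [ 48, 130, 1, 10 ]
```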
getAddExecuteTasks (argv) { + const self = this + + return [ + { + title: 'Download generated files from an existing node', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + await this.downloadNodeGeneratedFiles(config) + } + }, + { + title: 'Prepare staging directory', + task: async (ctx, parentTask) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + return this.prepareStagingTask(ctx, parentTask, config.keysDir, config.stagingKeysDir, config.allNodeIds) + } + }, + { + title: 'Copy node keys to secrets', + task: async (ctx, parentTask) => { + return this.copyNodeKeyTask(ctx, parentTask) + } + }, + { + title: 'Check network nodes are frozen', + task: (ctx, task) => { + return this.checkNodeActivenessTask(ctx, task, ctx.config.existingNodeIds, NodeStatusCodes.FREEZE_COMPLETE) + } + }, + { + title: 'Get node logs and configs', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + await helpers.getNodeLogs(self.k8, config.namespace) + } + }, + { + title: 'Deploy new network node', + task: async (ctx, task) => { + await this.chartUpdateTask(ctx) + } + }, + { + title: 'Kill nodes to pick up updated configMaps', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + for (const /** @type {NetworkNodeServices} **/ service of config.serviceMap.values()) { + await self.k8.kubeClient.deleteNamespacedPod(service.nodePodName, config.namespace, undefined, undefined, 1) + } + } + }, + { + title: 'Check node pods are running', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + return this.checkPodRunningTask(ctx, task, config.allNodeIds) + } + }, + { + title: 'Fetch platform software into all network nodes', + task: + async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + config.serviceMap = await self.accountManager.getNodeServiceMap( + config.namespace) + config.podNames[config.nodeId] = config.serviceMap.get(config.nodeId).nodePodName + + return self.fetchLocalOrReleasedPlatformSoftware(config.allNodeIds, config.podNames, config.releaseTag, task, config.localBuildPath) + } + }, + { + title: 'Download last state from an existing node', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + const node1FullyQualifiedPodName = Templates.renderNetworkPodName(config.existingNodeIds[0]) + const upgradeDirectory = `${constants.HEDERA_HAPI_PATH}/data/saved/com.hedera.services.ServicesMain/0/123` + // zip the contents of the newest folder on node1 within /opt/hgcapp/services-hedera/HapiApp2.0/data/saved/com.hedera.services.ServicesMain/0/123/ + const zipFileName = await self.k8.execContainer(node1FullyQualifiedPodName, constants.ROOT_CONTAINER, ['bash', '-c', `cd ${upgradeDirectory} && mapfile -t states < <(ls -1t .) && jar cf "\${states[0]}.zip" -C "\${states[0]}" . 
&& echo -n \${states[0]}.zip`]) + await self.k8.copyFrom(node1FullyQualifiedPodName, constants.ROOT_CONTAINER, `${upgradeDirectory}/${zipFileName}`, config.stagingDir) + config.lastStateZipPath = path.join(config.stagingDir, zipFileName) + } + }, + { + title: 'Upload last saved state to new network node', + task: + async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + const newNodeFullyQualifiedPodName = Templates.renderNetworkPodName(config.nodeId) + const nodeNumber = Templates.nodeNumberFromNodeId(config.nodeId) + const savedStateDir = (config.lastStateZipPath.match(/\/(\d+)\.zip$/))[1] + const savedStatePath = `${constants.HEDERA_HAPI_PATH}/data/saved/com.hedera.services.ServicesMain/${nodeNumber}/123/${savedStateDir}` + await self.k8.execContainer(newNodeFullyQualifiedPodName, constants.ROOT_CONTAINER, ['bash', '-c', `mkdir -p ${savedStatePath}`]) + await self.k8.copyTo(newNodeFullyQualifiedPodName, constants.ROOT_CONTAINER, config.lastStateZipPath, savedStatePath) + await self.platformInstaller.setPathPermission(newNodeFullyQualifiedPodName, constants.HEDERA_HAPI_PATH) + await self.k8.execContainer(newNodeFullyQualifiedPodName, constants.ROOT_CONTAINER, ['bash', '-c', `cd ${savedStatePath} && jar xf ${path.basename(config.lastStateZipPath)} && rm -f ${path.basename(config.lastStateZipPath)}`]) + } + }, + { + title: 'Setup new network node', + task: async (ctx, parentTask) => { + return this.setupNodesTask(ctx, parentTask, ctx.config.allNodeIds) + } + }, + { + title: 'Start network nodes', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + return this.startNetworkNodesTask(task, config.podNames, config.allNodeIds) + } + }, + { + title: 'Enable port forwarding for JVM debugger', + task: async (ctx, _) => { + await this.enableJVMPortForwarding(ctx.config.debugNodeId) + }, + skip: (ctx, _) => !ctx.config.debugNodeId + }, + { + title: 'Check all nodes are ACTIVE', + task: async (ctx, task) => { + return this.checkNodeActivenessTask(ctx, task, ctx.config.allNodeIds) + } + }, + { + title: 'Check all node proxies are ACTIVE', + // this is more reliable than checking the node logs for ACTIVE, as the + // logs will have a lot of white noise from being behind + task: async (ctx, task) => { + return this.checkNodesProxiesTask(ctx, task, ctx.config.allNodeIds) + } + }, + { + title: 'Stake new node', + task: async (ctx, _) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + await self.addStake(config.namespace, ctx.newNode.accountId, config.nodeId) + } + }, + { + title: 'Trigger stake weight calculate', + task: async (ctx, task) => { + const config = /** @type {NodeAddConfigClass} **/ ctx.config + await this.triggerStakeCalculation(config) + } + }, + { + title: 'Finalize', + task: (ctx, _) => { + // reset flags so that keys are not regenerated later + self.configManager.setFlag(flags.generateGossipKeys, false) + self.configManager.setFlag(flags.generateTlsKeys, false) + self.configManager.persist() + } + } + ] + } + + async addPrepare (argv) { + const self = this + const prepareTasks = this.getAddPrepareTasks(argv) + const tasks = new Listr([ + ...prepareTasks, + self.saveContextDataTask(argv) + ], { + concurrent: false, + rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION + }) + + try { + await tasks.run() + } catch (e) { + self.logger.error(`Error in preparing node add: ${e.message}`, e) + throw new FullstackTestingError(`Error in preparing node add: ${e.message}`, e) + } finally { + await
self.close() + } + + return true + } + + async addSubmitTransactions (argv) { + const self = this + + argv.importCtxData = true + const transactionTasks = this.getAddTransactionTasks(argv) + const tasks = new Listr([ + self.addInitializeTask(argv), + self.loadContextDataTask(argv), + ...transactionTasks + ], { + concurrent: false, + rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION + }) + + try { + await tasks.run() + } catch (e) { + self.logger.error(`Error in submitting transactions to node: ${e.message}`, e) + throw new FullstackTestingError(`Error in submitting transactions to node: ${e.message}`, e) + } finally { + await self.close() + } + + return true + } + + async addExecute (argv) { + const self = this + + argv.importCtxData = true + const executeTasks = this.getAddExecuteTasks(argv) + const tasks = new Listr([ + self.addInitializeTask(argv), + self.getIdentifyExistingNetworkNodesTask(argv), + self.loadContextDataTask(argv), + ...executeTasks + ], { + concurrent: false, + rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION + }) + + try { + await tasks.run() + } catch (e) { + self.logger.error(`Error in executing node add: ${e.message}`, e) + throw new FullstackTestingError(`Error in executing node add: ${e.message}`, e) + } finally { + await self.close() + } + + return true + } + + /** + * @param {Object} argv + * @returns {Promise} + */ + async add (argv) { + const self = this + + const prepareTasks = this.getAddPrepareTasks(argv) + const transactionTasks = this.getAddTransactionTasks(argv) + const executeTasks = this.getAddExecuteTasks(argv) + const tasks = new Listr([ + ...prepareTasks, + ...transactionTasks, + ...executeTasks + ], { + concurrent: false, + rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION + }) + + try { + await tasks.run() + } catch (e) { + self.logger.error(`Error in adding nodes: ${e.message}`, e) + throw new FullstackTestingError(`Error in adding nodes: ${e.message}`, e) + } finally { + await self.close() + } + + return true + } + + /** + * @param {PrivateKey|string} freezeAdminPrivateKey + * @param {Uint8Array|string} upgradeZipHash + * @param {NodeClient} client + * @returns {Promise} + */ + async prepareUpgradeNetworkNodes (freezeAdminPrivateKey, upgradeZipHash, client) { + try { + // transfer some tiny amount to the freeze admin account + await this.accountManager.transferAmount(constants.TREASURY_ACCOUNT_ID, FREEZE_ADMIN_ACCOUNT, 100000) + + // query the balance + const balance = await new AccountBalanceQuery() + .setAccountId(FREEZE_ADMIN_ACCOUNT) + .execute(this.accountManager._nodeClient) + this.logger.debug(`Freeze admin account balance: ${balance.hbars}`) + + // set operator of freeze transaction as freeze admin account + client.setOperator(FREEZE_ADMIN_ACCOUNT, freezeAdminPrivateKey) + + const prepareUpgradeTx = await new FreezeTransaction() + .setFreezeType(FreezeType.PrepareUpgrade) + .setFileId(constants.UPGRADE_FILE_ID) + .setFileHash(upgradeZipHash) + .freezeWith(client) + .execute(client) + + const prepareUpgradeReceipt = await prepareUpgradeTx.getReceipt(client) + + this.logger.debug( + `sent prepare upgrade transaction [id: ${prepareUpgradeTx.transactionId.toString()}]`, + prepareUpgradeReceipt.status.toString() + ) + } catch (e) { + this.logger.error(`Error in prepare upgrade: ${e.message}`, e) + throw new FullstackTestingError(`Error in prepare upgrade: ${e.message}`, e) + } + } + + /** + * @param {PrivateKey|string} freezeAdminPrivateKey + * @param {Uint8Array|string} upgradeZipHash + * @param {NodeClient} client + *
@returns {Promise} + */ + async freezeUpgradeNetworkNodes (freezeAdminPrivateKey, upgradeZipHash, client) { + try { + const futureDate = new Date() + this.logger.debug(`Current time: ${futureDate}`) + + futureDate.setTime(futureDate.getTime() + 5000) // 5 seconds in the future + this.logger.debug(`Freeze time: ${futureDate}`) + + client.setOperator(FREEZE_ADMIN_ACCOUNT, freezeAdminPrivateKey) + const freezeUpgradeTx = await new FreezeTransaction() + .setFreezeType(FreezeType.FreezeUpgrade) + .setStartTimestamp(Timestamp.fromDate(futureDate)) + .setFileId(constants.UPGRADE_FILE_ID) + .setFileHash(upgradeZipHash) + .freezeWith(client) + .execute(client) + + const freezeUpgradeReceipt = await freezeUpgradeTx.getReceipt(client) + this.logger.debug(`Upgrade frozen with transaction id: ${freezeUpgradeTx.transactionId.toString()}`, + freezeUpgradeReceipt.status.toString()) + } catch (e) { + this.logger.error(`Error in freeze upgrade: ${e.message}`, e) + throw new FullstackTestingError(`Error in freeze upgrade: ${e.message}`, e) + } + } + + async enableJVMPortForwarding (nodeId) { + const podName = `network-${nodeId}-0` + this.logger.debug(`Enable port forwarding for JVM debugger on pod ${podName}`) + await this.k8.portForward(podName, constants.JVM_DEBUG_PORT, constants.JVM_DEBUG_PORT) + } + + /** + * @param {Object} podNames + * @param {string[]} nodeIds + * @param {Object[]} subTasks + */ + startNodes (podNames, nodeIds, subTasks) { + for (const nodeId of nodeIds) { + const podName = podNames[nodeId] + subTasks.push({ + title: `Start node: ${chalk.yellow(nodeId)}`, + task: async () => { + await this.k8.execContainer(podName, constants.ROOT_CONTAINER, ['systemctl', 'restart', 'network-node']) + } + }) + } + } + + // Command Definition + /** + * Return Yargs command definition for 'node' command + * @param {NodeCommand} nodeCmd - an instance of NodeCommand + * @returns {{command: string, desc: string, builder: Function}} + */ static getCommandDefinition (nodeCmd) { if (!nodeCmd || !(nodeCmd instanceof NodeCommand)) { throw new IllegalArgumentError('An instance of NodeCommand is required', nodeCmd) } return { command: 'node', - desc: 'Manage Hedera platform node in fullstack testing network', + desc: 'Manage Hedera platform node in solo network', builder: yargs => { return yargs .command({ command: 'setup', desc: 'Setup node with a specific version of Hedera platform', - builder: y => flags.setCommandFlags(y, - flags.apiPermissionProperties, - flags.app, - flags.appConfig, - flags.applicationProperties, - flags.bootstrapProperties, - flags.cacheDir, - flags.chainId, - flags.force, - flags.generateGossipKeys, - flags.generateTlsKeys, - flags.keyFormat, - flags.localBuildPath, - flags.log4j2Xml, - flags.namespace, - flags.nodeIDs, - flags.releaseTag, - flags.settingTxt - ), + builder: y => flags.setCommandFlags(y, ...NodeCommand.SETUP_FLAGS_LIST), handler: argv => { nodeCmd.logger.debug('==== Running \'node setup\' ===') nodeCmd.logger.debug(argv) @@ -1610,6 +2391,8 @@ export class NodeCommand extends BaseCommand { command: 'start', desc: 'Start a node', builder: y => flags.setCommandFlags(y, + flags.app, + flags.debugNodeId, flags.namespace, flags.nodeIDs ), @@ -1649,13 +2432,7 @@ export class NodeCommand extends BaseCommand { .command({ command: 'keys', desc: 'Generate node keys', - builder: y => flags.setCommandFlags(y, - flags.cacheDir, - flags.generateGossipKeys, - flags.generateTlsKeys, - flags.keyFormat, - flags.nodeIDs - ), + builder: y => flags.setCommandFlags(y,
...NodeCommand.KEYS_FLAGS_LIST), handler: argv => { nodeCmd.logger.debug('==== Running \'node keys\' ===') nodeCmd.logger.debug(argv) @@ -1672,13 +2449,7 @@ export class NodeCommand extends BaseCommand { .command({ command: 'refresh', desc: 'Reset and restart a node', - builder: y => flags.setCommandFlags(y, - flags.cacheDir, - flags.keyFormat, - flags.namespace, - flags.nodeIDs, - flags.releaseTag - ), + builder: y => flags.setCommandFlags(y, ...NodeCommand.REFRESH_FLAGS_LIST), handler: argv => { nodeCmd.logger.debug('==== Running \'node refresh\' ===') nodeCmd.logger.debug(argv) @@ -1714,22 +2485,7 @@ export class NodeCommand extends BaseCommand { .command({ command: 'add', desc: 'Adds a node with a specific version of Hedera platform', - builder: y => flags.setCommandFlags(y, - flags.apiPermissionProperties, - flags.applicationProperties, - flags.bootstrapProperties, - flags.cacheDir, - flags.chainId, - flags.force, - flags.generateGossipKeys, - flags.generateTlsKeys, - flags.keyFormat, - flags.log4j2Xml, - flags.namespace, - flags.nodeIDs, - flags.releaseTag, - flags.settingTxt - ), + builder: y => flags.setCommandFlags(y, ...NodeCommand.ADD_FLAGS_LIST), handler: argv => { nodeCmd.logger.debug('==== Running \'node add\' ===') nodeCmd.logger.debug(argv) @@ -1743,22 +2499,804 @@ export class NodeCommand extends BaseCommand { }) } }) - .demandCommand(1, 'Select a node command') - } - } - } - - async bumpHederaConfigVersion (configTxtPath) { - const lines = (await readFile(configTxtPath, 'utf-8')).split('\n') - - for (const line of lines) { - if (line.startsWith('hedera.config.version=')) { - const version = parseInt(line.split('=')[1]) + 1 - lines[lines.indexOf(line)] = `hedera.config.version=${version}` - break - } - } + .command({ + command: 'add-prepare', + desc: 'Prepares the addition of a node with a specific version of Hedera platform', + builder: y => flags.setCommandFlags(y, ...NodeCommand.ADD_PREPARE_FLAGS_LIST), + handler: argv => { + nodeCmd.logger.debug('==== Running \'node add-prepare\' ===') + nodeCmd.logger.debug(argv) - await writeFile(configTxtPath, lines.join('\n')) + nodeCmd.addPrepare(argv).then(r => { + nodeCmd.logger.debug('==== Finished running `node add-prepare`====') + if (!r) process.exit(1) + }).catch(err => { + nodeCmd.logger.showUserError(err) + process.exit(1) + }) + } + }) + .command({ + command: 'add-submit-transactions', + desc: 'Submits NodeCreateTransaction and Upgrade transactions to the network nodes', + builder: y => flags.setCommandFlags(y, ...NodeCommand.ADD_SUBMIT_TRANSACTIONS_FLAGS_LIST), + handler: argv => { + nodeCmd.logger.debug('==== Running \'node add-submit-transactions\' ===') + nodeCmd.logger.debug(argv) + + nodeCmd.addSubmitTransactions(argv).then(r => { + nodeCmd.logger.debug('==== Finished running `node add-submit-transactions`====') + if (!r) process.exit(1) + }).catch(err => { + nodeCmd.logger.showUserError(err) + process.exit(1) + }) + } + }) + .command({ + command: 'add-execute', + desc: 'Executes the addition of a previously prepared node', + builder: y => flags.setCommandFlags(y, ...NodeCommand.ADD_EXECUTE_FLAGS_LIST), + handler: argv => { + nodeCmd.logger.debug('==== Running \'node add-execute\' ===') + nodeCmd.logger.debug(argv) + + nodeCmd.addExecute(argv).then(r => { + nodeCmd.logger.debug('==== Finished running `node add-execute`====') + if (!r) process.exit(1) + }).catch(err => { + nodeCmd.logger.showUserError(err) + process.exit(1) + }) + } + }) + .command({ + command: 'update', + desc: 'Update a node with a specific version of Hedera platform', + builder: y => flags.setCommandFlags(y,
...NodeCommand.UPDATE_FLAGS_LIST), + handler: argv => { + nodeCmd.logger.debug('==== Running \'node update\' ===') + nodeCmd.logger.debug(argv) + + nodeCmd.update(argv).then(r => { + nodeCmd.logger.debug('==== Finished running `node update`====') + if (!r) process.exit(1) + }).catch(err => { + nodeCmd.logger.showUserError(err) + process.exit(1) + }) + } + }) + .command({ + command: 'delete', + desc: 'Delete a node with a specific version of Hedera platform', + builder: y => flags.setCommandFlags(y, ...NodeCommand.DELETE_FLAGS_LIST), + handler: argv => { + nodeCmd.logger.debug('==== Running \'node delete\' ===') + nodeCmd.logger.debug(argv) + + nodeCmd.delete(argv).then(r => { + nodeCmd.logger.debug('==== Finished running `node delete`====') + if (!r) process.exit(1) + }).catch(err => { + nodeCmd.logger.showUserError(err) + process.exit(1) + }) + } + }) + .demandCommand(1, 'Select a node command') + } + } + } + + async update (argv) { + const self = this + + const tasks = new Listr([ + { + title: 'Initialize', + task: async (ctx, task) => { + self.configManager.update(argv) + + // disable the prompts that we don't want to prompt the user for + prompts.disablePrompts([ + flags.app, + flags.chartDirectory, + flags.devMode, + flags.debugNodeId, + flags.endpointType, + flags.force, + flags.fstChartVersion, + flags.gossipEndpoints, + flags.gossipPrivateKey, + flags.gossipPublicKey, + flags.grpcEndpoints, + flags.localBuildPath, + flags.newAccountNumber, + flags.newAdminKey, + flags.tlsPrivateKey, + flags.tlsPublicKey + ]) + + await prompts.execute(task, self.configManager, NodeCommand.UPDATE_FLAGS_LIST) + + /** + * @typedef {Object} NodeUpdateConfigClass + * -- flags -- + * @property {string} app + * @property {string} cacheDir + * @property {string} chartDirectory + * @property {boolean} devMode + * @property {string} debugNodeId + * @property {string} endpointType + * @property {string} fstChartVersion + * @property {string} gossipEndpoints + * @property {string} gossipPrivateKey + * @property {string} gossipPublicKey + * @property {string} grpcEndpoints + * @property {string} localBuildPath + * @property {string} namespace + * @property {string} newAccountNumber + * @property {string} newAdminKey + * @property {string} nodeId + * @property {string} releaseTag + * @property {string} tlsPrivateKey + * @property {string} tlsPublicKey + * -- extra args -- + * @property {PrivateKey} adminKey + * @property {string[]} allNodeIds + * @property {string} chartPath + * @property {string[]} existingNodeIds + * @property {string} freezeAdminPrivateKey + * @property {string} keysDir + * @property {Object} nodeClient + * @property {Object} podNames + * @property {Map} serviceMap + * @property {string} stagingDir + * @property {string} stagingKeysDir + * @property {PrivateKey} treasuryKey + * -- methods -- + * @property {getUnusedConfigs} getUnusedConfigs + */ + /** + * @callback getUnusedConfigs + * @returns {string[]} + */ + + // create a config object for subsequent steps + const config = /** @type {NodeUpdateConfigClass} **/ this.getConfig(NodeCommand.UPDATE_CONFIGS_NAME, NodeCommand.UPDATE_FLAGS_LIST, + [ + 'allNodeIds', + 'existingNodeIds', + 'freezeAdminPrivateKey', + 'keysDir', + 'nodeClient', + 'podNames', + 'serviceMap', + 'stagingDir', + 'stagingKeysDir', + 'treasuryKey' + ]) + + config.curDate = new Date() + config.existingNodeIds = [] + + await self.initializeSetup(config, self.k8) + + // set config in the context for later tasks to use + ctx.config = config + + ctx.config.chartPath = await 
self.prepareChartPath(ctx.config.chartDirectory, + constants.FULLSTACK_TESTING_CHART, constants.FULLSTACK_DEPLOYMENT_CHART) + + // initialize Node Client with existing network nodes prior to adding the new node which isn't functioning, yet + ctx.config.nodeClient = await this.accountManager.loadNodeClient(ctx.config.namespace) + + const accountKeys = await this.accountManager.getAccountKeysFromSecret(FREEZE_ADMIN_ACCOUNT, config.namespace) + config.freezeAdminPrivateKey = accountKeys.privateKey + + const treasuryAccount = await this.accountManager.getTreasuryAccountKeys(config.namespace) + const treasuryAccountPrivateKey = treasuryAccount.privateKey + config.treasuryKey = PrivateKey.fromStringED25519(treasuryAccountPrivateKey) + + self.logger.debug('Initialized config', { config }) + } + }, + { + title: 'Identify existing network nodes', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + return this.identifyExistingNetworkNodes(ctx, task, config) + } + }, + { + title: 'Prepare gossip endpoints', + task: (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + let endpoints = [] + if (!config.gossipEndpoints) { + if (config.endpointType !== constants.ENDPOINT_TYPE_FQDN) { + throw new FullstackTestingError(`--gossip-endpoints must be set if --endpoint-type is: ${constants.ENDPOINT_TYPE_IP}`) + } + + endpoints = [ + `${Templates.renderFullyQualifiedNetworkPodName(config.namespace, config.nodeId)}:${constants.HEDERA_NODE_INTERNAL_GOSSIP_PORT}`, + `${Templates.renderFullyQualifiedNetworkSvcName(config.namespace, config.nodeId)}:${constants.HEDERA_NODE_EXTERNAL_GOSSIP_PORT}` + ] + } else { + endpoints = helpers.splitFlagInput(config.gossipEndpoints) + } + + ctx.gossipEndpoints = this.prepareEndpoints(config.endpointType, endpoints, constants.HEDERA_NODE_INTERNAL_GOSSIP_PORT) + } + }, + { + title: 'Prepare grpc service endpoints', + task: (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + let endpoints = [] + + if (!config.grpcEndpoints) { + if (config.endpointType !== constants.ENDPOINT_TYPE_FQDN) { + throw new FullstackTestingError(`--grpc-endpoints must be set if --endpoint-type is: ${constants.ENDPOINT_TYPE_IP}`) + } + + endpoints = [ + `${Templates.renderFullyQualifiedNetworkSvcName(config.namespace, config.nodeId)}:${constants.HEDERA_NODE_EXTERNAL_GOSSIP_PORT}` + ] + } else { + endpoints = helpers.splitFlagInput(config.grpcEndpoints) + } + + ctx.grpcServiceEndpoints = this.prepareEndpoints(config.endpointType, endpoints, constants.HEDERA_NODE_EXTERNAL_GOSSIP_PORT) + } + }, + { + title: 'Load node admin key', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + config.adminKey = PrivateKey.fromStringED25519(constants.GENESIS_KEY) + } + }, + { + title: 'Prepare upgrade zip file for node upgrade process', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + ctx.upgradeZipFile = await this.prepareUpgradeZip(config.stagingDir) + ctx.upgradeZipHash = await this.uploadUpgradeZip(ctx.upgradeZipFile, config.nodeClient) + } + }, + { + title: 'Check existing nodes staked amount', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + await this.checkStakingTask(config.existingNodeIds) + } + }, + { + title: 'Send node update transaction', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + + const nodeId = 
Templates.nodeNumberFromNodeId(config.nodeId) - 1 + self.logger.info(`nodeId: ${nodeId}`) + self.logger.info(`config.newAccountNumber: ${config.newAccountNumber}`) + + try { + const nodeUpdateTx = await new NodeUpdateTransaction() + .setNodeId(nodeId) + + if (config.tlsPublicKey && config.tlsPrivateKey) { + self.logger.info(`config.tlsPublicKey: ${config.tlsPublicKey}`) + const tlsCertDer = await this.loadPermCertificate(config.tlsPublicKey) + const tlsCertHash = crypto.createHash('sha384').update(tlsCertDer).digest() + nodeUpdateTx.setCertificateHash(tlsCertHash) + + const publicKeyFile = Templates.renderTLSPemPublicKeyFile(config.nodeId) + const privateKeyFile = Templates.renderTLSPemPrivateKeyFile(config.nodeId) + renameAndCopyFile(config.tlsPublicKey, publicKeyFile, config.keysDir) + renameAndCopyFile(config.tlsPrivateKey, privateKeyFile, config.keysDir) + } + + if (config.gossipPublicKey && config.gossipPrivateKey) { + self.logger.info(`config.gossipPublicKey: ${config.gossipPublicKey}`) + const signingCertDer = await this.loadPermCertificate(config.gossipPublicKey) + nodeUpdateTx.setGossipCaCertificate(signingCertDer) + + const publicKeyFile = Templates.renderGossipPemPublicKeyFile(constants.SIGNING_KEY_PREFIX, config.nodeId) + const privateKeyFile = Templates.renderGossipPemPrivateKeyFile(constants.SIGNING_KEY_PREFIX, config.nodeId) + renameAndCopyFile(config.gossipPublicKey, publicKeyFile, config.keysDir) + renameAndCopyFile(config.gossipPrivateKey, privateKeyFile, config.keysDir) + } + + if (config.newAccountNumber) { + nodeUpdateTx.setAccountId(config.newAccountNumber) + } + + let parsedNewKey + if (config.newAdminKey) { + parsedNewKey = PrivateKey.fromStringED25519(config.newAdminKey) + nodeUpdateTx.setAdminKey(parsedNewKey.publicKey) + } + await nodeUpdateTx.freezeWith(config.nodeClient) + + // config.adminKey contains the original key, needed to sign the transaction + if (config.newAdminKey) { + await nodeUpdateTx.sign(parsedNewKey) + } + const signedTx = await nodeUpdateTx.sign(config.adminKey) + const txResp = await signedTx.execute(config.nodeClient) + const nodeUpdateReceipt = await txResp.getReceipt(config.nodeClient) + this.logger.debug(`NodeUpdateReceipt: ${nodeUpdateReceipt.toString()}`) + } catch (e) { + this.logger.error(`Error updating node to network: ${e.message}`, e) + this.logger.error(e.stack) + throw new FullstackTestingError(`Error updating node to network: ${e.message}`, e) + } + } + }, + { + title: 'Send prepare upgrade transaction', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + await this.prepareUpgradeNetworkNodes(config.freezeAdminPrivateKey, ctx.upgradeZipHash, config.nodeClient) + } + }, + { + title: 'Download generated files from an existing node', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + await this.downloadNodeGeneratedFiles(config) + } + }, + { + title: 'Send freeze upgrade transaction', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + await this.freezeUpgradeNetworkNodes(config.freezeAdminPrivateKey, ctx.upgradeZipHash, config.nodeClient) + } + }, + { + title: 'Prepare staging directory', + task: async (ctx, parentTask) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + return this.prepareStagingTask(ctx, parentTask, config.keysDir, config.stagingKeysDir, config.allNodeIds) + } + }, + { + title: 'Copy node keys to secrets', + task: async (ctx, parentTask) => { + return 
this.copyNodeKeyTask(ctx, parentTask) + } + }, + { + title: 'Check network nodes are frozen', + task: (ctx, task) => { + return this.checkNodeActivenessTask(ctx, task, ctx.config.existingNodeIds, NodeStatusCodes.FREEZE_COMPLETE) + } + }, + { + title: 'Get node logs and configs', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + await helpers.getNodeLogs(self.k8, config.namespace) + } + }, + { + title: 'Update chart to use new configMap due to account number change', + task: async (ctx, task) => { + await this.chartUpdateTask(ctx) + }, + // no need to run this step if the account number is not changed, since config.txt will be the same + skip: (ctx, _) => !ctx.config.newAccountNumber && !ctx.config.debugNodeId + }, + { + title: 'Kill nodes to pick up updated configMaps', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + // the updated node will have a new pod ID if its account ID changed which is a label + config.serviceMap = await self.accountManager.getNodeServiceMap( + config.namespace) + for (const /** @type {NetworkNodeServices} **/ service of config.serviceMap.values()) { + await self.k8.kubeClient.deleteNamespacedPod(service.nodePodName, config.namespace, undefined, undefined, 1) + } + self.logger.info('sleep for 15 seconds to give time for pods to finish terminating') + await sleep(15000) + + // again, the pod names will change after the pods are killed + config.serviceMap = await self.accountManager.getNodeServiceMap( + config.namespace) + config.podNames = {} + for (const service of config.serviceMap.values()) { + config.podNames[service.nodeName] = service.nodePodName + } + } + }, + { + title: 'Check node pods are running', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + return this.checkPodRunningTask(ctx, task, config.allNodeIds) + } + }, + { + title: 'Fetch platform software into network nodes', + task: + async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + return self.fetchLocalOrReleasedPlatformSoftware(config.allNodeIds, config.podNames, config.releaseTag, task, config.localBuildPath) + } + }, + { + title: 'Setup network nodes', + task: async (ctx, parentTask) => { + return this.setupNodesTask(ctx, parentTask, ctx.config.allNodeIds) + } + }, + { + title: 'Start network nodes', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + return this.startNetworkNodesTask(task, config.podNames, config.allNodeIds) + } + }, + { + title: 'Enable port forwarding for JVM debugger', + task: async (ctx, _) => { + await this.enableJVMPortForwarding(ctx.config.debugNodeId) + }, + skip: (ctx, _) => !ctx.config.debugNodeId + }, + { + title: 'Check all nodes are ACTIVE', + task: async (ctx, task) => { + return this.checkNodeActivenessTask(ctx, task, ctx.config.allNodeIds) + } + }, + { + title: 'Check all node proxies are ACTIVE', + // this is more reliable than checking the nodes logs for ACTIVE, as the + // logs will have a lot of white noise from being behind + task: async (ctx, task) => { + return this.checkNodesProxiesTask(ctx, task, ctx.config.allNodeIds) + } + }, + { + title: 'Trigger stake weight calculate', + task: async (ctx, task) => { + const config = /** @type {NodeUpdateConfigClass} **/ ctx.config + await this.triggerStakeCalculation(config) + } + }, + { + title: 'Finalize', + task: (ctx, _) => { + // reset flags so that keys are not regenerated later + 
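// (without this guard, the cached generateGossipKeys/generateTlsKeys flags would
// stay true, and a later command run that re-reads the persisted flags could
// regenerate and overwrite the keys that were just deployed; setFlag() updates the
// in-memory value and persist() is assumed to flush the cached config to disk so
// the guard survives across CLI invocations)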
self.configManager.setFlag(flags.generateGossipKeys, false) + self.configManager.setFlag(flags.generateTlsKeys, false) + self.configManager.persist() + } + } + ], { + concurrent: false, + rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION + }) + + try { + await tasks.run() + } catch (e) { + self.logger.error(`Error in updating nodes: ${e.message}`, e) + this.logger.error(e.stack) + throw new FullstackTestingError(`Error in updating nodes: ${e.message}`, e) + } finally { + await self.close() + } + + return true + } + + async delete (argv) { + const self = this + + const tasks = new Listr([ + { + title: 'Initialize', + task: async (ctx, task) => { + self.configManager.update(argv) + + // disable the prompts that we don't want to prompt the user for + prompts.disablePrompts([ + flags.app, + flags.chainId, + flags.chartDirectory, + flags.devMode, + flags.debugNodeId, + flags.endpointType, + flags.force, + flags.fstChartVersion, + flags.localBuildPath + ]) + + await prompts.execute(task, self.configManager, NodeCommand.DELETE_FLAGS_LIST) + + /** + * @typedef {Object} NodeDeleteConfigClass + * -- flags -- + * @property {string} app + * @property {string} cacheDir + * @property {string} chartDirectory + * @property {boolean} devMode + * @property {string} debugNodeId + * @property {string} endpointType + * @property {string} fstChartVersion + * @property {string} localBuildPath + * @property {string} namespace + * @property {string} nodeId + * @property {string} releaseTag + * -- extra args -- + * @property {PrivateKey} adminKey + * @property {string[]} allNodeIds + * @property {string} chartPath + * @property {string[]} existingNodeIds + * @property {string} freezeAdminPrivateKey + * @property {string} keysDir + * @property {Object} nodeClient + * @property {Object} podNames + * @property {Map} serviceMap + * @property {string} stagingDir + * @property {string} stagingKeysDir + * @property {PrivateKey} treasuryKey + * -- methods -- + * @property {getUnusedConfigs} getUnusedConfigs + */ + /** + * @callback getUnusedConfigs + * @returns {string[]} + */ + + // create a config object for subsequent steps + const config = /** @type {NodeDeleteConfigClass} **/ this.getConfig(NodeCommand.DELETE_CONFIGS_NAME, NodeCommand.DELETE_FLAGS_LIST, + [ + 'adminKey', + 'allNodeIds', + 'existingNodeIds', + 'freezeAdminPrivateKey', + 'keysDir', + 'nodeClient', + 'podNames', + 'serviceMap', + 'stagingDir', + 'stagingKeysDir', + 'treasuryKey' + ]) + + config.curDate = new Date() + config.existingNodeIds = [] + + await self.initializeSetup(config, self.k8) + + // set config in the context for later tasks to use + ctx.config = config + + ctx.config.chartPath = await self.prepareChartPath(ctx.config.chartDirectory, + constants.FULLSTACK_TESTING_CHART, constants.FULLSTACK_DEPLOYMENT_CHART) + + // initialize Node Client with existing network nodes prior to adding the new node which isn't functioning, yet + ctx.config.nodeClient = await this.accountManager.loadNodeClient(ctx.config.namespace) + + const accountKeys = await this.accountManager.getAccountKeysFromSecret(FREEZE_ADMIN_ACCOUNT, config.namespace) + config.freezeAdminPrivateKey = accountKeys.privateKey + + const treasuryAccount = await this.accountManager.getTreasuryAccountKeys(config.namespace) + const treasuryAccountPrivateKey = treasuryAccount.privateKey + config.treasuryKey = PrivateKey.fromStringED25519(treasuryAccountPrivateKey) + + self.logger.debug('Initialized config', { config }) + } + }, + { + title: 'Identify existing network nodes', + task: async 
(ctx, task) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + return this.identifyExistingNetworkNodes(ctx, task, config) + } + }, + { + title: 'Load node admin key', + task: async (ctx, task) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + config.adminKey = PrivateKey.fromStringED25519(constants.GENESIS_KEY) + } + }, + { + title: 'Prepare upgrade zip file for node upgrade process', + task: async (ctx, task) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + ctx.upgradeZipFile = await this.prepareUpgradeZip(config.stagingDir) + ctx.upgradeZipHash = await this.uploadUpgradeZip(ctx.upgradeZipFile, config.nodeClient) + } + }, + { + title: 'Check existing nodes staked amount', + task: async (ctx, task) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + await this.checkStakingTask(config.existingNodeIds) + } + }, + { + title: 'Send node delete transaction', + task: async (ctx, task) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + + try { + const accountMap = getNodeAccountMap(config.existingNodeIds) + const deleteAccountId = accountMap.get(config.nodeId) + this.logger.debug(`Deleting node: ${config.nodeId} with account: ${deleteAccountId}`) + const nodeId = Templates.nodeNumberFromNodeId(config.nodeId) - 1 + const nodeDeleteTx = await new NodeDeleteTransaction() + .setNodeId(nodeId) + .freezeWith(config.nodeClient) + + const signedTx = await nodeDeleteTx.sign(config.adminKey) + const txResp = await signedTx.execute(config.nodeClient) + const nodeUpdateReceipt = await txResp.getReceipt(config.nodeClient) + this.logger.debug(`NodeUpdateReceipt: ${nodeUpdateReceipt.toString()}`) + } catch (e) { + this.logger.error(`Error deleting node from network: ${e.message}`, e) + throw new FullstackTestingError(`Error deleting node from network: ${e.message}`, e) + } + } + }, + { + title: 'Send prepare upgrade transaction', + task: async (ctx, task) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + await this.prepareUpgradeNetworkNodes(config.freezeAdminPrivateKey, ctx.upgradeZipHash, config.nodeClient) + } + }, + { + title: 'Download generated files from an existing node', + task: async (ctx, task) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + await this.downloadNodeGeneratedFiles(config) + } + }, + { + title: 'Send freeze upgrade transaction', + task: async (ctx, task) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + await this.freezeUpgradeNetworkNodes(config.freezeAdminPrivateKey, ctx.upgradeZipHash, config.nodeClient) + } + }, + { + title: 'Prepare staging directory', + task: async (ctx, parentTask) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + return this.prepareStagingTask(ctx, parentTask, config.keysDir, config.stagingKeysDir, config.existingNodeIds) + } + }, + { + title: 'Copy node keys to secrets', + task: async (ctx, parentTask) => { + // remove nodeId from existingNodeIds + ctx.config.allNodeIds = ctx.config.existingNodeIds.filter(nodeId => nodeId !== ctx.config.nodeId) + return this.copyNodeKeyTask(ctx, parentTask) + } + }, + { + title: 'Check network nodes are frozen', + task: (ctx, task) => { + return this.checkNodeActivenessTask(ctx, task, ctx.config.existingNodeIds, NodeStatusCodes.FREEZE_COMPLETE) + } + }, + { + title: 'Get node logs and configs', + task: async (ctx, task) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + await 
helpers.getNodeLogs(self.k8, config.namespace) + } + }, + { + title: 'Update chart to use new configMap', + task: async (ctx, task) => { + await this.chartUpdateTask(ctx) + } + }, + { + title: 'Kill nodes to pick up updated configMaps', + task: async (ctx, task) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + for (const /** @type {NetworkNodeServices} **/ service of config.serviceMap.values()) { + await self.k8.kubeClient.deleteNamespacedPod(service.nodePodName, config.namespace, undefined, undefined, 1) + } + } + }, + { + title: 'Check node pods are running', + task: + async (ctx, task) => { + self.logger.info('sleep 20 seconds to give time for pods to come up after being killed') + await sleep(20000) + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + return this.checkPodRunningTask(ctx, task, config.allNodeIds) + } + }, + { + title: 'Fetch platform software into all network nodes', + task: + async (ctx, task) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + config.serviceMap = await self.accountManager.getNodeServiceMap( + config.namespace) + config.podNames[config.nodeId] = config.serviceMap.get( + config.nodeId).nodePodName + return self.fetchLocalOrReleasedPlatformSoftware(config.allNodeIds, config.podNames, config.releaseTag, task, config.localBuildPath) + } + }, + { + title: 'Setup network nodes', + task: async (ctx, parentTask) => { + return this.setupNodesTask(ctx, parentTask, ctx.config.allNodeIds) + } + }, + { + title: 'Start network nodes', + task: async (ctx, task) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + return this.startNetworkNodesTask(task, config.podNames, config.allNodeIds) + } + }, + { + title: 'Enable port forwarding for JVM debugger', + task: async (ctx, _) => { + await this.enableJVMPortForwarding(ctx.config.debugNodeId) + }, + skip: (ctx, _) => !ctx.config.debugNodeId + }, + { + title: 'Check all nodes are ACTIVE', + task: async (ctx, task) => { + return this.checkNodeActivenessTask(ctx, task, ctx.config.allNodeIds) + } + }, + { + title: 'Check all node proxies are ACTIVE', + // this is more reliable than checking the nodes logs for ACTIVE, as the + // logs will have a lot of white noise from being behind + task: async (ctx, task) => { + return this.checkNodesProxiesTask(ctx, task, ctx.config.allNodeIds) + } + }, + { + title: 'Trigger stake weight calculate', + task: async (ctx, task) => { + const config = /** @type {NodeDeleteConfigClass} **/ ctx.config + await this.triggerStakeCalculation(config) + } + }, + { + title: 'Finalize', + task: (ctx, _) => { + // reset flags so that keys are not regenerated later + self.configManager.setFlag(flags.generateGossipKeys, false) + self.configManager.setFlag(flags.generateTlsKeys, false) + self.configManager.persist() + } + } + ], { + concurrent: false, + rendererOptions: constants.LISTR_DEFAULT_RENDERER_OPTION + }) + + try { + await tasks.run() + } catch (e) { + self.logger.error(`Error in deleting nodes: ${e.message}`, e) + throw new FullstackTestingError(`Error in deleting nodes: ${e.message}`, e) + } finally { + await self.close() + } + + return true } } diff --git a/src/commands/prompts.mjs b/src/commands/prompts.mjs index bbc6ade13..69c134b26 100644 --- a/src/commands/prompts.mjs +++ b/src/commands/prompts.mjs @@ -14,12 +14,14 @@ * limitations under the License. 
* */ +'use strict' import { ListrEnquirerPromptAdapter } from '@listr2/prompt-adapter-enquirer' import fs from 'fs' import { FullstackTestingError, IllegalArgumentError } from '../core/errors.mjs' import { ConfigManager, constants } from '../core/index.mjs' import * as flags from './flags.mjs' import * as helpers from '../core/helpers.mjs' +import { resetDisabledPrompts } from './flags.mjs' async function prompt (type, task, input, defaultValue, promptMessage, emptyCheckMessage, flagName) { try { @@ -70,7 +72,7 @@ export async function promptClusterSetupNamespace (task, input) { export async function promptNodeIds (task, input) { return await prompt('input', task, input, - 'node0,node1,node2', + 'node1,node2,node3', 'Enter list of node IDs (comma separated list): ', null, flags.nodeIDs.name) @@ -182,7 +184,6 @@ export async function promptProfile (task, input, choices = constants.ALL_PROFIL if (initial < 0) { const input = await task.prompt(ListrEnquirerPromptAdapter).run({ type: 'select', - initial: choices.indexOf(flags.keyFormat.definition.defaultValue), message: 'Select profile for fullstack network deployment', choices: helpers.cloneArray(choices) }) @@ -196,7 +197,7 @@ export async function promptProfile (task, input, choices = constants.ALL_PROFIL return input } catch (e) { - throw new FullstackTestingError(`input failed: ${flags.keyFormat.name}: ${e.message}`, e) + throw new FullstackTestingError(`input failed: ${flags.profileName.name}`, e) } } @@ -240,14 +241,6 @@ export async function promptDeployCertManagerCrds (task, input) { flags.deployCertManagerCrds.name) } -export async function promptDeployMirrorNode (task, input) { - return await promptToggle(task, input, - flags.deployMirrorNode.definition.defaultValue, - 'Would you like to deploy Hedera Mirror Node? 
', - null, - flags.deployMirrorNode.name) -} - export async function promptDeployHederaExplorer (task, input) { return await promptToggle(task, input, flags.deployHederaExplorer.definition.defaultValue, @@ -348,30 +341,6 @@ export async function promptDeleteSecrets (task, input) { flags.deleteSecrets.name) } -export async function promptKeyFormat (task, input, choices = [constants.KEY_FORMAT_PFX, constants.KEY_FORMAT_PEM]) { - try { - const initial = choices.indexOf(input) - if (initial < 0) { - const input = await task.prompt(ListrEnquirerPromptAdapter).run({ - type: 'select', - initial: choices.indexOf(flags.keyFormat.definition.defaultValue), - message: 'Select key format', - choices: helpers.cloneArray(choices) - }) - - if (!input) { - throw new FullstackTestingError('key-format cannot be empty') - } - - return input - } - - return input - } catch (e) { - throw new FullstackTestingError(`input failed: ${flags.keyFormat.name}: ${e.message}`, e) - } -} - export async function promptFstChartVersion (task, input) { return await promptText(task, input, flags.fstChartVersion.definition.defaultValue, @@ -412,6 +381,57 @@ export async function promptAmount (task, input) { flags.amount.name) } +export async function promptNewNodeId (task, input) { + return await promptText(task, input, + flags.nodeID.definition.defaultValue, + 'Enter the new node id: ', + null, + flags.nodeID.name) +} + +export async function promptGossipEndpoints (task, input) { + return await promptText(task, input, + flags.gossipEndpoints.definition.defaultValue, + 'Enter the gossip endpoints(comma separated): ', + null, + flags.gossipEndpoints.name) +} + +export async function promptGrpcEndpoints (task, input) { + return await promptText(task, input, + flags.grpcEndpoints.definition.defaultValue, + 'Enter the gRPC endpoints(comma separated): ', + null, + flags.grpcEndpoints.name) +} + +export async function promptEndpointType (task, input) { + return await promptText(task, input, + flags.endpointType.definition.defaultValue, + 'Enter the endpoint type(IP or FQDN): ', + null, + flags.endpointType.name) +} + +export async function promptPersistentVolumeClaims (task, input) { + return await promptToggle(task, input, + flags.persistentVolumeClaims.definition.defaultValue, + 'Would you like to enable persistent volume claims to store data outside the pod? ', + null, + flags.persistentVolumeClaims.name) +} + +export async function promptMirrorNodeVersion (task, input) { + return await promptToggle(task, input, + flags.mirrorNodeVersion.definition.defaultValue, + 'Would you like to choose mirror node version? 
', + null, + flags.mirrorNodeVersion.name) +} + +/** + * @returns {Map} + */ export function getPromptMap () { return new Map() .set(flags.accountId.name, promptAccountId) @@ -426,7 +446,6 @@ export function getPromptMap () { .set(flags.deployCertManagerCrds.name, promptDeployCertManagerCrds) .set(flags.deployHederaExplorer.name, promptDeployHederaExplorer) .set(flags.deployMinio.name, promptDeployMinio) - .set(flags.deployMirrorNode.name, promptDeployMirrorNode) .set(flags.deployPrometheusStack.name, promptDeployPrometheusStack) .set(flags.enableHederaExplorerTls.name, promptEnableHederaExplorerTls) .set(flags.enablePrometheusSvcMonitor.name, promptEnablePrometheusSvcMonitor) @@ -435,11 +454,11 @@ export function getPromptMap () { .set(flags.generateGossipKeys.name, promptGenerateGossipKeys) .set(flags.generateTlsKeys.name, promptGenerateTLSKeys) .set(flags.hederaExplorerTlsHostName.name, promptHederaExplorerTlsHostName) - .set(flags.keyFormat.name, promptKeyFormat) .set(flags.namespace.name, promptNamespace) .set(flags.nodeIDs.name, promptNodeIds) .set(flags.operatorId.name, promptOperatorId) .set(flags.operatorKey.name, promptOperatorKey) + .set(flags.persistentVolumeClaims.name, promptPersistentVolumeClaims) .set(flags.privateKey.name, promptPrivateKey) .set(flags.profileFile.name, promptProfileFile) .set(flags.profileName.name, promptProfile) @@ -449,6 +468,11 @@ export function getPromptMap () { .set(flags.tlsClusterIssuerType.name, promptTlsClusterIssuerType) .set(flags.updateAccountKeys.name, promptUpdateAccountKeys) .set(flags.valuesFile.name, promptValuesFile) + .set(flags.nodeID.name, promptNewNodeId) + .set(flags.gossipEndpoints.name, promptGossipEndpoints) + .set(flags.grpcEndpoints.name, promptGrpcEndpoints) + .set(flags.endpointType.name, promptEndpointType) + .set(flags.mirrorNodeVersion.name, promptMirrorNodeVersion) } // build the prompt registry @@ -457,7 +481,7 @@ export function getPromptMap () { * @param task task object from listr2 * @param configManager config manager to store flag values * @param {CommandFlag[]} flagList list of flag objects - * @return {Promise} + * @returns {Promise} */ export async function execute (task, configManager, flagList = []) { if (!configManager || !(configManager instanceof ConfigManager)) { @@ -486,6 +510,7 @@ export async function execute (task, configManager, flagList = []) { * @param {CommandFlag[]} flags list of flags to disable prompts for */ export function disablePrompts (flags) { + resetDisabledPrompts() for (const flag of flags) { if (flag.definition) { flag.definition.disablePrompt = true diff --git a/src/commands/relay.mjs b/src/commands/relay.mjs index 0321753e2..77a8e4e31 100644 --- a/src/commands/relay.mjs +++ b/src/commands/relay.mjs @@ -14,6 +14,7 @@ * limitations under the License. 
* */ +'use strict' import { Listr } from 'listr2' import { FullstackTestingError, MissingArgumentError } from '../core/errors.mjs' import * as helpers from '../core/helpers.mjs' @@ -24,6 +25,11 @@ import * as prompts from './prompts.mjs' import { getNodeAccountMap } from '../core/helpers.mjs' export class RelayCommand extends BaseCommand { + /** + * @param {{profileManager: ProfileManager, accountManager: AccountManager, logger: Logger, helm: Helm, k8: K8, + * chartManager: ChartManager, configManager: ConfigManager, depManager: DependencyManager, + * downloader: PackageDownloader}} opts + */ constructor (opts) { super(opts) @@ -33,11 +39,45 @@ export class RelayCommand extends BaseCommand { this.accountManager = opts.accountManager } + /** + * @returns {string} + */ + static get DEPLOY_CONFIGS_NAME () { + return 'deployConfigs' + } + + /** + * @returns {CommandFlag[]} + */ + static get DEPLOY_FLAGS_LIST () { + return [ + flags.chainId, + flags.chartDirectory, + flags.namespace, + flags.nodeIDs, + flags.operatorId, + flags.operatorKey, + flags.profileFile, + flags.profileName, + flags.relayReleaseTag, + flags.replicaCount, + flags.valuesFile + ] + } + + /** + * @param {string} valuesFile + * @param {string[]} nodeIDs + * @param {string} chainID + * @param {string} relayRelease + * @param {number} replicaCount + * @param {string} operatorID + * @param {string} operatorKey + * @param {string} namespace + * @returns {Promise} + */ async prepareValuesArg (valuesFile, nodeIDs, chainID, relayRelease, replicaCount, operatorID, operatorKey, namespace) { let valuesArg = '' - if (valuesFile) { - valuesArg += this.prepareValuesFiles(valuesFile) - } const profileName = this.configManager.getFlag(flags.profileName) const profileValuesFile = await this.profileManager.prepareValuesForRpcRelayChart(profileName) @@ -77,11 +117,21 @@ export class RelayCommand extends BaseCommand { const networkJsonString = await this.prepareNetworkJsonString(nodeIDs, namespace) valuesArg += ` --set config.HEDERA_NETWORK='${networkJsonString}'` + + if (valuesFile) { + valuesArg += this.prepareValuesFiles(valuesFile) + } + return valuesArg } - // created a json string to represent the map between the node keys and their ids - // output example '{"node-1": "0.0.3", "node-2": "0.004"}' + /** + * created a json string to represent the map between the node keys and their ids + * output example '{"node-1": "0.0.3", "node-2": "0.004"}' + * @param {string[]} nodeIDs + * @param {string} namespace + * @returns {Promise} + */ async prepareNetworkJsonString (nodeIDs = [], namespace) { if (!nodeIDs) { throw new MissingArgumentError('Node IDs must be specified') @@ -92,15 +142,19 @@ export class RelayCommand extends BaseCommand { const networkNodeServicesMap = await this.accountManager.getNodeServiceMap(namespace) nodeIDs.forEach(nodeID => { - const nodeName = networkNodeServicesMap.get(nodeID).nodeName + const haProxyClusterIp = networkNodeServicesMap.get(nodeID).haProxyClusterIp const haProxyGrpcPort = networkNodeServicesMap.get(nodeID).haProxyGrpcPort - const networkKey = `network-${nodeName}:${haProxyGrpcPort}` + const networkKey = `${haProxyClusterIp}:${haProxyGrpcPort}` networkIds[networkKey] = accountMap.get(nodeID) }) return JSON.stringify(networkIds) } + /** + * @param {string[]} nodeIDs + * @returns {string} + */ prepareReleaseName (nodeIDs = []) { if (!nodeIDs) { throw new MissingArgumentError('Node IDs must be specified') @@ -114,6 +168,10 @@ export class RelayCommand extends BaseCommand { return releaseName } + /** + * @param 
{Object} argv + * @returns {Promise} + */ async deploy (argv) { const self = this const tasks = new Listr([ @@ -125,35 +183,43 @@ self.configManager.update(argv) - await prompts.execute(task, self.configManager, [ - flags.chainId, - flags.chartDirectory, - flags.namespace, - flags.nodeIDs, - flags.operatorId, - flags.operatorKey, - flags.profileFile, - flags.profileName, - flags.relayReleaseTag, - flags.replicaCount, - flags.valuesFile - ]) + await prompts.execute(task, self.configManager, RelayCommand.DEPLOY_FLAGS_LIST) + + /** + * @typedef {Object} RelayDeployConfigClass + * -- flags -- + * @property {string} chainId + * @property {string} chartDirectory + * @property {string} namespace + * @property {string} nodeIDs + * @property {string} operatorId + * @property {string} operatorKey + * @property {string} profileFile + * @property {string} profileName + * @property {string} relayReleaseTag + * @property {number} replicaCount + * @property {string} valuesFile + * -- extra args -- + * @property {string} chartPath + * @property {boolean} isChartInstalled + * @property {string[]} nodeIds + * @property {string} releaseName + * @property {string} valuesArg + * -- methods -- + * @property {getUnusedConfigs} getUnusedConfigs + */ + /** + * @callback getUnusedConfigs + * @returns {string[]} + */ // prompt if inputs are empty and set it in the context - ctx.config = { - chainId: self.configManager.getFlag(flags.chainId), - chartDir: self.configManager.getFlag(flags.chartDirectory), - namespace: self.configManager.getFlag(flags.namespace), - nodeIds: helpers.parseNodeIds(self.configManager.getFlag(flags.nodeIDs)), - operatorId: self.configManager.getFlag(flags.operatorId), - operatorKey: self.configManager.getFlag(flags.operatorKey), - relayRelease: self.configManager.getFlag(flags.relayReleaseTag), - replicaCount: self.configManager.getFlag(flags.replicaCount), - valuesFile: self.configManager.getFlag(flags.valuesFile) - } + ctx.config = /** @type {RelayDeployConfigClass} **/ this.getConfig(RelayCommand.DEPLOY_CONFIGS_NAME, RelayCommand.DEPLOY_FLAGS_LIST, + ['nodeIds']) - ctx.releaseName = self.prepareReleaseName(ctx.config.nodeIds) - ctx.isChartInstalled = await self.chartManager.isChartInstalled(ctx.config.namespace, ctx.releaseName) + ctx.config.nodeIds = helpers.parseNodeIds(ctx.config.nodeIDs) + ctx.config.releaseName = self.prepareReleaseName(ctx.config.nodeIds) + ctx.config.isChartInstalled = await self.chartManager.isChartInstalled(ctx.config.namespace, ctx.config.releaseName) self.logger.debug('Initialized config', { config: ctx.config }) } @@ -161,32 +227,30 @@ { title: 'Prepare chart values', task: async (ctx, _) => { - ctx.chartPath = await self.prepareChartPath(ctx.config.chartDir, constants.JSON_RPC_RELAY_CHART, constants.JSON_RPC_RELAY_CHART) - ctx.valuesArg = await self.prepareValuesArg( - ctx.config.valuesFile, - ctx.config.nodeIds, - ctx.config.chainId, - ctx.config.relayRelease, - ctx.config.replicaCount, - ctx.config.operatorId, - ctx.config.operatorKey, - ctx.config.namespace + const config = /** @type {RelayDeployConfigClass} **/ ctx.config + config.chartPath = await self.prepareChartPath(config.chartDirectory, constants.JSON_RPC_RELAY_CHART, constants.JSON_RPC_RELAY_CHART) + config.valuesArg = await self.prepareValuesArg( + config.valuesFile, + config.nodeIds, + config.chainId, + config.relayReleaseTag, + config.replicaCount, + config.operatorId, + config.operatorKey, +
config.namespace ) } }, { title: 'Deploy JSON RPC Relay', task: async (ctx, _) => { - const namespace = ctx.config.namespace - const releaseName = ctx.releaseName - const chartPath = ctx.chartPath - const valuesArg = ctx.valuesArg + const config = /** @type {RelayDeployConfigClass} **/ ctx.config - await self.chartManager.install(namespace, releaseName, chartPath, '', valuesArg) + await self.chartManager.install(config.namespace, config.releaseName, config.chartPath, '', config.valuesArg) await self.k8.waitForPods([constants.POD_PHASE_RUNNING], [ 'app=hedera-json-rpc-relay', - `app.kubernetes.io/instance=${releaseName}` + `app.kubernetes.io/instance=${config.releaseName}` ], 1, 900, 1000) // reset nodeID @@ -197,14 +261,14 @@ export class RelayCommand extends BaseCommand { { title: 'Check relay is ready', task: async (ctx, _) => { - const releaseName = ctx.releaseName + const config = /** @type {RelayDeployConfigClass} **/ ctx.config try { await self.k8.waitForPodReady([ 'app=hedera-json-rpc-relay', - `app.kubernetes.io/instance=${releaseName}` + `app.kubernetes.io/instance=${config.releaseName}` ], 1, 100, 2000) } catch (e) { - throw new FullstackTestingError(`Relay ${releaseName} is not ready: ${e.message}`, e) + throw new FullstackTestingError(`Relay ${config.releaseName} is not ready: ${e.message}`, e) } } } @@ -222,6 +286,10 @@ export class RelayCommand extends BaseCommand { return true } + /** + * @param {Object} argv + * @returns {Promise} + */ async destroy (argv) { const self = this @@ -241,13 +309,13 @@ export class RelayCommand extends BaseCommand { // prompt if inputs are empty and set it in the context ctx.config = { - chartDir: self.configManager.getFlag(flags.chartDirectory), + chartDirectory: self.configManager.getFlag(flags.chartDirectory), namespace: self.configManager.getFlag(flags.namespace), nodeIds: helpers.parseNodeIds(self.configManager.getFlag(flags.nodeIDs)) } - ctx.releaseName = this.prepareReleaseName(ctx.config.nodeIds) - ctx.isChartInstalled = await this.chartManager.isChartInstalled(ctx.config.namespace, ctx.releaseName) + ctx.config.releaseName = this.prepareReleaseName(ctx.config.nodeIds) + ctx.config.isChartInstalled = await this.chartManager.isChartInstalled(ctx.config.namespace, ctx.config.releaseName) self.logger.debug('Initialized config', { config: ctx.config }) } @@ -255,12 +323,11 @@ export class RelayCommand extends BaseCommand { { title: 'Destroy JSON RPC Relay', task: async (ctx, _) => { - const namespace = ctx.config.namespace - const releaseName = ctx.releaseName + const config = ctx.config - await this.chartManager.uninstall(namespace, releaseName) + await this.chartManager.uninstall(config.namespace, config.releaseName) - this.logger.showList('Destroyed Relays', await self.chartManager.getInstalledCharts(namespace)) + this.logger.showList('Destroyed Relays', await self.chartManager.getInstalledCharts(config.namespace)) // reset nodeID self.configManager.setFlag(flags.nodeIDs, '') @@ -282,32 +349,24 @@ export class RelayCommand extends BaseCommand { return true } + /** + * @param {RelayCommand} relayCmd + * @returns {{command: string, desc: string, builder: Function}} + */ static getCommandDefinition (relayCmd) { if (!relayCmd || !(relayCmd instanceof RelayCommand)) { throw new MissingArgumentError('An instance of RelayCommand is required', relayCmd) } return { command: 'relay', - desc: 'Manage JSON RPC relays in fullstack testing network', + desc: 'Manage JSON RPC relays in solo network', builder: yargs => { return yargs .command({ command: 
'deploy', desc: 'Deploy a JSON RPC relay', builder: y => { - flags.setCommandFlags(y, - flags.chainId, - flags.chartDirectory, - flags.namespace, - flags.nodeIDs, - flags.operatorId, - flags.operatorKey, - flags.profileFile, - flags.profileName, - flags.relayReleaseTag, - flags.replicaCount, - flags.valuesFile - ) + flags.setCommandFlags(y, ...RelayCommand.DEPLOY_FLAGS_LIST) }, handler: argv => { relayCmd.logger.debug("==== Running 'relay install' ===", { argv }) @@ -326,6 +385,7 @@ export class RelayCommand extends BaseCommand { command: 'destroy', desc: 'Destroy JSON RPC relay', builder: y => flags.setCommandFlags(y, + flags.chartDirectory, flags.namespace, flags.nodeIDs ), diff --git a/src/core/account_manager.mjs b/src/core/account_manager.mjs index e3ac36c7f..728dd48e9 100644 --- a/src/core/account_manager.mjs +++ b/src/core/account_manager.mjs @@ -14,7 +14,6 @@ * limitations under the License. * */ -import * as HashgraphProto from '@hashgraph/proto' import * as Base64 from 'js-base64' import os from 'os' import * as constants from './constants.mjs' @@ -39,6 +38,14 @@ import { FullstackTestingError, MissingArgumentError } from './errors.mjs' import { Templates } from './templates.mjs' import ip from 'ip' import { NetworkNodeServicesBuilder } from './network_node_services.mjs' +import path from 'path' + +/** + * @typedef {Object} AccountIdWithKeyPairObject + * @property {string} accountId + * @property {string} privateKey + * @property {string} publicKey + */ const REASON_FAILED_TO_GET_KEYS = 'failed to get keys for accountId' const REASON_SKIPPED = 'skipped since it does not have a genesis key' @@ -50,8 +57,8 @@ const REJECTED = 'rejected' export class AccountManager { /** * creates a new AccountManager instance - * @param logger the logger to use - * @param k8 the K8 instance + * @param {Logger} logger - the logger to use + * @param {K8} k8 - the K8 instance */ constructor (logger, k8) { if (!logger) throw new Error('An instance of core/Logger is required') @@ -60,15 +67,19 @@ export class AccountManager { this.logger = logger this.k8 = k8 this._portForwards = [] + + /** + * @type {NodeClient|null} + * @public + */ this._nodeClient = null } /** * Gets the account keys from the Kubernetes secret from which it is stored - * @param accountId the account ID for which we want its keys - * @param namespace the namespace that is storing the secret - * @returns {Promise<{accountId: string, privateKey: string, publicKey: string}|null>} a - * custom object with the account id, private key, and public key + * @param {string} accountId - the account ID for which we want its keys + * @param {string} namespace - the namespace that is storing the secret + * @returns {Promise} */ async getAccountKeysFromSecret (accountId, namespace) { const secret = await this.k8.getSecret(namespace, Templates.renderAccountKeySecretLabelSelector(accountId)) @@ -79,7 +90,12 @@ export class AccountManager { publicKey: Base64.decode(secret.data.publicKey) } } else { - return null + // if it isn't in the secrets we can load genesis key + return { + accountId, + privateKey: constants.GENESIS_KEY, + publicKey: PrivateKey.fromStringED25519(constants.GENESIS_KEY).publicKey.toString() + } } } @@ -87,31 +103,21 @@ export class AccountManager { * Gets the treasury account private key from Kubernetes secret if it exists, else * returns the Genesis private key, then will return an AccountInfo object with the * accountId, privateKey, publicKey - * @param namespace the namespace that the secret is in - * @returns 
{Promise<{accountId: string, privateKey: string, publicKey: string}>} + * @param {string} namespace - the namespace that the secret is in + * @returns {Promise} */ async getTreasuryAccountKeys (namespace) { // check to see if the treasure account is in the secrets - let accountInfo = await this.getAccountKeysFromSecret(constants.TREASURY_ACCOUNT_ID, namespace) - - // if it isn't in the secrets we can load genesis key - if (!accountInfo) { - accountInfo = { - accountId: constants.TREASURY_ACCOUNT_ID, - privateKey: constants.GENESIS_KEY, - publicKey: PrivateKey.fromStringED25519(constants.GENESIS_KEY).publicKey.toString() - } - } - - return accountInfo + return await this.getAccountKeysFromSecret(constants.TREASURY_ACCOUNT_ID, namespace) } /** * batch up the accounts into sets to be processed - * @returns an array of arrays of numbers representing the accounts to update + * @param {number[][]} [accountRange] + * @returns {number[][]} an array of arrays of numbers representing the accounts to update */ batchAccounts (accountRange = constants.SYSTEM_ACCOUNTS) { - const batchSize = constants.ACCOUNT_CREATE_BATCH_SIZE + const batchSize = constants.ACCOUNT_UPDATE_BATCH_SIZE const batchSets = [] let currentBatch = [] @@ -156,17 +162,29 @@ export class AccountManager { /** * loads and initializes the Node Client - * @param namespace the namespace of the network - * @returns {Promise} + * @param {string} namespace - the namespace of the network + * @returns {Promise} */ async loadNodeClient (namespace) { if (!this._nodeClient || this._nodeClient.isClientShutDown) { - const treasuryAccountInfo = await this.getTreasuryAccountKeys(namespace) - const networkNodeServicesMap = await this.getNodeServiceMap(namespace) - - this._nodeClient = await this._getNodeClient(namespace, - networkNodeServicesMap, treasuryAccountInfo.accountId, treasuryAccountInfo.privateKey) + await this.refreshNodeClient(namespace) } + + return this._nodeClient + } + + /** + * loads and initializes the Node Client + * @param namespace the namespace of the network + * @returns {Promise} + */ + async refreshNodeClient (namespace) { + await this.close() + const treasuryAccountInfo = await this.getTreasuryAccountKeys(namespace) + const networkNodeServicesMap = await this.getNodeServiceMap(namespace) + + this._nodeClient = await this._getNodeClient(namespace, + networkNodeServicesMap, treasuryAccountInfo.accountId, treasuryAccountInfo.privateKey) } /** @@ -205,10 +223,10 @@ export class AccountManager { /** * Returns a node client that can be used to make calls against - * @param namespace the namespace for which the node client resides - * @param {Map}networkNodeServicesMap a map of the service objects that proxy the nodes - * @param operatorId the account id of the operator of the transactions - * @param operatorKey the private key of the operator of the transactions + * @param {string} namespace - the namespace for which the node client resides + * @param {Map} networkNodeServicesMap - a map of the service objects that proxy the nodes + * @param {string} operatorId - the account id of the operator of the transactions + * @param {string} operatorKey - the private key of the operator of the transactions * @returns {Promise} a node client that can be used to call transactions */ async _getNodeClient (namespace, networkNodeServicesMap, operatorId, operatorKey) { @@ -222,7 +240,7 @@ export class AccountManager { const port = networkNodeService.haProxyGrpcPort const targetPort = usePortForward ? 
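+ // when a port-forward is active the client dials the forwarded local port instead of the pod's exposed gRPC port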
localPort : port - if (usePortForward) { + if (usePortForward && this._portForwards.length < networkNodeServicesMap.size) { this._portForwards.push(await this.k8.portForward(networkNodeService.haProxyPodName, localPort, port)) } @@ -232,9 +250,10 @@ export class AccountManager { } this.logger.debug(`creating client from network configuration: ${JSON.stringify(nodes)}`) - this._nodeClient = Client.fromConfig({ network: nodes }) + // scheduleNetworkUpdate is set to false, because the ports 50212/50211 are hardcoded in JS SDK that will not work when running locally or in a pipeline + this._nodeClient = Client.fromConfig({ network: nodes, scheduleNetworkUpdate: false }) this._nodeClient.setOperator(operatorId, operatorKey) - this._nodeClient.setLogger(new Logger(LogLevel.Trace, `${constants.SOLO_LOGS_DIR}/hashgraph-sdk.log`)) + this._nodeClient.setLogger(new Logger(LogLevel.Trace, path.join(constants.SOLO_LOGS_DIR, 'hashgraph-sdk.log'))) this._nodeClient.setMaxAttempts(constants.NODE_CLIENT_MAX_ATTEMPTS) this._nodeClient.setMinBackoff(constants.NODE_CLIENT_MIN_BACKOFF) this._nodeClient.setMaxBackoff(constants.NODE_CLIENT_MAX_BACKOFF) @@ -247,14 +266,13 @@ export class AccountManager { /** * Gets a Map of the Hedera node services and the attributes needed - * @param namespace the namespace of the fullstack network deployment - * @returns {Promise>} a map of the network node services + * @param {string} namespace - the namespace of the fullstack network deployment + * @returns {Promise>} a map of the network node services */ async getNodeServiceMap (namespace) { const labelSelector = 'fullstack.hedera.com/node-name' - /** @type {Map} **/ - const serviceBuilderMap = new Map() + const serviceBuilderMap = /** @type {Map} **/ new Map() const serviceList = await this.k8.kubeClient.listNamespacedService( namespace, undefined, undefined, undefined, undefined, labelSelector) @@ -306,6 +324,15 @@ export class AccountManager { serviceBuilder.withHaProxyPodName(podList.body.items[0].metadata.name) } + // get the pod name of the network node + const pods = await this.k8.getPodsByLabel(['fullstack.hedera.com/type=network-node']) + for (const pod of pods) { + const podName = pod.metadata.name + const nodeName = pod.metadata.labels['fullstack.hedera.com/node-name'] + const serviceBuilder = /** @type {NetworkNodeServicesBuilder} **/ serviceBuilderMap.get(nodeName) + serviceBuilder.withNodePodName(podName) + } + /** @type {Map} **/ const serviceMap = new Map() for (const networkNodeServicesBuilder of serviceBuilderMap.values()) { @@ -316,12 +343,11 @@ export class AccountManager { } /** - * updates a set of special accounts keys with a newly generated key and stores them in a - * Kubernetes secret - * @param namespace the namespace of the nodes network - * @param currentSet the accounts to update - * @param updateSecrets whether to delete the secret prior to creating a new secret - * @param resultTracker an object to keep track of the results from the accounts that are being updated + * updates a set of special accounts keys with a newly generated key and stores them in a Kubernetes secret + * @param {string} namespace the namespace of the nodes network + * @param {string[]} currentSet - the accounts to update + * @param {boolean} updateSecrets - whether to delete the secret prior to creating a new secret + * @param {Object} resultTracker - an object to keep track of the results from the accounts that are being updated * @returns {Promise<*>} the updated resultTracker object */ async updateSpecialAccountsKeys 
(namespace, currentSet, updateSecrets, resultTracker) { @@ -360,12 +386,11 @@ export class AccountManager { } /** - * update the account keys for a given account and store its new key in a Kubernetes - * secret - * @param namespace the namespace of the nodes network - * @param accountId the account that will get its keys updated - * @param genesisKey the genesis key to compare against - * @param updateSecrets whether to delete the secret prior to creating a new secret + * update the account keys for a given account and store its new key in a Kubernetes secret + * @param {string} namespace - the namespace of the nodes network + * @param {AccountId} accountId - the account that will get its keys updated + * @param {PrivateKey} genesisKey - the genesis key to compare against + * @param {boolean} updateSecrets - whether to delete the secret prior to creating a new secret * @returns {Promise<{value: string, status: string}|{reason: string, value: string, status: string}>} the result of the call */ async updateAccountKeys (namespace, accountId, genesisKey, updateSecrets) { @@ -454,7 +479,7 @@ export class AccountManager { /** * gets the account info from Hedera network - * @param accountId the account + * @param {AccountId|string} accountId - the account * @returns {AccountInfo} the private key of the account */ async accountInfoQuery (accountId) { @@ -471,17 +496,16 @@ export class AccountManager { /** * gets the account private and public key from the Kubernetes secret from which it is stored - * @param accountId the account + * @param {AccountId|string} accountId - the account * @returns {Promise} the private key of the account */ async getAccountKeys (accountId) { const accountInfo = await this.accountInfoQuery(accountId) - let keys + let keys = [] if (accountInfo.key instanceof KeyList) { keys = accountInfo.key.toArray() } else { - keys = [] keys.push(accountInfo.key) } @@ -490,9 +514,9 @@ export class AccountManager { /** * send an account key update transaction to the network of nodes - * @param accountId the account that will get it's keys updated - * @param newPrivateKey the new private key - * @param oldPrivateKey the genesis key that is the current key + * @param {AccountId|string} accountId - the account that will get its keys updated + * @param {PrivateKey|string} newPrivateKey - the new private key + * @param {PrivateKey|string} oldPrivateKey - the genesis key that is the current key * @returns {Promise} whether the update was successful */ async sendAccountKeyUpdate (accountId, newPrivateKey, oldPrivateKey) { @@ -525,13 +549,13 @@ export class AccountManager { /** * creates a new Hedera account - * @param namespace the namespace to store the Kubernetes key secret into - * @param privateKey the private key of type PrivateKey - * @param amount the amount of HBAR to add to the account - * @param setAlias whether to set the alias of the account to the public key, - * requires the privateKey supplied to be ECDSA - * @returns {{accountId: AccountId, privateKey: string, publicKey: string, balance: number}} a - * custom object with the account information in it + * @param {string} namespace - the namespace to store the Kubernetes key secret into + * @param {Key} privateKey - the private key of type PrivateKey + * @param {number} amount - the amount of HBAR to add to the account + * @param {boolean} [setAlias] - whether to set the alias of the account to the public key, requires + * the privateKey supplied to be ECDSA + * @returns {Promise<{accountId: AccountId, privateKey: string, 
publicKey: string, balance: number}>} a custom object with + * the account information in it */ async createNewAccount (namespace, privateKey, amount, setAlias = false) { const newAccountTransaction = new AccountCreateTransaction() @@ -588,9 +612,9 @@ export class AccountManager { /** * transfer the specified amount of HBAR from one account to another - * @param fromAccountId the account to pull the HBAR from - * @param toAccountId the account to put the HBAR - * @param hbarAmount the amount of HBAR + * @param {AccountId|string} fromAccountId - the account to pull the HBAR from + * @param {AccountId|string} toAccountId - the account to put the HBAR + * @param {number} hbarAmount - the amount of HBAR * @returns {Promise} if the transaction was successfully posted */ async transferAmount (fromAccountId, toAccountId, hbarAmount) { @@ -616,47 +640,12 @@ export class AccountManager { /** * Fetch and prepare address book as a base64 string * @param {string} namespace the namespace of the network - * @return {Promise} + * @returns {Promise} */ async prepareAddressBookBase64 (namespace) { // fetch AddressBook const fileQuery = new FileContentsQuery().setFileId(FileId.ADDRESS_BOOK) - let addressBookBytes = await fileQuery.execute(this._nodeClient) - - /** @type {Map} **/ - const networkNodeServicesMap = await this.getNodeServiceMap(namespace) - - // ensure serviceEndpoint.ipAddressV4 value for all nodes in the addressBook is a 4 bytes array instead of string - // See: https://github.com/hashgraph/hedera-protobufs/blob/main/services/basic_types.proto#L1309 - const addressBook = HashgraphProto.proto.NodeAddressBook.decode(addressBookBytes) - const hasAlphaRegEx = /[a-zA-Z]+/ - let modified = false - for (const nodeAddress of addressBook.nodeAddress) { - const address = nodeAddress.serviceEndpoint[0].ipAddressV4.toString() - - if (hasAlphaRegEx.test(address)) { - const nodeId = Templates.nodeIdFromFullyQualifiedNetworkSvcName(address) - nodeAddress.serviceEndpoint[0].ipAddressV4 = Uint8Array.from(ip.toBuffer(networkNodeServicesMap.get(nodeId).nodeServiceClusterIp)) - nodeAddress.ipAddress = Uint8Array.from(ip.toBuffer(networkNodeServicesMap.get(nodeId).nodeServiceClusterIp)) - modified = true - continue - } - // overwrite ipAddressV4 as 4 bytes array if required, unless there is alpha, which means it is a domain name - if (nodeAddress.serviceEndpoint[0].ipAddressV4.byteLength !== 4) { - const parts = address.split('.') - - if (parts.length !== 4) { - throw new FullstackTestingError(`expected node IP address to have 4 parts, found ${parts.length}: ${address}`) - } - - nodeAddress.serviceEndpoint[0].ipAddressV4 = Uint8Array.from(parts) - modified = true - } - } - - if (modified) { - addressBookBytes = HashgraphProto.proto.NodeAddressBook.encode(addressBook).finish() - } + const addressBookBytes = await fileQuery.execute(this._nodeClient) // convert addressBook into base64 return Base64.encode(addressBookBytes) diff --git a/src/core/chart_manager.mjs b/src/core/chart_manager.mjs index 4c14b1683..a6cbc2bf6 100644 --- a/src/core/chart_manager.mjs +++ b/src/core/chart_manager.mjs @@ -14,11 +14,16 @@ * limitations under the License. 
* */ +'use strict' import { constants } from './index.mjs' import chalk from 'chalk' import { FullstackTestingError } from './errors.mjs' export class ChartManager { + /** + * @param {Helm} helm + * @param {Logger} logger + */ constructor (helm, logger) { if (!logger) throw new Error('An instance of core/Logger is required') if (!helm) throw new Error('An instance of core/Helm is required') @@ -32,32 +37,41 @@ export class ChartManager { * * This must be invoked before calling other methods * - * @param repoURLs a map of name and chart repository URLs - * @param force whether or not to update the repo - * @returns {Promise} + * @param {Map} repoURLs - a map of name and chart repository URLs + * @param {boolean} force - whether or not to update the repo + * @returns {Promise} - returns the urls */ async setup (repoURLs = constants.DEFAULT_CHART_REPO, force = true) { try { - let forceUpdateArg = '' - if (force) { - forceUpdateArg = '--force-update' - } + const forceUpdateArg = force ? '--force-update' : '' - const urls = [] + /** @type {Array>} */ + const promises = [] for (const [name, url] of repoURLs.entries()) { - this.logger.debug(`Adding repo ${name} -> ${url}`, { repoName: name, repoURL: url }) - await this.helm.repo('add', name, url, forceUpdateArg) - urls.push(url) + promises.push(this.addRepo(name, url, forceUpdateArg)) } - return urls + return await Promise.all(promises) // urls } catch (e) { throw new FullstackTestingError(`failed to setup chart repositories: ${e.message}`, e) } } + /** + * @param {string} name + * @param {string} url + * @param {string} forceUpdateArg + * @returns {Promise} + */ + async addRepo (name, url, forceUpdateArg) { + this.logger.debug(`Adding repo ${name} -> ${url}`, { repoName: name, repoURL: url }) + await this.helm.repo('add', name, url, forceUpdateArg) + return url + } + /** * List available clusters + * @param {string} namespaceName * @returns {Promise} */ async getInstalledCharts (namespaceName) { @@ -70,6 +84,14 @@ export class ChartManager { return [] } + /** + * @param {string} namespaceName + * @param {string} chartReleaseName + * @param {string} chartPath + * @param {string} version + * @param {string} valuesArg + * @returns {Promise} + */ async install (namespaceName, chartReleaseName, chartPath, version, valuesArg = '') { try { const isInstalled = await this.isChartInstalled(namespaceName, chartReleaseName) @@ -97,6 +119,11 @@ export class ChartManager { return true } + /** + * @param {string} namespaceName + * @param {string} chartReleaseName + * @returns {Promise} + */ async isChartInstalled (namespaceName, chartReleaseName) { this.logger.debug(`> checking if chart is installed [ chart: ${chartReleaseName}, namespace: ${namespaceName} ]`) const charts = await this.getInstalledCharts(namespaceName) @@ -109,6 +136,11 @@ export class ChartManager { return false } + /** + * @param {string} namespaceName + * @param {string} chartReleaseName + * @returns {Promise} + */ async uninstall (namespaceName, chartReleaseName) { try { const isInstalled = await this.isChartInstalled(namespaceName, chartReleaseName) @@ -126,6 +158,14 @@ export class ChartManager { return true } + /** + * @param {string} namespaceName + * @param {string} chartReleaseName + * @param {string} chartPath + * @param {string} valuesArg + * @param {string} version + * @returns {Promise} + */ async upgrade (namespaceName, chartReleaseName, chartPath, valuesArg = '', version = '') { let versionArg = '' if (version) { diff --git a/src/core/config_manager.mjs 
b/src/core/config_manager.mjs index f9bfa3fa7..28efacdf9 100644 --- a/src/core/config_manager.mjs +++ b/src/core/config_manager.mjs @@ -14,6 +14,7 @@ * limitations under the License. * */ +'use strict' import fs from 'fs' import { FullstackTestingError, MissingArgumentError } from './errors.mjs' import { constants } from './index.mjs' @@ -29,6 +30,10 @@ import * as helpers from './helpers.mjs' * doesn't need to enter it repeatedly. However, user should still be able to specify the flag explicitly for any command. */ export class ConfigManager { + /** + * @param {Logger} logger + * @param {PathLike} cachedConfigFile + */ constructor (logger, cachedConfigFile = constants.SOLO_CONFIG_FILE) { if (!logger || !(logger instanceof Logger)) throw new MissingArgumentError('An instance of core/Logger is required') if (!cachedConfigFile) throw new MissingArgumentError('cached config file path is required') @@ -45,6 +50,8 @@ export class ConfigManager { try { if (fs.existsSync(this.cachedConfigFile)) { const configJSON = fs.readFileSync(this.cachedConfigFile) + + /** @type {Object} */ this.config = JSON.parse(configJSON.toString()) } } catch (e) { @@ -71,9 +78,9 @@ export class ConfigManager { * 2. Cached config value of the command flag. * 3. Default value of the command flag if the command is not 'init'. * - * @param argv yargs.argv - * @param aliases yargv.parsed.aliases - * @return {*} updated argv + * @param {yargs.argv} argv + * @param {yargv.parsed.aliases} aliases + * @returns {Object} updated argv */ applyPrecedence (argv, aliases) { for (const key of Object.keys(aliases)) { @@ -95,8 +102,8 @@ export class ConfigManager { /** * Update the config using the argv * - * @param argv list of yargs argv - * @param persist + * @param {Object} [argv] - list of yargs argv + * @param {boolean} persist */ update (argv = {}, persist = false) { if (argv && Object.keys(argv).length > 0) { @@ -178,8 +185,8 @@ export class ConfigManager { /** * Check if a flag value is set - * @param flag flag object - * @return {boolean} + * @param {{name: string}} flag flag object + * @returns {boolean} */ hasFlag (flag) { return this.config.flags[flag.name] !== undefined @@ -188,8 +195,8 @@ export class ConfigManager { /** * Return the value of the given flag * - * @param flag flag object - * @return {*|string} value of the flag or undefined if flag value is not available + * @param {{name: string}} flag flag object + * @returns {undefined|string} value of the flag or undefined if flag value is not available */ getFlag (flag) { if (this.config.flags[flag.name] !== undefined) { @@ -201,8 +208,8 @@ export class ConfigManager { /** * Set value for the flag - * @param flag flag object - * @param value value of the flag + * @param {{name: string}} flag - flag object + * @param value - value of the flag */ setFlag (flag, value) { @@ -212,7 +219,7 @@ export class ConfigManager { /** * Get package version - * @return {*} + * @returns {*} */ getVersion () { return this.config.version @@ -220,7 +227,7 @@ export class ConfigManager { /** * Get last updated at timestamp - * @return {string} + * @returns {string} */ getUpdatedAt () { return this.config.updatedAt diff --git a/src/core/constants.mjs b/src/core/constants.mjs index 523517e9e..510d3d407 100644 --- a/src/core/constants.mjs +++ b/src/core/constants.mjs @@ -14,22 +14,24 @@ * limitations under the License. 
* */ -import { AccountId } from '@hashgraph/sdk' +'use strict' +import { AccountId, FileId } from '@hashgraph/sdk' import { color, PRESET_TIMER } from 'listr2' -import { dirname, normalize } from 'path' +import path, { dirname, normalize } from 'path' import { fileURLToPath } from 'url' // -------------------- solo related constants --------------------------------------------------------------------- export const CUR_FILE_DIR = dirname(fileURLToPath(import.meta.url)) -export const SOLO_HOME_DIR = process.env.SOLO_HOME || `${process.env.HOME}/.solo` -export const SOLO_LOGS_DIR = `${SOLO_HOME_DIR}/logs` -export const SOLO_CACHE_DIR = `${SOLO_HOME_DIR}/cache` -export const SOLO_VALUES_DIR = `${SOLO_CACHE_DIR}/values-files` +export const SOLO_HOME_DIR = process.env.SOLO_HOME || path.join(process.env.HOME, '.solo') +export const SOLO_LOGS_DIR = path.join(SOLO_HOME_DIR, 'logs') +export const SOLO_CACHE_DIR = path.join(SOLO_HOME_DIR, 'cache') +export const SOLO_VALUES_DIR = path.join(SOLO_CACHE_DIR, 'values-files') export const DEFAULT_NAMESPACE = 'default' export const HELM = 'helm' export const KEYTOOL = 'keytool' -export const SOLO_CONFIG_FILE = `${SOLO_HOME_DIR}/solo.config` -export const RESOURCES_DIR = normalize(CUR_FILE_DIR + '/../../resources') +export const SOLO_CONFIG_FILE = path.join(SOLO_HOME_DIR, 'solo.config') +export const RESOURCES_DIR = normalize(path.join(CUR_FILE_DIR, '..', '..', 'resources')) +export const TEMP_DIR = normalize(path.join(CUR_FILE_DIR, '..', '..', 'temp')) export const ROOT_CONTAINER = 'root-container' @@ -46,7 +48,7 @@ export const HEDERA_BUILDS_URL = 'https://builds.hedera.com' export const HEDERA_NODE_ACCOUNT_ID_START = AccountId.fromString(process.env.SOLO_NODE_ACCOUNT_ID_START || '0.0.3') export const HEDERA_NODE_INTERNAL_GOSSIP_PORT = process.env.SOLO_NODE_INTERNAL_GOSSIP_PORT || '50111' export const HEDERA_NODE_EXTERNAL_GOSSIP_PORT = process.env.SOLO_NODE_EXTERNAL_GOSSIP_PORT || '50111' -export const HEDERA_NODE_DEFAULT_STAKE_AMOUNT = process.env.SOLO_NODE_DEFAULT_STAKE_AMOUNT || 1 +export const HEDERA_NODE_DEFAULT_STAKE_AMOUNT = process.env.SOLO_NODE_DEFAULT_STAKE_AMOUNT || 500 // --------------- Charts related constants ---------------------------------------------------------------------------- export const FULLSTACK_SETUP_NAMESPACE = 'fullstack-setup' @@ -58,6 +60,8 @@ export const JSON_RPC_RELAY_CHART_URL = 'https://hashgraph.github.io/hedera-json export const JSON_RPC_RELAY_CHART = 'hedera-json-rpc-relay' export const MIRROR_NODE_CHART_URL = 'https://hashgraph.github.io/hedera-mirror-node/charts' export const MIRROR_NODE_CHART = 'hedera-mirror' + +/** @type {Map} */ export const DEFAULT_CHART_REPO = new Map() .set(FULLSTACK_TESTING_CHART, FULLSTACK_TESTING_CHART_URL) .set(JSON_RPC_RELAY_CHART, JSON_RPC_RELAY_CHART_URL) @@ -67,13 +71,17 @@ export const DEFAULT_CHART_REPO = new Map() export const OPERATOR_ID = process.env.SOLO_OPERATOR_ID || '0.0.2' export const OPERATOR_KEY = process.env.SOLO_OPERATOR_KEY || '302e020100300506032b65700422042091132178e72057a1d7528025956fe39b0b847f200ab59b2fdd367017f3087137' export const OPERATOR_PUBLIC_KEY = process.env.SOLO_OPERATOR_PUBLIC_KEY || '302a300506032b65700321000aa8e21064c61eab86e2a9c164565b4e7a9a4146106e0a6cd03a8c395a110e92' +export const FREEZE_ADMIN_ACCOUNT = process.env.FREEZE_ADMIN_ACCOUNT || `${HEDERA_NODE_ACCOUNT_ID_START.realm}.${HEDERA_NODE_ACCOUNT_ID_START.shard}.58` export const TREASURY_ACCOUNT_ID = `${HEDERA_NODE_ACCOUNT_ID_START.realm}.${HEDERA_NODE_ACCOUNT_ID_START.shard}.2` +export const 
COUNCIL_ACCOUNT_ID = `${HEDERA_NODE_ACCOUNT_ID_START.realm}.${HEDERA_NODE_ACCOUNT_ID_START.shard}.55` export const GENESIS_KEY = process.env.GENESIS_KEY || '302e020100300506032b65700422042091132178e72057a1d7528025956fe39b0b847f200ab59b2fdd367017f3087137' export const SYSTEM_ACCOUNTS = [[3, 100], [200, 349], [400, 750], [900, 1000]] // do account 0.0.2 last and outside the loop +export const SHORTER_SYSTEM_ACCOUNTS = [[3, 60]] export const TREASURY_ACCOUNT = 2 export const LOCAL_NODE_START_PORT = process.env.LOCAL_NODE_START_PORT || 30212 export const LOCAL_NODE_PROXY_START_PORT = process.env.LOCAL_NODE_PROXY_START_PORT || 30313 -export const ACCOUNT_CREATE_BATCH_SIZE = process.env.ACCOUNT_CREATE_BATCH_SIZE || 50 +export const ACCOUNT_UPDATE_BATCH_SIZE = process.env.ACCOUNT_UPDATE_BATCH_SIZE || 10 + export const NODE_PROXY_USER_ID = process.env.NODE_PROXY_USER_ID || 'admin' export const NODE_PROXY_PASSWORD = process.env.NODE_PROXY_PASSWORD || 'adminpwd' @@ -85,7 +93,11 @@ export const POD_CONDITION_READY = 'Ready' export const POD_CONDITION_POD_SCHEDULED = 'PodScheduled' export const POD_CONDITION_STATUS_TRUE = 'True' -// Listr related +export const K8_COPY_FROM_RETRY_TIMES = process.env.K8_COPY_FROM_RETRY_TIMES || 5 +/** + * Listr related + * @return a object that defines the default color options + */ export const LISTR_DEFAULT_RENDERER_TIMER_OPTION = { ...PRESET_TIMER, condition: (duration) => duration > 100, @@ -103,16 +115,10 @@ export const LISTR_DEFAULT_RENDERER_OPTION = { timer: LISTR_DEFAULT_RENDERER_TIMER_OPTION } -export const KEY_FORMAT_PEM = 'pem' - -export const KEY_FORMAT_PFX = 'pfx' export const SIGNING_KEY_PREFIX = 's' -export const AGREEMENT_KEY_PREFIX = 'a' export const ENCRYPTION_KEY_PREFIX = 'e' export const CERTIFICATE_VALIDITY_YEARS = 100 // years -export const PUBLIC_PFX = 'public.pfx' - export const OS_WINDOWS = 'windows' export const OS_WIN32 = 'win32' export const OS_DARWIN = 'darwin' @@ -128,10 +134,22 @@ export const PROFILE_TINY = 'tiny' export const PROFILE_LOCAL = 'local' export const ALL_PROFILES = [PROFILE_LOCAL, PROFILE_TINY, PROFILE_SMALL, PROFILE_MEDIUM, PROFILE_LARGE] -export const DEFAULT_PROFILE_FILE = `${SOLO_CACHE_DIR}/profiles/custom-spec.yaml` +export const DEFAULT_PROFILE_FILE = path.join(SOLO_CACHE_DIR, 'profiles', 'custom-spec.yaml') // ------ Hedera SDK Related ------ export const NODE_CLIENT_MAX_ATTEMPTS = process.env.NODE_CLIENT_MAX_ATTEMPTS || 60 export const NODE_CLIENT_MIN_BACKOFF = process.env.NODE_CLIENT_MIN_BACKOFF || 1000 export const NODE_CLIENT_MAX_BACKOFF = process.env.NODE_CLIENT_MAX_BACKOFF || 1000 export const NODE_CLIENT_REQUEST_TIMEOUT = process.env.NODE_CLIENT_REQUEST_TIMEOUT || 120000 + +// ---- New Node Related ---- +export const ENDPOINT_TYPE_IP = 'IP' +export const ENDPOINT_TYPE_FQDN = 'FQDN' +export const DEFAULT_NETWORK_NODE_NAME = 'node1' + +// file-id must be between 0.0.150 and 0.0.159 +// file must be uploaded using FileUpdateTransaction in maximum of 5Kb chunks +export const UPGRADE_FILE_ID = FileId.fromString('0.0.150') +export const UPGRADE_FILE_CHUNK_SIZE = 1024 * 5 // 5Kb + +export const JVM_DEBUG_PORT = 5005 diff --git a/src/core/dependency_managers/dependency_manager.mjs b/src/core/dependency_managers/dependency_manager.mjs index aa7cc21c1..465158829 100644 --- a/src/core/dependency_managers/dependency_manager.mjs +++ b/src/core/dependency_managers/dependency_manager.mjs @@ -14,11 +14,16 @@ * limitations under the License. 
* */ +'use strict' import os from 'os' import { FullstackTestingError, MissingArgumentError } from '../errors.mjs' import { ShellRunner } from '../shell_runner.mjs' export class DependencyManager extends ShellRunner { + /** + * @param {Logger} logger + * @param {Map} depManagerMap + */ constructor (logger, depManagerMap) { if (!logger) throw new MissingArgumentError('an instance of core/Logger is required', logger) super(logger) @@ -28,8 +33,8 @@ export class DependencyManager extends ShellRunner { /** * Check if the required dependency is installed or not - * @param dep is the name of the program - * @param shouldInstall Whether or not install the dependency if not installed + * @param {string} dep - is the name of the program + * @param {boolean} [shouldInstall] - Whether or not install the dependency if not installed * @returns {Promise} */ async checkDependency (dep, shouldInstall = true) { @@ -49,7 +54,11 @@ export class DependencyManager extends ShellRunner { return true } - taskCheckDependencies (deps = []) { + /** + * @param {String[]} deps + * @param {Object[]} subTasks + */ + taskCheckDependencies (deps) { const subTasks = [] deps.forEach(dep => { subTasks.push({ diff --git a/src/core/dependency_managers/helm_dependency_manager.mjs b/src/core/dependency_managers/helm_dependency_manager.mjs index 6403d285c..4e55af2ae 100644 --- a/src/core/dependency_managers/helm_dependency_manager.mjs +++ b/src/core/dependency_managers/helm_dependency_manager.mjs @@ -14,6 +14,7 @@ * limitations under the License. * */ +'use strict' import fs from 'fs' import os from 'os' import path from 'path' @@ -29,6 +30,8 @@ import { OS_WIN32, OS_WINDOWS } from '../constants.mjs' // constants required by HelmDependencyManager const HELM_RELEASE_BASE_URL = 'https://get.helm.sh' const HELM_ARTIFACT_TEMPLATE = 'helm-%s-%s-%s.%s' + +/** @type {Map} */ const HELM_ARTIFACT_EXT = new Map() .set(constants.OS_DARWIN, 'tar.gz') .set(constants.OS_LINUX, 'tar.gz') @@ -38,6 +41,15 @@ const HELM_ARTIFACT_EXT = new Map() * Helm dependency manager installs or uninstalls helm client at SOLO_HOME_DIR/bin directory */ export class HelmDependencyManager extends ShellRunner { + /** + * @param {PackageDownloader} downloader + * @param {Zippy} zippy + * @param {Logger} logger + * @param {string} [installationDir] + * @param {NodeJS.Platform} [osPlatform] + * @param {string} [osArch] + * @param {string} [helmVersion] + */ constructor ( downloader, zippy, @@ -73,17 +85,23 @@ export class HelmDependencyManager extends ShellRunner { this.checksumURL = `${HELM_RELEASE_BASE_URL}/${this.artifactName}.sha256sum` } + /** + * @returns {string} + */ getHelmPath () { return this.helmPath } + /** + * @returns {boolean} + */ isInstalled () { return fs.existsSync(this.helmPath) } /** * Uninstall helm from solo bin folder - * @return {Promise} + * @returns {Promise} */ async uninstall () { if (this.isInstalled()) { @@ -91,6 +109,10 @@ export class HelmDependencyManager extends ShellRunner { } } + /** + * @param {string} [tmpDir] + * @returns {Promise} + */ async install (tmpDir = helpers.getTmpDir()) { const extractedDir = path.join(tmpDir, 'extracted-helm') let helmSrc = path.join(extractedDir, `${this.osPlatform}-${this.osArch}`, constants.HELM) @@ -120,6 +142,10 @@ export class HelmDependencyManager extends ShellRunner { return this.isInstalled() } + /** + * @param {boolean} [shouldInstall] + * @returns {Promise} + */ async checkVersion (shouldInstall = true) { if (!this.isInstalled()) { if (shouldInstall) { @@ -135,6 +161,9 @@ export class 
HelmDependencyManager extends ShellRunner { return semver.gte(parts[0], version.HELM_VERSION) } + /** + * @returns {string} + */ getHelmVersion () { return version.HELM_VERSION } diff --git a/src/core/dependency_managers/keytool_dependency_manager.mjs b/src/core/dependency_managers/keytool_dependency_manager.mjs index d4ea45f8c..8a71b41d8 100644 --- a/src/core/dependency_managers/keytool_dependency_manager.mjs +++ b/src/core/dependency_managers/keytool_dependency_manager.mjs @@ -14,6 +14,7 @@ * limitations under the License. * */ +'use strict' import fs from 'fs' import os from 'os' import path from 'path' @@ -31,6 +32,15 @@ import { OS_WIN32, OS_WINDOWS } from '../constants.mjs' * Installs or uninstalls JRE client at SOLO_HOME_DIR/bin/jre directory */ export class KeytoolDependencyManager extends ShellRunner { + /** + * @param {PackageDownloader} downloader + * @param {Zippy} zippy + * @param {Logger} logger + * @param {string} [installationDir] + * @param {NodeJS.Platform} [osPlatform] + * @param {string} [osArch] + * @param {string} [javaVersion] + */ constructor ( downloader, zippy, @@ -74,6 +84,10 @@ export class KeytoolDependencyManager extends ShellRunner { this.keytoolPath = Templates.installationPath(constants.KEYTOOL, this.osPlatform, this.installationDir) } + /** + * @returns {Promise} + * @private + */ async _fetchKeytoolArtifactUrl () { const keytoolRelease = `jdk-${this.javaVersion.major}.${this.javaVersion.minor}.${this.javaVersion.patch}%2B${this.javaVersion.build}` const adoptiumURL = `https://api.adoptium.net/v3/assets/release_name/eclipse/${keytoolRelease}?architecture=${this.osArch}&heap_size=normal&image_type=jre&os=${this.osPlatform}&project=jdk` @@ -81,17 +95,23 @@ export class KeytoolDependencyManager extends ShellRunner { return data.binaries[0].package } + /** + * @returns {string} + */ getKeytoolPath () { return this.keytoolPath } + /** + * @returns {boolean} + */ isInstalled () { return fs.existsSync(this.keytoolPath) } /** * Uninstall keytool from solo bin folder - * @return {Promise} + * @returns {Promise} */ async uninstall () { if (fs.existsSync(this.jreDir)) { @@ -99,6 +119,10 @@ export class KeytoolDependencyManager extends ShellRunner { } } + /** + * @param {string} [tmpDir] + * @returns {Promise} + */ async install (tmpDir = helpers.getTmpDir()) { const extractedDir = path.join(tmpDir, 'extracted-keytool') if (!this.keytoolPackage) { @@ -143,6 +167,10 @@ export class KeytoolDependencyManager extends ShellRunner { return this.isInstalled() } + /** + * @param {boolean} [shouldInstall] + * @returns {Promise} + */ async checkVersion (shouldInstall = true) { if (!this.isInstalled()) { if (shouldInstall) { @@ -158,6 +186,9 @@ export class KeytoolDependencyManager extends ShellRunner { return semver.gte(parts[1], version.JAVA_VERSION) } + /** + * @returns {Keytool} + */ getKeytool () { if (this.keytool) { return this.keytool @@ -167,6 +198,9 @@ export class KeytoolDependencyManager extends ShellRunner { return this.keytool } + /** + * @returns {string} + */ getKeytoolVersion () { return version.JAVA_VERSION } diff --git a/src/core/enumerations.mjs b/src/core/enumerations.mjs new file mode 100644 index 000000000..43370b2e4 --- /dev/null +++ b/src/core/enumerations.mjs @@ -0,0 +1,45 @@ +/** + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the ""License""); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an ""AS IS"" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +'use strict' + +export const NodeStatusCodes = { + NO_VALUE: 0, + STARTING_UP: 1, + ACTIVE: 2, + BEHIND: 4, + FREEZING: 5, + FREEZE_COMPLETE: 6, + REPLAYING_EVENTS: 7, + OBSERVING: 8, + CHECKING: 9, + RECONNECT_COMPLETE: 10, + CATASTROPHIC_FAILURE: 11 +} + +export const NodeStatusEnums = { + 0: 'NO_VALUE', + 1: 'STARTING_UP', + 2: 'ACTIVE', + 4: 'BEHIND', + 5: 'FREEZING', + 6: 'FREEZE_COMPLETE', + 7: 'REPLAYING_EVENTS', + 8: 'OBSERVING', + 9: 'CHECKING', + 10: 'RECONNECT_COMPLETE', + 11: 'CATASTROPHIC_FAILURE' +} diff --git a/src/core/errors.mjs b/src/core/errors.mjs index d20bd7b1e..e8e41a5c2 100644 --- a/src/core/errors.mjs +++ b/src/core/errors.mjs @@ -14,15 +14,17 @@ * limitations under the License. * */ +'use strict' + export class FullstackTestingError extends Error { /** * Create a custom error object * * error metadata will include the `cause` * - * @param message error message - * @param cause source error (if any) - * @param meta additional metadata (if any) + * @param {string} message error message + * @param {Error | Object} cause source error (if any) + * @param {Object} meta additional metadata (if any) */ constructor (message, cause = {}, meta = {}) { super(message) @@ -43,9 +45,9 @@ export class ResourceNotFoundError extends FullstackTestingError { * * error metadata will include `resource` * - * @param message error message - * @param resource name of the resource - * @param cause source error (if any) + * @param {string} message - error message + * @param {string} resource - name of the resource + * @param {Error|Object} cause - source error (if any) */ constructor (message, resource, cause = {}) { super(message, cause, { resource }) @@ -56,8 +58,8 @@ export class MissingArgumentError extends FullstackTestingError { /** * Create a custom error for missing argument scenario * - * @param message error message - * @param cause source error (if any) + * @param {string} message - error message + * @param {Error|Object} cause - source error (if any) */ constructor (message, cause = {}) { super(message, cause) @@ -70,9 +72,9 @@ export class IllegalArgumentError extends FullstackTestingError { * * error metadata will include `value` * - * @param message error message - * @param value value of the invalid argument - * @param cause source error (if any) + * @param {string} message - error message + * @param {*} value - value of the invalid argument + * @param {Error|Object} cause - source error (if any) */ constructor (message, value = '', cause = {}) { super(message, cause, { value }) @@ -85,10 +87,10 @@ export class DataValidationError extends FullstackTestingError { * * error metadata will include `expected` and `found` values. 
* - * @param message error message - * @param expected expected value - * @param found value found - * @param cause source error (if any) + * @param {string} message - error message + * @param {*} expected - expected value + * @param {*} found - value found + * @param {Error|Object} [cause] - source error (if any) */ constructor (message, expected, found, cause = {}) { super(message, cause, { expected, found }) diff --git a/src/core/helm.mjs b/src/core/helm.mjs index b0475be35..27e623df9 100644 --- a/src/core/helm.mjs +++ b/src/core/helm.mjs @@ -14,6 +14,7 @@ * limitations under the License. * */ +'use strict' import os from 'os' import { constants } from './index.mjs' import { ShellRunner } from './shell_runner.mjs' @@ -21,6 +22,10 @@ import { Templates } from './templates.mjs' import { IllegalArgumentError } from './errors.mjs' export class Helm extends ShellRunner { + /** + * @param {Logger} logger + * @param {NodeJS.Platform} [osPlatform] + */ constructor (logger, osPlatform = os.platform()) { if (!logger) throw new IllegalArgumentError('an instance of core/Logger is required', logger) super(logger) @@ -30,8 +35,8 @@ export class Helm extends ShellRunner { /** * Prepare a `helm` shell command string - * @param action represents a helm command (e.g. create | install | get ) - * @param args args of the command + * @param {string} action - represents a helm command (e.g. create | install | get ) + * @param {string} args - args of the command * @returns {string} */ prepareCommand (action, ...args) { @@ -42,7 +47,7 @@ export class Helm extends ShellRunner { /** * Invoke `helm install` command - * @param args args of the command + * @param {string} args - args of the command * @returns {Promise} console output as an array of strings */ async install (...args) { @@ -51,7 +56,7 @@ export class Helm extends ShellRunner { /** * Invoke `helm uninstall` command - * @param args args of the command + * @param {string} args - args of the command * @returns {Promise} console output as an array of strings */ async uninstall (...args) { @@ -60,7 +65,7 @@ export class Helm extends ShellRunner { /** * Invoke `helm upgrade` command - * @param args args of the command + * @param {string} args - args of the command * @returns {Promise} console output as an array of strings */ async upgrade (...args) { @@ -69,7 +74,7 @@ export class Helm extends ShellRunner { /** * Invoke `helm list` command - * @param args args of the command + * @param {string} args - args of the command * @returns {Promise} console output as an array of strings */ async list (...args) { @@ -78,8 +83,8 @@ export class Helm extends ShellRunner { /** * Invoke `helm dependency` command - * @param subCommand sub-command - * @param args args of the command + * @param {string} subCommand - sub-command + * @param {string} args - args of the command * @returns {Promise} console output as an array of strings */ async dependency (subCommand, ...args) { @@ -88,8 +93,8 @@ export class Helm extends ShellRunner { /** * Invoke `helm repo` command - * @param subCommand sub-command - * @param args args of the command + * @param {string} subCommand - sub-command + * @param {string} args - args of the command * @returns {Promise} console output as an array of strings */ async repo (subCommand, ...args) { @@ -98,7 +103,8 @@ export class Helm extends ShellRunner { /** * Get helm version - * @return {Promise} + * @param {string[]} args + * @returns {Promise} */ async version (args = ['--short']) { return this.run(this.prepareCommand('version', ...args)) diff 
--git a/src/core/helpers.mjs b/src/core/helpers.mjs index 2c99fd0d8..7c3d372cb 100644 --- a/src/core/helpers.mjs +++ b/src/core/helpers.mjs @@ -14,6 +14,7 @@ * limitations under the License. * */ +'use strict' import fs from 'fs' import os from 'os' import path from 'path' @@ -25,49 +26,75 @@ import * as semver from 'semver' import { Templates } from './templates.mjs' import { HEDERA_HAPI_PATH, ROOT_CONTAINER, SOLO_LOGS_DIR } from './constants.mjs' import { constants } from './index.mjs' +import { FileContentsQuery, FileId } from '@hashgraph/sdk' // cache current directory const CUR_FILE_DIR = paths.dirname(fileURLToPath(import.meta.url)) +/** + * @param {number} ms + * @returns {Promise} + */ export function sleep (ms) { return new Promise((resolve) => { setTimeout(resolve, ms) }) } +/** + * @param {string} input + * @returns {string[]} + */ export function parseNodeIds (input) { + return splitFlagInput(input, ',') +} + +/** + * @param {string} input + * @param {string} separator + * @returns {string[]} + */ +export function splitFlagInput (input, separator = ',') { if (typeof input === 'string') { - const nodeIds = [] - input.split(',').forEach(item => { - const nodeId = item.trim() - if (nodeId) { - nodeIds.push(nodeId) + const items = [] + input.split(separator).forEach(s => { + const item = s.trim() + if (item) { + items.push(item) } }) - return nodeIds + return items } - throw new FullstackTestingError('node IDs is not a comma separated string') + throw new FullstackTestingError('input is not a comma separated string') } +/** + * @template T + * @param {T[]} arr - The array to be cloned + * @returns {T[]} A new array with the same elements as the input array + */ export function cloneArray (arr) { return JSON.parse(JSON.stringify(arr)) } /** * load package.json - * @returns {any} + * @returns {*} */ export function loadPackageJSON () { try { - const raw = fs.readFileSync(`${CUR_FILE_DIR}/../../package.json`) + const raw = fs.readFileSync(path.join(CUR_FILE_DIR, '..', '..', 'package.json')) return JSON.parse(raw.toString()) } catch (e) { throw new FullstackTestingError('failed to load package.json', e) } } +/** + * @returns {string} + */ export function packageVersion () { const packageJson = loadPackageJSON() return packageJson.version @@ -75,8 +102,8 @@ export function packageVersion () { /** * Return the required root image for a platform version - * @param releaseTag platform version - * @return {string} + * @param {string} releaseTag - platform version + * @returns {string} */ export function getRootImageRepository (releaseTag) { const releaseVersion = semver.parse(releaseTag, { includePrerelease: true }) @@ -87,10 +114,19 @@ return 'hashgraph/full-stack-testing/ubi8-init-java21' } +/** + * @returns {string} + */ export function getTmpDir () { return fs.mkdtempSync(path.join(os.tmpdir(), 'solo-')) } +/** + * @param {string} destDir + * @param {string} prefix + * @param {Date} curDate + * @returns {string} + */ export function createBackupDir (destDir, prefix = 'backup', curDate = new Date()) { const dateDir = util.format('%s%s%s_%s%s%s', curDate.getFullYear(), @@ -109,6 +145,10 @@ return backupDir } +/** + * @param {Map} [fileMap] + * @param {boolean} removeOld + */ export function makeBackup (fileMap = 
new Map(), removeOld = true) { } } -export function backupOldPfxKeys (nodeIds, keysDir, curDate = new Date(), dirPrefix = 'gossip-pfx') { - const backupDir = createBackupDir(keysDir, `unused-${dirPrefix}`, curDate) - const fileMap = new Map() - for (const nodeId of nodeIds) { - const srcPath = path.join(keysDir, `private-${nodeId}.pfx`) - const destPath = path.join(backupDir, `private-${nodeId}.pfx`) - fileMap.set(srcPath, destPath) - } - - const srcPath = path.join(keysDir, 'public.pfx') - const destPath = path.join(backupDir, 'public.pfx') - fileMap.set(srcPath, destPath) - makeBackup(fileMap, true) - - return backupDir -} - +/** + * @param {string[]} nodeIds + * @param {string} keysDir + * @param {Date} curDate + * @param {string} dirPrefix + * @returns {string} + */ export function backupOldTlsKeys (nodeIds, keysDir, curDate = new Date(), dirPrefix = 'tls') { const backupDir = createBackupDir(keysDir, `unused-${dirPrefix}`, curDate) const fileMap = new Map() for (const nodeId of nodeIds) { const srcPath = path.join(keysDir, Templates.renderTLSPemPrivateKeyFile(nodeId)) - const destPath = path.join(backupDir, Templates.renderTLSPemPublicKeyFile(nodeId)) + const destPath = path.join(backupDir, Templates.renderTLSPemPrivateKeyFile(nodeId)) fileMap.set(srcPath, destPath) } @@ -153,12 +183,19 @@ export function backupOldTlsKeys (nodeIds, keysDir, curDate = new Date(), dirPre return backupDir } +/** + * @param {string[]} nodeIds + * @param {string} keysDir + * @param {Date} curDate + * @param {string} dirPrefix + * @returns {string} + */ export function backupOldPemKeys (nodeIds, keysDir, curDate = new Date(), dirPrefix = 'gossip-pem') { const backupDir = createBackupDir(keysDir, `unused-${dirPrefix}`, curDate) const fileMap = new Map() for (const nodeId of nodeIds) { const srcPath = path.join(keysDir, Templates.renderGossipPemPrivateKeyFile(nodeId)) - const destPath = path.join(backupDir, Templates.renderGossipPemPublicKeyFile(nodeId)) + const destPath = path.join(backupDir, Templates.renderGossipPemPrivateKeyFile(nodeId)) fileMap.set(srcPath, destPath) } @@ -167,6 +204,10 @@ export function backupOldPemKeys (nodeIds, keysDir, curDate = new Date(), dirPre return backupDir } +/** + * @param {string} str + * @returns {boolean} + */ export function isNumeric (str) { if (typeof str !== 'string') return false // we only process strings! return !isNaN(str) && // use type coercion to parse the _entirety_ of the string (`parseFloat` alone does not do this)... 
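The refactored `splitFlagInput` above generalizes the old `parseNodeIds` parsing to any separator. A minimal standalone sketch of the intended contract (illustrative only; reimplemented here rather than imported from `helpers.mjs`):

```js
// Split on the separator, trim each piece, and keep only non-empty trimmed
// items, so stray spaces and doubled separators are ignored.
function splitFlagInput (input, separator = ',') {
  if (typeof input !== 'string') {
    throw new Error('input is not a comma separated string')
  }
  return input.split(separator)
    .map(s => s.trim())
    .filter(item => item.length > 0)
}

console.log(splitFlagInput('node0, node1 ,,node2')) // [ 'node0', 'node1', 'node2' ]
```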
@@ -175,8 +216,8 @@ export function isNumeric (str) { /** * Validate a path provided by the user to prevent path traversal attacks - * @param input the input provided by the user - * @returns {*} a validated path + * @param {string} input - the input provided by the user + * @returns {string} a validated path */ export function validatePath (input) { if (input.indexOf('\0') !== -1) { @@ -188,36 +229,45 @@ export function validatePath (input) { /** * Download logs files from all network pods and save to local solo log directory * an instance of core/K8 - * @param {K8} k8 an instance of core/K8 - * @param {string} namespace the namespace of the network + * @param {K8} k8 - an instance of core/K8 + * @param {string} namespace - the namespace of the network * @returns {Promise} A promise that resolves when the logs are downloaded */ export async function getNodeLogs (k8, namespace) { + k8.logger.debug('getNodeLogs: begin...') const pods = await k8.getPodsByLabel(['fullstack.hedera.com/type=network-node']) + const timeString = new Date().toISOString().replace(/:/g, '-').replace(/\./g, '-') + for (const pod of pods) { const podName = pod.metadata.name - const targetDir = `${SOLO_LOGS_DIR}/${namespace}/${podName}` + const targetDir = path.join(SOLO_LOGS_DIR, namespace, timeString) try { - if (fs.existsSync(targetDir)) { - fs.rmdirSync(targetDir, { recursive: true }) + if (!fs.existsSync(targetDir)) { + fs.mkdirSync(targetDir, { recursive: true }) } - fs.mkdirSync(targetDir, { recursive: true }) - await k8.copyFrom(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/output/swirlds.log`, targetDir) - await k8.copyFrom(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/output/hgcaa.log`, targetDir) - await k8.copyFrom(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/config.txt`, targetDir) - await k8.copyFrom(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/settings.txt`, targetDir) + const scriptName = 'support-zip.sh' + const sourcePath = path.join(constants.RESOURCES_DIR, scriptName) // script source path + await k8.copyTo(podName, ROOT_CONTAINER, sourcePath, `${HEDERA_HAPI_PATH}`) + await k8.execContainer(podName, ROOT_CONTAINER, `chmod 0755 ${HEDERA_HAPI_PATH}/${scriptName}`) + await k8.execContainer(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/${scriptName}`) + await k8.copyFrom(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/${podName}.zip`, targetDir) } catch (e) { // not throw error here, so we can continue to finish downloading logs from other pods // and also delete namespace in the end k8.logger.error(`failed to download logs from pod ${podName}`, e) } + k8.logger.debug('getNodeLogs: ...end') } } -// a function generate map between the nodeId and their account ids +/** + * Create a map of node IDs to account IDs + * @param {string[]} nodeIDs + * @returns {Map} the map of node IDs to account IDs + */ export function getNodeAccountMap (nodeIDs) { - const accountMap = new Map() + const accountMap = /** @type {Map} **/ new Map() const realm = constants.HEDERA_NODE_ACCOUNT_ID_START.realm const shard = constants.HEDERA_NODE_ACCOUNT_ID_START.shard let accountId = constants.HEDERA_NODE_ACCOUNT_ID_START.num @@ -228,3 +278,78 @@ export function getNodeAccountMap (nodeIDs) { }) return accountMap } + +/** + * @param {AccountManager} accountManager + * @param {string} namespace + * @param {number} fileNum + * @returns {Promise} + */ +export async function getFileContents (accountManager, namespace, fileNum) { + await accountManager.loadNodeClient(namespace) + const client = accountManager._nodeClient + const fileId = 
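+ // Hedera system files live at well-known file numbers, e.g. 0.0.111 holds the fee schedule and 0.0.112 the exchange rates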
FileId.fromString(`0.0.${fileNum}`) + const queryFees = new FileContentsQuery().setFileId(fileId) + return Buffer.from(await queryFees.execute(client)).toString('hex') +} + +/** + * @param {Array} envVarArray + * @param {string} name + * @returns {string|null} + */ +export function getEnvValue (envVarArray, name) { + const kvPair = envVarArray.find(v => v.startsWith(`${name}=`)) + return kvPair ? kvPair.split('=')[1] : null +} + +/** + * @param {string} ipAddress + * @returns {Uint8Array} + */ +export function parseIpAddressToUint8Array (ipAddress) { + const parts = ipAddress.split('.') + const uint8Array = new Uint8Array(4) + + for (let i = 0; i < 4; i++) { + uint8Array[i] = parseInt(parts[i], 10) + } + + return uint8Array +} + +/** + * If the basename of the src does not match the expected basename, rename it first, then copy it to the destination + * @param {string} srcFilePath + * @param {string} expectedBaseName + * @param {string} destDir + */ +export function renameAndCopyFile (srcFilePath, expectedBaseName, destDir) { + const srcDir = path.dirname(srcFilePath) + if (path.basename(srcFilePath) !== expectedBaseName) { + fs.renameSync(srcFilePath, path.join(srcDir, expectedBaseName)) + } + // copy public key and private key to key directory + fs.copyFile(path.join(srcDir, expectedBaseName), path.join(destDir, expectedBaseName), (err) => { + if (err) { + throw new FullstackTestingError(`Error copying file: ${err.message}`) + } + }) +} + +/** + * Add debug options to valuesArg used by helm chart + * @param {string} valuesArg - the valuesArg to update + * @param {string} debugNodeId - the node ID to attach the debugger to + * @param {number} [index] - the index of extraEnv to add the debug options to + * @returns {string} updated valuesArg + */ +export function addDebugOptions (valuesArg, debugNodeId, index = 0) { + if (debugNodeId) { + const nodeId = Templates.nodeNumberFromNodeId(debugNodeId) - 1 + valuesArg += ` --set "hedera.nodes[${nodeId}].root.extraEnv[${index}].name=JAVA_OPTS"` + valuesArg += ` --set "hedera.nodes[${nodeId}].root.extraEnv[${index}].value=-agentlib:jdwp=transport=dt_socket\\,server=y\\,suspend=y\\,address=*:${constants.JVM_DEBUG_PORT}"` + } + return valuesArg +} diff --git a/src/core/k8.mjs b/src/core/k8.mjs index 026c28b13..9338c1ce5 100644 --- a/src/core/k8.mjs +++ b/src/core/k8.mjs @@ -14,6 +14,7 @@ * limitations under the License. 
* */ +'use strict' import * as k8s from '@kubernetes/client-node' import fs from 'fs' import net from 'net' @@ -27,6 +28,7 @@ import { v4 as uuid4 } from 'uuid' import { V1ObjectMeta, V1Secret } from '@kubernetes/client-node' import { sleep } from './helpers.mjs' import { constants } from './index.mjs' +import * as stream from 'node:stream' /** * A kubernetes API wrapper class providing custom functionalities required by solo @@ -37,6 +39,10 @@ import { constants } from './index.mjs' export class K8 { static PodReadyCondition = new Map().set(constants.POD_CONDITION_READY, constants.POD_CONDITION_STATUS_TRUE) + /** + * @param {ConfigManager} configManager + * @param {Logger} logger + */ constructor (configManager, logger) { if (!configManager) throw new MissingArgumentError('An instance of core/ConfigManager is required') if (!logger) throw new MissingArgumentError('An instance of core/Logger is required') @@ -51,17 +57,23 @@ export class K8 { * Clone a new instance with the same config manager and logger * Internally it instantiates a new kube API client * - * @return {K8} + * @returns {K8} */ clone () { const c = new K8(this.configManager, this.logger) return c.init() } + /** + * @returns {k8s.KubeConfig} + */ getKubeConfig () { return this.kubeConfig } + /** + * @returns {K8} + */ init () { this.kubeConfig = new k8s.KubeConfig() this.kubeConfig.loadFromDefault() @@ -83,9 +95,9 @@ export class K8 { /** * Apply filters to metadata - * @param items list of items - * @param filters an object with metadata fields and value - * @return a list of items that match the filters + * @param {Object[]} items - list of items + * @param {Object} [filters] - an object with metadata fields and value + * @returns {Object[]} a list of items that match the filters */ applyMetadataFilter (items, filters = {}) { if (!filters) throw new MissingArgumentError('filters are required') @@ -115,9 +127,9 @@ export class K8 { /** * Filter a single item using metadata filter - * @param items list of items - * @param filters an object with metadata fields and value - * @return {*} + * @param {Object[]} items - list of items + * @param {Object} [filters] - an object with metadata fields and value + * @returns {Object} */ filterItem (items, filters = {}) { const filtered = this.applyMetadataFilter(items, filters) @@ -127,8 +139,8 @@ export class K8 { /** * Create a new namespace - * @param name name of the namespace - * @return {Promise} + * @param {string} name - name of the namespace + * @returns {Promise} */ async createNamespace (name) { const payload = { @@ -143,8 +155,8 @@ export class K8 { /** * Delete a namespace - * @param name name of the namespace - * @return {Promise} + * @param {string} name - name of the namespace + * @returns {Promise} */ async deleteNamespace (name) { const resp = await this.kubeClient.deleteNamespace(name) @@ -153,7 +165,7 @@ export class K8 { /** * Get a list of namespaces - * @return list of namespaces + * @returns {string[]} list of namespaces */ async getNamespaces () { const resp = await this.kubeClient.listNamespace() @@ -171,8 +183,8 @@ export class K8 { /** * Returns true if a namespace exists with the given name - * @param namespace namespace name - * @return {Promise} + * @param {string} namespace namespace name + * @returns {Promise} */ async hasNamespace (namespace) { const namespaces = await this.getNamespaces() @@ -181,8 +193,8 @@ export class K8 { /** * Get a podName by name - * @param name podName name - * @return {Promise<{}>} k8s.V1Pod object + * @param {string} name - 
podName name + * @returns {Promise} k8s.V1Pod object */ async getPodByName (name) { const ns = this._getNamespace() @@ -200,8 +212,8 @@ export class K8 { /** * Get pods by labels - * @param labels list of labels - * @return {Promise>} + * @param {string[]} labels - list of labels + * @returns {Promise>} */ async getPodsByLabel (labels = []) { const ns = this._getNamespace() @@ -220,8 +232,8 @@ export class K8 { /** * Get secrets by labels - * @param labels list of labels - * @return {Promise>} + * @param {string[]} labels - list of labels + * @returns {Promise>} */ async getSecretsByLabel (labels = []) { const ns = this._getNamespace() @@ -240,7 +252,7 @@ export class K8 { /** * Get host IP of a podName - * @param podNameName name of the podName + * @param {string} podNameName - name of the podName * @returns {Promise} podName IP */ async getPodIP (podNameName) { @@ -256,8 +268,8 @@ export class K8 { /** * Get a svc by name - * @param name svc name - * @return {Promise<{}>} k8s.V1Service object + * @param {string} name - svc name + * @returns {Promise} k8s.V1Service object */ async getSvcByName (name) { const ns = this._getNamespace() @@ -275,7 +287,7 @@ export class K8 { /** * Get cluster IP of a service - * @param svcName name of the service + * @param {string} svcName - name of the service * @returns {Promise} cluster IP */ async getClusterIP (svcName) { @@ -289,7 +301,7 @@ export class K8 { /** * Get a list of clusters - * @return a list of cluster names + * @returns {string[]} a list of cluster names */ async getClusters () { const clusters = [] @@ -302,7 +314,7 @@ export class K8 { /** * Get a list of contexts - * @return a list of context names + * @returns {string[]} a list of context names */ async getContexts () { const contexts = [] @@ -327,11 +339,11 @@ export class K8 { * name: config.txt * }] * - * @param podName pod name - * @param containerName container name - * @param destPath path inside the container - * @param timeout timeout in ms - * @return {Promise<{}>} + * @param {string} podName + * @param {string} containerName + * @param {string} destPath - path inside the container + * @param {number} [timeout] - timeout in ms + * @returns a promise that returns array of directory entries, custom object */ async listDir (podName, containerName, destPath, timeout = 5000) { try { @@ -344,8 +356,13 @@ export class K8 { for (let line of lines) { line = line.replace(/\s+/g, '|') const parts = line.split('|') - if (parts.length === 9) { - const name = parts[parts.length - 1] + if (parts.length >= 9) { + let name = parts[parts.length - 1] + // handle unique file format (without single quotes): 'usedAddressBook_vHederaSoftwareVersion{hapiVersion=v0.53.0, servicesVersion=v0.53.0}_2024-07-30-20-39-06_node_0.txt.debug' + for (let i = parts.length - 1; i > 8; i--) { + name = `${parts[i - 1]} ${name}` + } + if (name !== '.' 
&& name !== '..') { const permission = parts[0] const item = { @@ -370,11 +387,11 @@ export class K8 { /** * Check if a filepath exists in the container - * @param podName pod name - * @param containerName container name - * @param destPath path inside the container - * @param filters an object with metadata fields and value - * @return {Promise} + * @param {string} podName + * @param {string} containerName + * @param {string} destPath - path inside the container + * @param {Object} [filters] - an object with metadata fields and value + * @returns {Promise} */ async hasFile (podName, containerName, destPath, filters = {}) { const parentDir = path.dirname(destPath) @@ -415,10 +432,10 @@ export class K8 { /** * Check if a directory path exists in the container - * @param podName pod name - * @param containerName container name - * @param destPath path inside the container - * @return {Promise} + * @param {string} podName + * @param {string} containerName + * @param {string} destPath - path inside the container + * @returns {Promise} */ async hasDir (podName, containerName, destPath) { return await this.execContainer( @@ -428,6 +445,12 @@ export class K8 { ) === 'true' } + /** + * @param {string} podName + * @param {string} containerName + * @param {string} destPath + * @returns {Promise} + */ async mkdir (podName, containerName, destPath) { return this.execContainer( podName, @@ -441,11 +464,11 @@ export class K8 { * * It overwrites any existing file inside the container at the destination directory * - * @param podName podName name - * @param containerName container name - * @param srcPath source file path in the local - * @param destDir destination directory in the container - * @returns return a Promise that performs the copy operation + * @param {string} podName + * @param {string} containerName + * @param {string} srcPath - source file path in the local + * @param {string} destDir - destination directory in the container + * @returns {Promise} return a Promise that performs the copy operation */ async copyTo (podName, containerName, srcPath, destDir) { const namespace = this._getNamespace() @@ -507,20 +530,28 @@ export class K8 { * * It overwrites any existing file at the destination directory * - * @param podName podName name - * @param containerName container name - * @param srcPath source file path in the container - * @param destDir destination directory in the local + * @param {string} podName + * @param {string} containerName + * @param {string} srcPath - source file path in the container + * @param {string} destDir - destination directory in the local * @returns {Promise} */ async copyFrom (podName, containerName, srcPath, destDir) { const namespace = this._getNamespace() // get stat for source file in the container - const entries = await this.listDir(podName, containerName, srcPath) + let entries = await this.listDir(podName, containerName, srcPath) if (entries.length !== 1) { throw new FullstackTestingError(`invalid source path: ${srcPath}`) } + // handle symbolic link + if (entries[0].name.indexOf(' -> ') > -1) { + const redirectSrcPath = path.join(path.dirname(srcPath), entries[0].name.substring(entries[0].name.indexOf(' -> ') + 4)) + entries = await this.listDir(podName, containerName, redirectSrcPath) + if (entries.length !== 1) { + throw new FullstackTestingError(`invalid source path: ${redirectSrcPath}`) + } + } const srcFileDesc = entries[0] // cache for later comparison after copy if (!fs.existsSync(destDir)) { @@ -530,9 +561,9 @@ export class K8 { try { const 
srcFileSize = Number.parseInt(srcFileDesc.size) - const srcFile = path.basename(srcPath) - const srcDir = path.dirname(srcPath) - const destPath = `${destDir}/${srcFile}` + const srcFile = path.basename(entries[0].name) + const srcDir = path.dirname(entries[0].name) + const destPath = path.join(destDir, srcFile) // download the tar file to a temp location const tmpFile = this._tempFileFor(srcFile) @@ -540,67 +571,147 @@ export class K8 { const self = this return new Promise((resolve, reject) => { const execInstance = new k8s.Exec(this.kubeConfig) - const command = ['tar', 'zcf', '-', '-C', srcDir, srcFile] - const writerStream = fs.createWriteStream(tmpFile) - const errStream = new sb.WritableStreamBuffer() + const command = ['cat', `${srcDir}/${srcFile}`] + const outputFileStream = fs.createWriteStream(tmpFile) + const outputPassthroughStream = new stream.PassThrough({ highWaterMark: 10 * 1024 * 1024 }) + const errStream = new stream.PassThrough() + let additionalErrorMessageDetail = '' + + // Use pipe() to automatically handle backpressure between streams + outputPassthroughStream.pipe(outputFileStream) + + outputPassthroughStream.on('data', (chunk) => { + this.logger.debug(`received chunk size=${chunk.length}`) + const canWrite = outputFileStream.write(chunk) // Write chunk to file and check if buffer is full + + if (!canWrite) { + console.log(`Buffer is full, pausing data stream... for copying from ${podName}:${srcDir}/${srcFile} to ${destPath}`) + outputPassthroughStream.pause() // Pause the data stream if buffer is full + } + }) + + outputFileStream.on('drain', () => { + outputPassthroughStream.resume() + this.logger.debug(`stream drained, resume write for copying from ${podName}:${srcDir}/${srcFile} to ${destPath}`) + }) execInstance.exec( namespace, podName, containerName, command, - writerStream, + outputFileStream, errStream, null, false, - async ({ status }) => { - writerStream.close() - if (status === 'Failure' || errStream.size()) { + ({ status }) => { + if (status === 'Failure') { self._deleteTempFile(tmpFile) + const errorMessage = `tar command failed with status Failure while copying from ${podName}:${srcDir}/${srcFile} to ${destPath}` + this.logger.error(errorMessage) + return reject(new FullstackTestingError(errorMessage)) } + this.logger.debug(`copyFrom.callback(status)=${status}`) }) .then(conn => { - conn.on('close', async (code, reason) => { + conn.on('error', (e) => { + self._deleteTempFile(tmpFile) + return reject(new FullstackTestingError( + `failed copying from ${podName}:${srcDir}/${srcFile} to ${destPath} because of connection error: ${e.message}`, e)) + }) + + conn.on('close', (code, reason) => { + this.logger.debug(`connection closed copying from ${podName}:${srcDir}/${srcFile} to ${destPath}`) if (code !== 1000) { // code 1000 is the success code - return reject(new FullstackTestingError(`failed to copy because of error (${code}): ${reason}`)) + const errorMessage = `failed copying from ${podName}:${srcDir}/${srcFile} to ${destPath} because of error (${code}): ${reason}` + this.logger.error(errorMessage) + return reject(new FullstackTestingError(errorMessage)) } - // extract the downloaded file - await tar.x({ - file: tmpFile, - cwd: destDir - }) + outputFileStream.end() + outputFileStream.close(() => { + this.logger.debug(`finished closing writerStream copying from ${podName}:${srcDir}/${srcFile} to ${destPath}`) - self._deleteTempFile(tmpFile) + try { + fs.copyFileSync(tmpFile, destPath) - const stat = fs.statSync(destPath) - if (stat && stat.size === 
srcFileSize) { - return resolve(true) - } + self._deleteTempFile(tmpFile) - return reject(new FullstackTestingError(`failed to download file completely: ${destPath}`)) - }) + const stat = fs.statSync(destPath) + let rejection + if (stat && stat.size === srcFileSize) { + this.logger.info(`Finished successfully copying from ${podName}:${srcDir}/${srcFile} to ${destPath}`) + } else { + rejection = true + if (!stat) { + additionalErrorMessageDetail = ', statSync returned no file status for the destination file' + } else { + additionalErrorMessageDetail = `, stat.size=${stat.size} != srcFileSize=${srcFileSize}` + } + } - conn.on('error', (e) => { - self._deleteTempFile(tmpFile) - return reject(new FullstackTestingError( - `failed to copy file ${destPath} because of connection error: ${e.message}`, e)) + if (rejection) { + const errorMessage = `failed copying from ${podName}:${srcDir}/${srcFile} to ${destPath} to download file completely: ${destPath}${additionalErrorMessageDetail}` + this.logger.error(errorMessage) + return reject(new FullstackTestingError(errorMessage)) + } else { + return resolve(true) + } + } catch (e) { + const errorMessage = `failed to complete copying from ${podName}:${srcDir}/${srcFile} to ${destPath} to extract file: ${destPath}` + this.logger.error(errorMessage, e) + return reject(new FullstackTestingError(errorMessage, e)) + } + }) }) }) + + errStream.on('data', (data) => { + const errorMessage = `error encountered copying from ${podName}:${srcDir}/${srcFile} to ${destPath}, error: ${data.toString()}` + this.logger.error(errorMessage) + return reject(new FullstackTestingError(errorMessage)) + }) + + outputFileStream.on('close', () => { + this.logger.debug(`finished copying from ${podName}:${srcDir}/${srcFile} to ${destPath}`) + }) + + outputFileStream.on('error', (err) => { + const errorMessage = `writerStream error encountered copying from ${podName}:${srcDir}/${srcFile} to ${destPath}, err: ${err.toString()}` + this.logger.error(errorMessage, err) + return reject(new FullstackTestingError(errorMessage, err)) + }) + + outputFileStream.on('end', () => { + this.logger.debug(`writerStream has ended for copying from ${podName}:${srcDir}/${srcFile} to ${destPath}`) + }) + + outputPassthroughStream.on('end', () => { + this.logger.debug(`writerPassthroughStream has ended for copying from ${podName}:${srcDir}/${srcFile} to ${destPath}`) + }) + + outputFileStream.on('finish', () => { + this.logger.debug(`stopping copy, writerStream has finished for copying from ${podName}:${srcDir}/${srcFile} to ${destPath}`) + }) + + outputPassthroughStream.on('finish', () => { + this.logger.debug(`stopping copy, writerPassthroughStream has finished for copying from ${podName}:${srcDir}/${srcFile} to ${destPath}`) + }) }) } catch (e) { - throw new FullstackTestingError( - `failed to download file from ${podName}:${containerName} [${srcPath} -> ${destDir}]: ${e.message}`, e) + const errorMessage = `failed to download file from ${podName}:${containerName} [${srcPath} -> ${destDir}]: ${e.message}` + this.logger.error(errorMessage, e) + throw new FullstackTestingError(errorMessage, e) } } /** - * Invoke bash command within a container and return the console output as string + * Invoke sh command within a container and return the console output as string * - * @param podName pod name - * @param containerName container name - * @param command bash commands as an array to be run within the containerName (e.g 'ls -la /opt/hgcapp') - * @param timeoutMs timout in milliseconds + * @param {string} 
podName + * @param {string} containerName + * @param {string|string[]} command - sh commands as an array to be run within the containerName (e.g 'ls -la /opt/hgcapp') + * @param {number} [timeoutMs] - timout in milliseconds * @returns {Promise} console output as string */ async execContainer (podName, containerName, command, timeoutMs = 1000) { @@ -651,9 +762,10 @@ export class K8 { * This simple server just forwards traffic from itself to a service running in kubernetes * -> localhost:localPort -> port-forward-tunnel -> kubernetes-pod:targetPort * - * @param podName pod name - * @param localPort local port - * @param podPort port of the pod + * @param {string} podName + * @param {number} localPort + * @param {number} podPort + * @returns {Promise} */ async portForward (podName, localPort, podPort) { const ns = this._getNamespace() @@ -671,8 +783,8 @@ export class K8 { /** * to test the connection to a pod within the network - * @param host the host of the target connection - * @param port the port of the target connection + * @param {string} host - the host of the target connection + * @param {number} port - the port of the target connection * @returns {Promise} */ async testConnection (host, port) { @@ -696,10 +808,10 @@ export class K8 { /** * Stop the port forwarder server * - * @param server an instance of server returned by portForward method - * @param maxAttempts the maximum number of attempts to check if the server is stopped - * @param timeout the delay between checks in milliseconds - * @return {Promise} + * @param {net.Server} server - an instance of server returned by portForward method + * @param {number} [maxAttempts] - the maximum number of attempts to check if the server is stopped + * @param {number} [timeout] - the delay between checks in milliseconds + * @returns {Promise} */ async stopPortForward (server, maxAttempts = 20, timeout = 500) { if (!server) { @@ -769,13 +881,13 @@ export class K8 { /** * Wait for pod - * @param phases an array of acceptable phases of the pods - * @param labels pod labels - * @param podCount number of pod expected - * @param maxAttempts maximum attempts to check - * @param delay delay between checks in milliseconds - * @param podItemPredicate a predicate function to check the pod item - * @return a Promise that checks the status of an array of pods + * @param {string[]} [phases] - an array of acceptable phases of the pods + * @param {string[]} [labels] - pod labels + * @param {number} [podCount] - number of pod expected + * @param {number} [maxAttempts] - maximum attempts to check + * @param {number} [delay] - delay between checks in milliseconds + * @param {Function} podItemPredicate - a predicate function to check the pod item + * @returns {Promise} a Promise that checks the status of an array of pods */ async waitForPods (phases = [constants.POD_PHASE_RUNNING], labels = [], podCount = 1, maxAttempts = 10, delay = 500, podItemPredicate) { const ns = this._getNamespace() @@ -833,11 +945,11 @@ export class K8 { /** * Check if pod is ready - * @param labels pod labels - * @param podCount number of pod expected - * @param maxAttempts maximum attempts to check - * @param delay delay between checks in milliseconds - * @return {Promise} + * @param {string[]} [labels] - pod labels + * @param {number} [podCount] - number of pod expected + * @param {number} [maxAttempts] - maximum attempts to check + * @param {number} [delay] - delay between checks in milliseconds + * @returns {Promise} */ async waitForPodReady (labels = [], podCount = 1, 
maxAttempts = 10, delay = 500) { try { @@ -849,14 +961,13 @@ export class K8 { /** * Check pods for conditions - * @param conditionsMap a map of conditions and values - * @param labels pod labels - * @param podCount number of pod expected - * @param maxAttempts maximum attempts to check - * @param delay delay between checks in milliseconds - * @return {Promise} + * @param {Map} conditionsMap - a map of conditions and values + * @param {string[]} [labels] - pod labels + * @param {number} [podCount] - number of pod expected + * @param {number} [maxAttempts] - maximum attempts to check + * @param {number} [delay] - delay between checks in milliseconds + * @returns {Promise} */ - async waitForPodConditions ( conditionsMap, labels = [], @@ -884,9 +995,9 @@ export class K8 { /** * Get a list of persistent volume claim names for the given namespace - * @param namespace the namespace of the persistent volume claims to return - * @param labels labels - * @returns return list of persistent volume claim names + * @param {string} namespace - the namespace of the persistent volume claims to return + * @param {string[]} [labels] - labels + * @returns {Promise} return list of persistent volume claim names */ async listPvcsByNamespace (namespace, labels = []) { const pvcs = [] @@ -909,9 +1020,9 @@ export class K8 { /** * Get a list of secrets for the given namespace - * @param namespace the namespace of the secrets to return - * @param labels labels - * @returns return list of secret names + * @param {string} namespace - the namespace of the secrets to return + * @param {string[]} [labels] - labels + * @returns {Promise} return list of secret names */ async listSecretsByNamespace (namespace, labels = []) { const secrets = [] @@ -934,8 +1045,8 @@ export class K8 { /** * Delete a persistent volume claim - * @param name the name of the persistent volume claim to delete - * @param namespace the namespace of the persistent volume claim to delete + * @param {string} name - the name of the persistent volume claim to delete + * @param {string} namespace - the namespace of the persistent volume claim to delete * @returns {Promise} true if the persistent volume claim was deleted */ async deletePvc (name, namespace) { @@ -949,10 +1060,10 @@ export class K8 { /** * retrieve the secret of the given namespace and label selector, if there is more than one, it returns the first - * @param namespace the namespace of the secret to search for - * @param labelSelector the label selector used to fetch the Kubernetes secret - * @returns a custom secret object with the relevant attributes, the values of the data key:value pair - * objects must be base64 decoded + * @param {string} namespace - the namespace of the secret to search for + * @param {string} labelSelector - the label selector used to fetch the Kubernetes secret + * @returns {Promise<{name: string, labels: Object, namespace: string, type: string, data: Object} | null>} a custom + * secret object with the relevant attributes, the values of the data key:value pair objects must be base64 decoded */ async getSecret (namespace, labelSelector) { const result = await this.kubeClient.listNamespacedSecret( @@ -973,19 +1084,19 @@ export class K8 { /** * creates a new Kubernetes secret with the provided attributes - * @param name the name of the new secret - * @param namespace the namespace to store the secret - * @param secretType the secret type - * @param data the secret, any values of a key:value pair must be base64 encoded - * @param labels the label to use for future 
label selector queries - * @param recreate if we should first run delete in the case that there the secret exists from a previous install + * @param {string} name - the name of the new secret + * @param {string} namespace - the namespace to store the secret + * @param {string} secretType - the secret type + * @param {Object} data - the secret, any values of a key:value pair must be base64 encoded + * @param {*} labels - the label to use for future label selector queries + * @param {boolean} recreate - if we should first run delete in the case that there the secret exists from a previous install * @returns {Promise} whether the secret was created successfully */ async createSecret (name, namespace, secretType, data, labels, recreate) { if (recreate) { try { await this.kubeClient.deleteNamespacedSecret(name, namespace) - } catch (e) { + } catch { // do nothing } } @@ -1010,8 +1121,8 @@ export class K8 { /** * delete a secret from the namespace - * @param name the name of the new secret - * @param namespace the namespace to store the secret + * @param {string} name - the name of the new secret + * @param {string} namespace - the namespace to store the secret * @returns {Promise} whether the secret was deleted successfully */ async deleteSecret (name, namespace) { @@ -1019,17 +1130,30 @@ export class K8 { return resp.response.statusCode === 200.0 } + /** + * @returns {string} + * @private + */ _getNamespace () { const ns = this.configManager.getFlag(flags.namespace) if (!ns) throw new MissingArgumentError('namespace is not set') return ns } + /** + * @param {string} fileName + * @returns {string} + * @private + */ _tempFileFor (fileName) { const tmpFile = `${fileName}-${uuid4()}` return path.join(os.tmpdir(), tmpFile) } + /** + * @param {string} tmpFile + * @private + */ _deleteTempFile (tmpFile) { if (fs.existsSync(tmpFile)) { fs.rmSync(tmpFile) diff --git a/src/core/key_manager.mjs b/src/core/key_manager.mjs index 8eb927f75..ef7bd4c06 100644 --- a/src/core/key_manager.mjs +++ b/src/core/key_manager.mjs @@ -14,18 +14,33 @@ * limitations under the License. 
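The `copyFrom` rewrite above replaces the tar-based download with `cat` streamed into a local temp file and handles backpressure explicitly: the source is paused whenever `write()` reports a full buffer and resumed on `'drain'` (`pipe()` achieves the same automatically). A minimal self-contained sketch of that pattern, with an illustrative file path:

```js
// Minimal backpressure sketch mirroring the copyFrom rewrite above.
import fs from 'fs'
import { PassThrough } from 'node:stream'

const source = new PassThrough({ highWaterMark: 10 * 1024 * 1024 })
const sink = fs.createWriteStream('/tmp/copy-example.bin')

source.on('data', (chunk) => {
  if (!sink.write(chunk)) { // write() returns false when the sink buffer is full
    source.pause()
  }
})
sink.on('drain', () => source.resume()) // buffer flushed, keep reading

source.end(Buffer.from('demo payload'))
```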
* */ +'use strict' import * as x509 from '@peculiar/x509' import crypto from 'crypto' import fs from 'fs' import path from 'path' -import { FullstackTestingError, MissingArgumentError } from './errors.mjs' -import { getTmpDir } from './helpers.mjs' -import { constants, Keytool } from './index.mjs' +import { FullstackTestingError, IllegalArgumentError, MissingArgumentError } from './errors.mjs' +import { constants } from './index.mjs' import { Logger } from './logging.mjs' import { Templates } from './templates.mjs' +import * as helpers from './helpers.mjs' +import chalk from 'chalk' x509.cryptoProvider.set(crypto) +/** + * @typedef {Object} NodeKeyObject + * @property {CryptoKey} privateKey + * @property {x509.X509Certificate} certificate + * @property {x509.X509Certificates} certificateChain + */ + +/** + * @typedef {Object} PrivateKeyAndCertificateObject + * @property {string} privateKeyFile + * @property {string} certificateFile + */ + export class KeyManager { static SigningKeyAlgo = { name: 'RSASSA-PKCS1-v1_5', @@ -60,6 +75,9 @@ export class KeyManager { hash: 'SHA-384' } + /** + * @param {Logger} logger + */ constructor (logger) { if (!logger || !(logger instanceof Logger)) throw new MissingArgumentError('An instance of core/Logger is required') this.logger = logger @@ -67,7 +85,7 @@ export class KeyManager { /** * Convert CryptoKey into PEM string - * @param privateKey + * @param {CryptoKey} privateKey * @returns {Promise} */ async convertPrivateKeyToPem (privateKey) { @@ -77,9 +95,9 @@ export class KeyManager { /** * Convert PEM private key into CryptoKey - * @param pemStr PEM string - * @param algo key algorithm - * @param keyUsages key usages + * @param {string} pemStr - PEM string + * @param {*} algo - key algorithm + * @param {string[]} [keyUsages] * @returns {Promise} */ async convertPemToPrivateKey (pemStr, algo, keyUsages = ['sign']) { @@ -98,10 +116,10 @@ export class KeyManager { /** * Return file names for node key - * @param nodeId node ID - * @param keyPrefix key prefix such as constants.PFX_AGREEMENT_KEY_PREFIX - * @param keysDir directory where keys and certs are stored - * @returns {{privateKeyFile: string, certificateFile: string}} + * @param {string} nodeId + * @param {string} keysDir - directory where keys and certs are stored + * @param {string} [keyPrefix] - key prefix such as constants.SIGNING_KEY_PREFIX + * @returns {PrivateKeyAndCertificateObject} */ prepareNodeKeyFilePaths (nodeId, keysDir, keyPrefix = constants.SIGNING_KEY_PREFIX) { if (!nodeId) throw new MissingArgumentError('nodeId is required') @@ -119,9 +137,9 @@ export class KeyManager { /** * Return file names for TLS key - * @param nodeId node ID - * @param keysDir directory where keys and certs are stored - * @returns {{privateKeyFile: string, certificateFile: string}} + * @param {string} nodeId + * @param {string} keysDir - directory where keys and certs are stored + * @returns {PrivateKeyAndCertificateObject} */ prepareTLSKeyFilePaths (nodeId, keysDir) { if (!nodeId) throw new MissingArgumentError('nodeId is required') @@ -138,12 +156,12 @@ export class KeyManager { /** * Store node keys and certs as PEM files - * @param nodeId node ID - * @param nodeKey an object containing privateKeyPem, certificatePem data - * @param keysDir directory where keys and certs are stored - * @param nodeKeyFiles an object stores privateKeyFile and certificateFile - * @param keyName optional key type name for logging - * @return a Promise that saves the keys and certs as PEM files + * @param {string} nodeId + * @param 
{NodeKeyObject} nodeKey + * @param {string} keysDir - directory where keys and certs are stored + * @param {PrivateKeyAndCertificateObject} nodeKeyFiles + * @param {string} [keyName] - optional key type name for logging + * @returns {Promise} a Promise that saves the keys and certs as PEM files */ async storeNodeKey (nodeId, nodeKey, keysDir, nodeKeyFiles, keyName = '') { if (!nodeId) { @@ -193,8 +211,7 @@ export class KeyManager { }) self.logger.debug(`Stored ${keyName} key for node: ${nodeId}`, { - nodeKeyFiles, - cert: certPems[0] + nodeKeyFiles }) resolve(nodeKeyFiles) @@ -206,12 +223,12 @@ export class KeyManager { /** * Load node keys and certs from PEM files - * @param nodeId node ID - * @param keysDir directory where keys and certs are stored - * @param algo algorithm used for key - * @param nodeKeyFiles an object stores privateKeyFile and certificateFile - * @param keyName optional key type name for logging - * @return returns a dictionary object contains privateKey, certificate, certificateChain + * @param {string} nodeId + * @param {string} keysDir - directory where keys and certs are stored + * @param {*} algo - algorithm used for key + * @param {{privateKeyFile: string, certificateFile: string}} nodeKeyFiles an object stores privateKeyFile and certificateFile + * @param {string} [keyName] - optional key type name for logging + * @returns {Promise} */ async loadNodeKey (nodeId, keysDir, algo, nodeKeyFiles, keyName = '') { if (!nodeId) { @@ -243,6 +260,7 @@ export class KeyManager { const certBytes = await fs.readFileSync(nodeKeyFiles.certificateFile) const certPems = x509.PemConverter.decode(certBytes.toString()) + /** @type {x509.X509Certificate[]} */ const certs = [] certPems.forEach(certPem => { const cert = new x509.X509Certificate(certPem) @@ -264,8 +282,8 @@ export class KeyManager { /** * Generate signing key and certificate - * @param nodeId node ID - * @return returns a dictionary object stores privateKey, certificate, certificateChain + * @param {string} nodeId + * @returns {Promise<{NodeKeyObject>} */ async generateSigningKey (nodeId) { try { @@ -310,10 +328,10 @@ export class KeyManager { /** * Store signing key and certificate - * @param nodeId node ID - * @param nodeKey an object containing privateKeyPem, certificatePem data - * @param keysDir directory where keys and certs are stored - * @return returns a Promise that saves the keys and certs as PEM files + * @param {string} nodeId + * @param {NodeKeyObject} nodeKey - an object containing privateKeyPem, certificatePem data + * @param {string} keysDir - directory where keys and certs are stored + * @returns {Promise<*>} returns a Promise that saves the keys and certs as PEM files */ async storeSigningKey (nodeId, nodeKey, keysDir) { const nodeKeyFiles = this.prepareNodeKeyFilePaths(nodeId, keysDir, constants.SIGNING_KEY_PREFIX) @@ -322,9 +340,9 @@ export class KeyManager { /** * Load signing key and certificate - * @param nodeId node ID - * @param keysDir directory path where pem files are stored - * @return returns a dictionary object contains privateKey, certificate, certificateChain + * @param {string} nodeId + * @param {string} keysDir - directory path where pem files are stored + * @returns {Promise} */ async loadSigningKey (nodeId, keysDir) { const nodeKeyFiles = this.prepareNodeKeyFilePaths(nodeId, keysDir, constants.SIGNING_KEY_PREFIX) @@ -334,10 +352,10 @@ export class KeyManager { /** * Generate EC key and cert * - * @param nodeId node ID - * @param keyPrefix key prefix such as 
constants.PFX_AGREEMENT_KEY_PREFIX - * @param signingKey signing key - * @return a dictionary object stores privateKey, certificate, certificateChain + * @param {string} nodeId + * @param {string} keyPrefix - key prefix such as constants.SIGNING_KEY_PREFIX + * @param {NodeKeyObject} signingKey + * @returns {Promise} a dictionary object stores privateKey, certificate, certificateChain */ async ecKey (nodeId, keyPrefix, signingKey) { if (!nodeId) throw new MissingArgumentError('nodeId is required') @@ -388,39 +406,6 @@ export class KeyManager { } } - /** - * Generate agreement key - * @param nodeId node ID - * @param signingKey signing key - * @return a dictionary object stores privateKey, certificate, certificateChain - */ - async generateAgreementKey (nodeId, signingKey) { - return this.ecKey(nodeId, constants.AGREEMENT_KEY_PREFIX, signingKey) - } - - /** - * Store agreement key and certificate - * @param nodeId node ID - * @param nodeKey an object containing privateKeyPem, certificatePem data - * @param keysDir directory where keys and certs are stored - * @return a Promise that saves the keys and certs as PEM files - */ - async storeAgreementKey (nodeId, nodeKey, keysDir) { - const nodeKeyFiles = this.prepareNodeKeyFilePaths(nodeId, keysDir, constants.AGREEMENT_KEY_PREFIX) - return this.storeNodeKey(nodeId, nodeKey, keysDir, nodeKeyFiles, 'agreement') - } - - /** - * Load agreement key and certificate - * @param nodeId node ID - * @param keysDir directory path where pem files are stored - * @return a dictionary object contains privateKey, certificate, certificateChain - */ - async loadAgreementKey (nodeId, keysDir) { - const nodeKeyFiles = this.prepareNodeKeyFilePaths(nodeId, keysDir, constants.AGREEMENT_KEY_PREFIX) - return this.loadNodeKey(nodeId, keysDir, KeyManager.ECKeyAlgo, nodeKeyFiles, 'agreement') - } - /** * Generate gRPC TLS key * @@ -428,9 +413,9 @@ export class KeyManager { * hedera-.key * hedera-.crt * - * @param nodeId - * @param distinguishedName distinguished name as: new x509.Name(`CN=${nodeId},ST=${state},L=${locality},O=${org},OU=${orgUnit},C=${country}`) - * @return {Promise} + * @param {string} nodeId + * @param {x509.Name} distinguishedName distinguished name as: new x509.Name(`CN=${nodeId},ST=${state},L=${locality},O=${org},OU=${orgUnit},C=${country}`) + * @returns {Promise} */ async generateGrpcTLSKey (nodeId, distinguishedName = new x509.Name(`CN=${nodeId}`)) { if (!nodeId) throw new MissingArgumentError('nodeId is required') @@ -478,10 +463,10 @@ export class KeyManager { /** * Store TLS key and certificate - * @param nodeId node ID - * @param nodeKey an object containing privateKeyPem, certificatePem data - * @param keysDir directory where keys and certs are stored - * @return a Promise that saves the keys and certs as PEM files + * @param {string} nodeId + * @param {NodeKeyObject} nodeKey + * @param {string} keysDir - directory where keys and certs are stored + * @returns {Promise} a Promise that saves the keys and certs as PEM files */ async storeTLSKey (nodeId, nodeKey, keysDir) { const nodeKeyFiles = this.prepareTLSKeyFilePaths(nodeId, keysDir) @@ -490,171 +475,113 @@ export class KeyManager { /** * Load TLS key and certificate - * @param nodeId node ID - * @param keysDir directory path where pem files are stored - * @return a dictionary object contains privateKey, certificate, certificateChain + * @param {string} nodeId + * @param {string} keysDir - directory path where pem files are stored + * @returns {Promise} */ async loadTLSKey (nodeId, keysDir) { 
const nodeKeyFiles = this.prepareTLSKeyFilePaths(nodeId, keysDir) return this.loadNodeKey(nodeId, keysDir, KeyManager.TLSKeyAlgo, nodeKeyFiles, 'gRPC TLS') } + async copyNodeKeysToStaging (nodeKey, destDir) { + for (const keyFile of [nodeKey.privateKeyFile, nodeKey.certificateFile]) { + if (!fs.existsSync(keyFile)) { + throw new FullstackTestingError(`file (${keyFile}) is missing`) + } + + const fileName = path.basename(keyFile) + fs.cpSync(keyFile, path.join(destDir, fileName)) + } + } + + async copyGossipKeysToStaging (keysDir, stagingKeysDir, nodeIds) { + // copy gossip keys to the staging + for (const nodeId of nodeIds) { + const signingKeyFiles = this.prepareNodeKeyFilePaths(nodeId, keysDir, constants.SIGNING_KEY_PREFIX) + await this.copyNodeKeysToStaging(signingKeyFiles, stagingKeysDir) + } + } + /** - * Generate PFX private key file + * Return a list of subtasks to generate gossip keys * - * It generates 'private-.pfx' containing: - * - s-key & cert: self-signed signing key - * - a-key & cert: agreement key and signed cert - * - e-key & cert: encryption key and signed cert (currently unused) + * WARNING: These tasks MUST run in sequence. * - * @param keytool an instance of Keytool class - * @param nodeId node id - * @param keysDir directory where the pfx files should be stored - * @param tmpDir tmp directory where intermediate files can be stored. - * @return {Promise} path to the pfx file + * @param keytoolDepManager an instance of core/KeytoolDepManager + * @param nodeIds node ids + * @param keysDir keys directory + * @param curDate current date + * @param allNodeIds includes the nodeIds to get new keys as well as existing nodeIds that will be included in the public.pfx file + * @return a list of subtasks + * @private */ - async generatePrivatePfxKeys (keytool, nodeId, keysDir, tmpDir = getTmpDir()) { - if (!keytool || !(keytool instanceof Keytool)) throw new MissingArgumentError('An instance of core/Keytool is required') - if (!nodeId) throw new MissingArgumentError('nodeId is required') - if (!keysDir) throw new MissingArgumentError('keysDir is required') - if (!fs.existsSync(keysDir)) throw new MissingArgumentError('keysDir does not exist') - - const privatePfxFile = path.join(keysDir, `private-${nodeId}.pfx`) - if (fs.existsSync(privatePfxFile)) { - this.logger.debug(`overwriteKeys is set to false and private pfx file already exists: ${privatePfxFile}`) - return privatePfxFile + taskGenerateGossipKeys (keytoolDepManager, nodeIds, keysDir, curDate = new Date(), allNodeIds = null) { + allNodeIds = allNodeIds || nodeIds + if (!Array.isArray(nodeIds) || !nodeIds.every((nodeId) => typeof nodeId === 'string')) { + throw new IllegalArgumentError('nodeIds must be an array of strings, nodeIds = ' + JSON.stringify(nodeIds)) } + const self = this + const subTasks = [] - const validity = constants.CERTIFICATE_VALIDITY_YEARS * 365 - const tmpPrivatePfxFile = path.join(tmpDir, `private-${nodeId}.pfx`) - const signedKeyAlias = `${constants.SIGNING_KEY_PREFIX}-${nodeId}` - - // signing key (s key) - await keytool.genKeyPair( - `-alias ${signedKeyAlias}`, - `-keystore ${tmpPrivatePfxFile}`, - '-storetype pkcs12', - '-storepass password', - `-dname cn=s-${nodeId}`, - '-keyalg rsa', - '-sigalg SHA384withRSA', - '-keysize 3072', - `-validity ${validity}` + subTasks.push({ + title: 'Backup old files', + task: () => helpers.backupOldPemKeys(nodeIds, keysDir, curDate) + } ) - // generate signed keys (a-key and e-key) - for (const keyPrefix of [constants.AGREEMENT_KEY_PREFIX, 
constants.ENCRYPTION_KEY_PREFIX]) { - const certReqFile = path.join(tmpDir, `${nodeId}-cert-req-${keyPrefix}.pfx`) - const certFile = path.join(tmpDir, `${nodeId}-signed-cert-${keyPrefix}.pfx`) - const alias = `${keyPrefix}-${nodeId}` - // generate key pair - await keytool.genKeyPair( - `-alias ${alias}`, - `-keystore "${tmpPrivatePfxFile}"`, - '-storetype pkcs12', - '-storepass password', - `-dname cn=${alias}`, - '-keyalg ec', - '-sigalg SHA384withECDSA', - '-groupname secp384r1', - `-validity ${validity}` - ) - - // cert-req - await keytool.certReq( - `-alias ${alias}`, - `-keystore "${tmpPrivatePfxFile}"`, - '-storetype pkcs12', - '-storepass password', - `-file "${certReqFile}"` - ) - - // signed cert - await keytool.genCert( - `-alias ${signedKeyAlias}`, - `-keystore "${tmpPrivatePfxFile}"`, - '-storetype pkcs12', - '-storepass password', - `-validity ${validity}`, - `-infile "${certReqFile}"`, - `-outfile "${certFile}"` - ) - - // import signed cert in private-pfx file - await keytool.importCert( - `-alias ${alias}`, - `-keystore "${tmpPrivatePfxFile}"`, - '-storetype pkcs12', - '-storepass password', - `-file "${certFile}"` - ) + for (const nodeId of nodeIds) { + subTasks.push({ + title: `Gossip key for node: ${chalk.yellow(nodeId)}`, + task: async () => { + const signingKey = await self.generateSigningKey(nodeId) + const signingKeyFiles = await self.storeSigningKey(nodeId, signingKey, keysDir) + this.logger.debug(`generated Gossip signing keys for node ${nodeId}`, { keyFiles: signingKeyFiles }) + } + }) } - - this.logger.debug(`Copying generated private pfx file: ${tmpPrivatePfxFile} -> ${privatePfxFile}`) - fs.cpSync(tmpPrivatePfxFile, privatePfxFile) - - return privatePfxFile + return subTasks } /** - * Update PFX public key file + * Return a list of subtasks to generate gRPC TLS keys * - * WARNING: do not invoke this method in parallel as the same public.pfx will be modified for the given node ids. + * WARNING: These tasks should run in sequence * - * @param keytool an instance of core/Keytool - * @param nodeIds node Ids + * @param nodeIds node ids * @param keysDir keys directory - * @param tmpDir tmp directory where intermediate files can be stored. 
- * @return {Promise} + * @param curDate current date + * @return return a list of subtasks + * @private */ - async updatePublicPfxKey (keytool, nodeIds, keysDir, tmpDir = getTmpDir()) { - if (!keytool || !(keytool instanceof Keytool)) throw new MissingArgumentError('An instance of core/Keytool is required') - if (!nodeIds) throw new MissingArgumentError('nodeId is required') - if (!keysDir) throw new MissingArgumentError('keysDir is required') - if (!fs.existsSync(keysDir)) throw new MissingArgumentError('keysDir does not exist') + taskGenerateTLSKeys (nodeIds, keysDir, curDate = new Date()) { + // check if nodeIds is an array of strings + if (!Array.isArray(nodeIds) || !nodeIds.every((nodeId) => typeof nodeId === 'string')) { + throw new FullstackTestingError('nodeIds must be an array of strings') + } + const self = this + const nodeKeyFiles = new Map() + const subTasks = [] - const publicPfxFile = path.join(keysDir, 'public.pfx') - const validity = constants.CERTIFICATE_VALIDITY_YEARS * 365 - const tmpPublicPfxFile = path.join(tmpDir, constants.PUBLIC_PFX) - if (fs.existsSync(publicPfxFile)) { - fs.cpSync(publicPfxFile, tmpPublicPfxFile) + subTasks.push({ + title: 'Backup old files', + task: () => helpers.backupOldTlsKeys(nodeIds, keysDir, curDate) } + ) for (const nodeId of nodeIds) { - const privatePfxFile = path.join(keysDir, `private-${nodeId}.pfx`) - if (!fs.existsSync(privatePfxFile)) throw new FullstackTestingError(`private pfx ${privatePfxFile} file does not exist`) - - for (const keyPrefix of - [constants.SIGNING_KEY_PREFIX, constants.AGREEMENT_KEY_PREFIX, constants.ENCRYPTION_KEY_PREFIX]) { - const certFile = path.join(tmpDir, `${nodeId}-cert-${keyPrefix}.pfx`) - const alias = `${keyPrefix}-${nodeId}` - - // export signed cert - await keytool.exportCert( - `-alias ${alias}`, - `-keystore "${privatePfxFile}"`, - '-storetype pkcs12', - '-storepass password', - `-validity ${validity}`, - `-file "${certFile}"` - ) - - // import signed cert - await keytool.importCert( - `-alias ${alias}`, - `-keystore "${tmpPublicPfxFile}"`, - '-storetype pkcs12', - '-storepass password', - '-noprompt', - `-file "${certFile}"` - ) - } + subTasks.push({ + title: `TLS key for node: ${chalk.yellow(nodeId)}`, + task: async () => { + const tlsKey = await self.generateGrpcTLSKey(nodeId) + const tlsKeyFiles = await self.storeTLSKey(nodeId, tlsKey, keysDir) + nodeKeyFiles.set(nodeId, { + tlsKeyFiles + }) + } + }) } - // copy generated pfx file to desired location - this.logger.debug(`Copying generated public.pfx file: ${tmpPublicPfxFile} -> ${publicPfxFile}`) - fs.cpSync(tmpPublicPfxFile, publicPfxFile) - - return publicPfxFile + return subTasks } } diff --git a/src/core/keytool.mjs b/src/core/keytool.mjs index 5860c02fd..b5567e639 100644 --- a/src/core/keytool.mjs +++ b/src/core/keytool.mjs @@ -14,6 +14,7 @@ * limitations under the License. 
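`taskGenerateGossipKeys` and `taskGenerateTLSKeys` return plain subtask arrays rather than executing anything themselves; per the WARNING in their JSDoc they must run in sequence. A sketch of the returned shape and how a listr2 runner would consume it (the runner options and stubbed task bodies are assumptions, not code from this change):

```js
// Sketch: shape of the subtask lists returned above, run sequentially.
import { Listr } from 'listr2'

const subTasks = [
  { title: 'Backup old files', task: async () => { /* helpers.backupOldPemKeys(...) */ } },
  { title: 'Gossip key for node: node0', task: async () => { /* generateSigningKey + storeSigningKey */ } }
]

await new Listr(subTasks, { concurrent: false }).run() // MUST run in sequence
```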
* */ +'use strict' import os from 'os' import { constants } from './index.mjs' import { ShellRunner } from './shell_runner.mjs' @@ -21,6 +22,10 @@ import { Templates } from './templates.mjs' import { MissingArgumentError } from './errors.mjs' export class Keytool extends ShellRunner { + /** + * @param {Logger} logger + * @param {NodeJS.Platform} [osPlatform] + */ constructor (logger, osPlatform = os.platform()) { if (!logger) throw new MissingArgumentError('an instance of core/Logger is required', logger) super(logger) @@ -30,8 +35,8 @@ export class Keytool extends ShellRunner { /** * Prepare a `keytool` shell command string - * @param action represents a helm command (e.g. create | install | get ) - * @param args args of the command + * @param {string} action - represents a helm command (e.g. create | install | get ) + * @param {string} args - args of the command * @returns {string} */ prepareCommand (action, ...args) { @@ -44,7 +49,7 @@ export class Keytool extends ShellRunner { /** * Invoke `keytool -genkeypair` command - * @param args args of the command + * @param {string} args - args of the command * @returns {Promise} console output as an array of strings */ async genKeyPair (...args) { @@ -53,7 +58,7 @@ export class Keytool extends ShellRunner { /** * Invoke `keytool -certreq` command - * @param args args of the command + * @param {string} args - args of the command * @returns {Promise} console output as an array of strings */ async certReq (...args) { @@ -62,7 +67,7 @@ export class Keytool extends ShellRunner { /** * Invoke `keytool -gencert` command - * @param args args of the command + * @param {string} args - args of the command * @returns {Promise} console output as an array of strings */ async genCert (...args) { @@ -71,7 +76,7 @@ export class Keytool extends ShellRunner { /** * Invoke `keytool -importcert` command - * @param args args of the command + * @param {string} args - args of the command * @returns {Promise} console output as an array of strings */ async importCert (...args) { @@ -80,7 +85,7 @@ export class Keytool extends ShellRunner { /** * Invoke `keytool -exportcert` command - * @param args args of the command + * @param {string} args - args of the command * @returns {Promise} console output as an array of strings */ async exportCert (...args) { @@ -89,7 +94,7 @@ export class Keytool extends ShellRunner { /** * Invoke `keytool -list` command - * @param args args of the command + * @param {string} args - args of the command * @returns {Promise} console output as an array of strings */ async list (...args) { diff --git a/src/core/logging.mjs b/src/core/logging.mjs index bdb1671eb..393d06445 100644 --- a/src/core/logging.mjs +++ b/src/core/logging.mjs @@ -14,11 +14,13 @@ * limitations under the License. 
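Each `Keytool` method forwards its vararg strings into a `keytool -<action> ...` shell command via `prepareCommand`. A call mirroring the PFX generation this change removes (the import paths, keystore path, and alias are illustrative):

```js
// Illustrative Keytool usage; import paths assume the module layout above.
import { Keytool } from './src/core/keytool.mjs'
import { NewLogger } from './src/core/logging.mjs'

const keytool = new Keytool(NewLogger('debug'))
await keytool.genKeyPair(
  '-alias s-node0',
  '-keystore /tmp/private-node0.pfx',
  '-storetype pkcs12',
  '-storepass password',
  '-keyalg rsa'
)
```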
* */ +'use strict' import * as winston from 'winston' import { constants } from './index.mjs' import { v4 as uuidv4 } from 'uuid' import * as util from 'util' import chalk from 'chalk' +import path from 'path' const customFormat = winston.format.combine( winston.format.label({ label: 'SOLO', message: false }), @@ -77,18 +79,24 @@ export const Logger = class { // - Write all logs with importance level of `error` or less to `error.log` // - Write all logs with importance level of `info` or less to `solo.log` // - new winston.transports.File({ filename: `${constants.SOLO_LOGS_DIR}/solo.log` }) + new winston.transports.File({ filename: path.join(constants.SOLO_LOGS_DIR, 'solo.log') }) // new winston.transports.File({filename: constants.TMP_DIR + "/logs/error.log", level: 'error'}), // new winston.transports.Console({format: customFormat}) ] }) } + /** + * @param {boolean} devMode + */ setDevMode (devMode) { this.debug(`dev mode logging: ${devMode}`) this.devMode = devMode } + /** + * @param {string} level + */ setLevel (level) { this.winstonLogger.setLevel(level) } @@ -97,6 +105,10 @@ export const Logger = class { this.traceId = uuidv4() } + /** + * @param {Object|undefined} meta + * @returns {Object} + */ prepMeta (meta) { if (meta === undefined) { meta = {} @@ -106,10 +118,17 @@ export const Logger = class { return meta } + /** + * @param msg + * @param args + */ showUser (msg, ...args) { console.log(util.format(msg, ...args)) } + /** + * @param {Error} err + */ showUserError (err) { const stack = [{ message: err.message, stacktrace: err.stack }] if (err.cause) { @@ -146,22 +165,43 @@ export const Logger = class { this.debug(err.message, { error: err.message, stacktrace: stack }) } + /** + * @param {string} msg + * @param {*} args + */ error (msg, ...args) { this.winstonLogger.error(msg, ...args, this.prepMeta()) } + /** + * @param {string} msg + * @param {*} args + */ warn (msg, ...args) { this.winstonLogger.warn(msg, ...args, this.prepMeta()) } + /** + * @param {string} msg + * @param {*} args + */ info (msg, ...args) { this.winstonLogger.info(msg, ...args, this.prepMeta()) } + /** + * @param {string} msg + * @param {*} args + */ debug (msg, ...args) { this.winstonLogger.debug(msg, ...args, this.prepMeta()) } + /** + * @param {string} title + * @param {string[]} items + * @returns {boolean} + */ showList (title, items = []) { this.showUser(chalk.green(`\n *** ${title} ***`)) this.showUser(chalk.green('-------------------------------------------------------------------------------')) @@ -175,6 +215,10 @@ export const Logger = class { return true } + /** + * @param {string} title + * @param {Object} obj + */ showJSON (title, obj) { this.showUser(chalk.green(`\n *** ${title} ***`)) this.showUser(chalk.green('-------------------------------------------------------------------------------')) @@ -182,6 +226,12 @@ export const Logger = class { } } +/** + * @param {string} [level] + * @param {boolean} [devMode] + * @returns {Logger} + * @constructor + */ export function NewLogger (level = 'debug', devMode = false) { return new Logger(level, devMode) } diff --git a/src/core/network_node_services.mjs b/src/core/network_node_services.mjs index cb642b3e7..205ec46c5 100644 --- a/src/core/network_node_services.mjs +++ b/src/core/network_node_services.mjs @@ -14,9 +14,34 @@ * limitations under the License. 
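The `Logger` facade wraps winston and stamps every record with a `traceId` via `prepMeta`; `showUser`, `showList`, and `showJSON` bypass the transports and print directly to the console. Basic usage:

```js
// Basic Logger usage; the log file location comes from constants.SOLO_LOGS_DIR.
import { NewLogger } from './src/core/logging.mjs'

const logger = NewLogger('debug', false)
logger.info('node setup started', { node: 'node0' })
logger.showList('Nodes', ['node0', 'node1'])
logger.showJSON('Config', { namespace: 'solo-e2e' })
```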
* */ +'use strict' export class NetworkNodeServices { + /** + * @param {Object} builder + * @param {string} builder.nodeName + * @param {string} builder.nodePodName + * @param {string} builder.haProxyName + * @param {string} builder.haProxyLoadBalancerIp + * @param {string} builder.haProxyClusterIp + * @param {string|number} builder.haProxyGrpcPort + * @param {string|number} builder.haProxyGrpcsPort + * @param {string} builder.accountId + * @param {string} builder.haProxyAppSelector + * @param {string} builder.haProxyPodName + * @param {string} builder.nodeServiceName + * @param {string} builder.nodeServiceClusterIp + * @param {string} builder.nodeServiceLoadBalancerIp + * @param {string|number} builder.nodeServiceGossipPort + * @param {string|number} builder.nodeServiceGrpcPort + * @param {string|number} builder.nodeServiceGrpcsPort + * @param {string} builder.envoyProxyName + * @param {string} builder.envoyProxyClusterIp + * @param {string} builder.envoyProxyLoadBalancerIp + * @param {string|number} builder.envoyProxyGrpcWebPort + */ constructor (builder) { this.nodeName = builder.nodeName + this.nodePodName = builder.nodePodName this.haProxyName = builder.haProxyName this.haProxyLoadBalancerIp = builder.haProxyLoadBalancerIp this.haProxyClusterIp = builder.haProxyClusterIp @@ -37,110 +62,203 @@ export class NetworkNodeServices { this.envoyProxyGrpcWebPort = builder.envoyProxyGrpcWebPort } + /** + * @returns {string} + */ key () { return this.nodeName } } export class NetworkNodeServicesBuilder { + /** + * @param {string} nodeName + */ constructor (nodeName) { this.nodeName = nodeName } + /** + * @param {string} accountId + * @returns {this} + */ withAccountId (accountId) { this.accountId = accountId return this } + /** + * @param {string} haProxyName + * @returns {this} + */ withHaProxyName (haProxyName) { this.haProxyName = haProxyName return this } + /** + * @param {string} haProxyClusterIp + * @returns {this} + */ withHaProxyClusterIp (haProxyClusterIp) { this.haProxyClusterIp = haProxyClusterIp return this } + /** + * @param {string} haProxyLoadBalancerIp + * @returns {this} + */ withHaProxyLoadBalancerIp (haProxyLoadBalancerIp) { this.haProxyLoadBalancerIp = haProxyLoadBalancerIp return this } + /** + * @param {string|number} haProxyGrpcPort + * @returns {this} + */ withHaProxyGrpcPort (haProxyGrpcPort) { this.haProxyGrpcPort = haProxyGrpcPort return this } + /** + * @param {string|number} haProxyGrpcsPort + * @returns {this} + */ withHaProxyGrpcsPort (haProxyGrpcsPort) { this.haProxyGrpcsPort = haProxyGrpcsPort return this } + /** + * @param {string} haProxyAppSelector + * @returns {this} + */ withHaProxyAppSelector (haProxyAppSelector) { this.haProxyAppSelector = haProxyAppSelector return this } + /** + * @param {string} haProxyPodName + * @returns {this} + */ withHaProxyPodName (haProxyPodName) { this.haProxyPodName = haProxyPodName return this } + /** + * @param {string} nodePodName + * @returns {this} + */ + withNodePodName (nodePodName) { + this.nodePodName = nodePodName + return this + } + + /** + * @param {string} nodeServiceName + * @returns {this} + */ withNodeServiceName (nodeServiceName) { this.nodeServiceName = nodeServiceName return this } + /** + * @param {string} nodeServiceClusterIp + * @returns {this} + */ withNodeServiceClusterIp (nodeServiceClusterIp) { this.nodeServiceClusterIp = nodeServiceClusterIp return this } + /** + * @param {string} nodeServiceLoadBalancerIp + * @returns {this} + */ withNodeServiceLoadBalancerIp (nodeServiceLoadBalancerIp) { 
this.nodeServiceLoadBalancerIp = nodeServiceLoadBalancerIp return this } + /** + * @param {string|number} nodeServiceGossipPort + * @returns {this} + */ withNodeServiceGossipPort (nodeServiceGossipPort) { this.nodeServiceGossipPort = nodeServiceGossipPort return this } + /** + * @param {string|number} nodeServiceGrpcPort + * @returns {this} + */ withNodeServiceGrpcPort (nodeServiceGrpcPort) { this.nodeServiceGrpcPort = nodeServiceGrpcPort return this } + /** + * @param {string|number} nodeServiceGrpcsPort + * @returns {this} + */ withNodeServiceGrpcsPort (nodeServiceGrpcsPort) { this.nodeServiceGrpcsPort = nodeServiceGrpcsPort return this } + /** + * @param {string} envoyProxyName + * @returns {this} + */ withEnvoyProxyName (envoyProxyName) { this.envoyProxyName = envoyProxyName return this } + /** + * @param {string} envoyProxyClusterIp + * @returns {this} + */ withEnvoyProxyClusterIp (envoyProxyClusterIp) { this.envoyProxyClusterIp = envoyProxyClusterIp return this } + /** + * @param {string} envoyProxyLoadBalancerIp + * @returns {this} + */ withEnvoyProxyLoadBalancerIp (envoyProxyLoadBalancerIp) { this.envoyProxyLoadBalancerIp = envoyProxyLoadBalancerIp return this } + /** + * @param {string|number} envoyProxyGrpcWebPort + * @returns {this} + */ withEnvoyProxyGrpcWebPort (envoyProxyGrpcWebPort) { this.envoyProxyGrpcWebPort = envoyProxyGrpcWebPort return this } + /** + * @returns {NetworkNodeServices} + */ build () { return new NetworkNodeServices(this) } + /** + * @returns {string} + */ key () { return this.nodeName } diff --git a/src/core/package_downloader.mjs b/src/core/package_downloader.mjs index d8191845c..40c164baa 100644 --- a/src/core/package_downloader.mjs +++ b/src/core/package_downloader.mjs @@ -14,6 +14,7 @@ * limitations under the License. * */ +'use strict' import * as crypto from 'crypto' import * as fs from 'fs' import { pipeline as streamPipeline } from 'node:stream/promises' @@ -34,13 +35,17 @@ import { constants } from './index.mjs' export class PackageDownloader { /** * Create an instance of Downloader - * @param logger an instance of core/Logger + * @param {Logger} logger - an instance of core/Logger */ constructor (logger) { if (!logger) throw new IllegalArgumentError('an instance of core/Logger is required', logger) this.logger = logger } + /** + * @param {string} url + * @returns {boolean} + */ isValidURL (url) { try { // attempt to parse to check URL format @@ -52,6 +57,10 @@ export class PackageDownloader { return false } + /** + * @param {string} url + * @returns {Promise} + */ async urlExists (url) { const self = this @@ -102,8 +111,9 @@ export class PackageDownloader { /** * Fetch data from a URL and save the output to a file * - * @param url source file URL - * @param destPath destination path for the downloaded file + * @param {string} url - source file URL + * @param {string} destPath - destination path for the downloaded file + * @returns {Promise} */ async fetchFile (url, destPath) { if (!url) { @@ -136,8 +146,8 @@ export class PackageDownloader { /** * Compute hash of the file contents - * @param filePath path of the file - * @param algo hash algorithm + * @param {string} filePath - path of the file + * @param {string} [algo] - hash algorithm * @returns {Promise} returns hex digest of the computed hash * @throws Error if the file cannot be read */ @@ -172,9 +182,10 @@ export class PackageDownloader { * * It throws error if the checksum doesn't match. 
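`NetworkNodeServicesBuilder` above is a fluent builder keyed by node name; `build()` hands the builder itself to the `NetworkNodeServices` constructor. Typical composition (values are placeholders):

```js
// Illustrative builder usage; values are placeholders.
import { NetworkNodeServicesBuilder } from './src/core/network_node_services.mjs'

const services = new NetworkNodeServicesBuilder('node0')
  .withAccountId('0.0.3')
  .withNodePodName('network-node0-0')
  .withHaProxyGrpcPort(50211)
  .build()

console.log(services.key()) // 'node0'
```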
* - * @param sourceFile path to the file for which checksum to be computed - * @param checksum expected checksum - * @param algo hash algorithm to be used to compute checksum + * @param {string} sourceFile - path to the file for which the checksum is to be computed + * @param {string} checksum - expected checksum + * @param {string} [algo] - hash algorithm to be used to compute checksum + * @returns {Promise} + * @throws DataValidationError if the checksum doesn't match */ async verifyChecksum (sourceFile, checksum, algo = 'sha256') { @@ -184,12 +195,12 @@ export class PackageDownloader { /** * Fetch a remote package - * @param packageURL package URL - * @param checksumURL package checksum URL - * @param destDir a directory where the files should be downloaded to - * @param algo checksum algo - * @param force force download even if the file exists in the destDir - * @return {Promise} + * @param {string} packageURL + * @param {string} checksumURL - package checksum URL + * @param {string} destDir - a directory where the files should be downloaded to + * @param {string} [algo] - checksum algo + * @param {boolean} [force] - force download even if the file exists in the destDir + * @returns {Promise} */ async fetchPackage (packageURL, checksumURL, destDir, algo = 'sha256', force = false) { if (!packageURL) throw new Error('package URL is required') @@ -234,12 +245,11 @@ export class PackageDownloader { * * It fetches the build.zip file containing the release from a URL like: https://builds.hedera.com/node/software/v0.40/build-v0.40.4.zip * - * @param tag full semantic version e.g. v0.40.4 - * @param destDir directory where the artifact needs to be saved - * @param force whether to download even if the file exists + * @param {string} tag - full semantic version e.g. v0.40.4 + * @param {string} destDir - directory where the artifact needs to be saved + * @param {boolean} [force] - whether to download even if the file exists * @returns {Promise<string>} full path to the downloaded file */ - async fetchPlatform (tag, destDir, force = false) { if (!tag) throw new MissingArgumentError('tag is required') if (!destDir) throw new MissingArgumentError('destination directory path is required') diff --git a/src/core/platform_installer.mjs b/src/core/platform_installer.mjs index f27ee44d9..3b29fb0d2 100644 --- a/src/core/platform_installer.mjs +++ b/src/core/platform_installer.mjs @@ -14,20 +14,27 @@ * limitations under the License.
* */ +'use strict' import * as fs from 'fs' -import * as os from 'os' import { Listr } from 'listr2' import * as path from 'path' -import * as semver from 'semver' import { FullstackTestingError, IllegalArgumentError, MissingArgumentError } from './errors.mjs' import { constants } from './index.mjs' import { Templates } from './templates.mjs' import { flags } from '../commands/index.mjs' +import * as Base64 from 'js-base64' +import chalk from 'chalk' /** * PlatformInstaller install platform code in the root-container of a network pod */ export class PlatformInstaller { + /** + * @param {Logger} logger + * @param {K8} k8 + * @param {ConfigManager} configManager + * @param {AccountManager} accountManager + */ constructor (logger, k8, configManager, accountManager) { if (!logger) throw new MissingArgumentError('an instance of core/Logger is required') if (!k8) throw new MissingArgumentError('an instance of core/K8 is required') @@ -40,12 +47,20 @@ export class PlatformInstaller { this.accountManager = accountManager } + /** + * @returns {string} + * @private + */ _getNamespace () { const ns = this.configManager.getFlag(flags.namespace) if (!ns) throw new MissingArgumentError('namespace is not set') return ns } + /** + * @param {string} releaseDir + * @returns {Promise} + */ async validatePlatformReleaseDir (releaseDir) { if (!releaseDir) throw new MissingArgumentError('releaseDir is required') if (!fs.existsSync(releaseDir)) { @@ -79,11 +94,10 @@ export class PlatformInstaller { /** * Fetch and extract platform code into the container - * @param podName pod name - * @param tag platform release tag - * @return {Promise} + * @param {string} podName + * @param {string} tag - platform release tag + * @returns {Promise} */ - async fetchPlatform (podName, tag) { if (!podName) throw new MissingArgumentError('podName is required') if (!tag) throw new MissingArgumentError('tag is required') @@ -105,12 +119,11 @@ export class PlatformInstaller { /** * Copy a list of files to a directory in the container * - * @param podName pod name - * @param srcFiles list of source files - * @param destDir destination directory - * @param container name of the container - * - * @return {Promise} list of pathso of the copied files insider the container + * @param {string} podName + * @param {string[]} srcFiles - list of source files + * @param {string} destDir - destination directory + * @param {string} [container] - name of the container + * @returns {Promise} list of paths of the copied files inside the container */ async copyFiles (podName, srcFiles, destDir, container = constants.ROOT_CONTAINER) { try { @@ -139,97 +152,79 @@ } } - async copyGossipKeys (podName, stagingDir, nodeIds, keyFormat = constants.KEY_FORMAT_PEM) { - const self = this - - if (!podName) throw new MissingArgumentError('podName is required') + async copyGossipKeys (nodeId, stagingDir, nodeIds) { + if (!nodeId) throw new MissingArgumentError('nodeId is required') if (!stagingDir) throw new MissingArgumentError('stagingDir is required') if (!nodeIds || nodeIds.length <= 0) throw new MissingArgumentError('nodeIds cannot be empty') try { - const keysDir = `${constants.HEDERA_HAPI_PATH}/data/keys` - const nodeId = Templates.extractNodeIdFromPodName(podName) const srcFiles = [] - switch (keyFormat) { - case constants.KEY_FORMAT_PEM: - // copy private keys for the node - srcFiles.push(`${stagingDir}/keys/${Templates.renderGossipPemPrivateKeyFile(constants.SIGNING_KEY_PREFIX, nodeId)}`) -
srcFiles.push(`${stagingDir}/keys/${Templates.renderGossipPemPrivateKeyFile(constants.AGREEMENT_KEY_PREFIX, nodeId)}`) - - // copy all public keys for all nodes - nodeIds.forEach(id => { - srcFiles.push(`${stagingDir}/keys/${Templates.renderGossipPemPublicKeyFile(constants.SIGNING_KEY_PREFIX, id)}`) - srcFiles.push(`${stagingDir}/keys/${Templates.renderGossipPemPublicKeyFile(constants.AGREEMENT_KEY_PREFIX, id)}`) - }) - break - case constants.KEY_FORMAT_PFX: - srcFiles.push(`${stagingDir}/keys/${Templates.renderGossipPfxPrivateKeyFile(nodeId)}`) - srcFiles.push(`${stagingDir}/keys/${constants.PUBLIC_PFX}`) - break - default: - throw new FullstackTestingError(`Unsupported key file format ${keyFormat}`) - } - - return await self.copyFiles(podName, srcFiles, keysDir) - } catch (e) { - throw new FullstackTestingError(`failed to copy gossip keys to pod '${podName}': ${e.message}`, e) - } - } - - async copyPlatformConfigFiles (podName, stagingDir) { - const self = this - - if (!podName) throw new MissingArgumentError('podName is required') - if (!stagingDir) throw new MissingArgumentError('stagingDir is required') - - try { - const srcFilesSet1 = [ - `${stagingDir}/config.txt`, - `${stagingDir}/templates/log4j2.xml`, - `${stagingDir}/templates/settings.txt` - ] + // copy private keys for the node + srcFiles.push(path.join(stagingDir, 'keys', Templates.renderGossipPemPrivateKeyFile(constants.SIGNING_KEY_PREFIX, nodeId))) - const fileList1 = await self.copyFiles(podName, srcFilesSet1, constants.HEDERA_HAPI_PATH) + // copy all public keys for all nodes + nodeIds.forEach(id => { + srcFiles.push(path.join(stagingDir, 'keys', Templates.renderGossipPemPublicKeyFile(constants.SIGNING_KEY_PREFIX, id))) + }) - const srcFilesSet2 = [ - `${stagingDir}/templates/api-permission.properties`, - `${stagingDir}/templates/application.properties`, - `${stagingDir}/templates/bootstrap.properties` - ] - - const fileList2 = await self.copyFiles(podName, srcFilesSet2, `${constants.HEDERA_HAPI_PATH}/data/config`) + const data = {} + for (const srcFile of srcFiles) { + const fileContents = fs.readFileSync(srcFile) + const fileName = path.basename(srcFile) + data[fileName] = Base64.encode(fileContents) + } - return fileList1.concat(fileList2) + if (!await this.k8.createSecret( + Templates.renderGossipKeySecretName(nodeId), + this._getNamespace(), 'Opaque', data, + Templates.renderGossipKeySecretLabelObject(nodeId), true)) { + throw new FullstackTestingError(`failed to create secret for gossip keys for node '${nodeId}'`) + } } catch (e) { - throw new FullstackTestingError(`failed to copy config files to pod '${podName}': ${e.message}`, e) + this.logger.error(`failed to copy gossip keys to secret '${Templates.renderGossipKeySecretName(nodeId)}': ${e.message}`, e) + throw new FullstackTestingError(`failed to copy gossip keys to secret '${Templates.renderGossipKeySecretName(nodeId)}': ${e.message}`, e) } } - async copyTLSKeys (podName, stagingDir) { - if (!podName) throw new MissingArgumentError('podName is required') + async copyTLSKeys (nodeIds, stagingDir) { + if (!nodeIds) throw new MissingArgumentError('nodeIds is required') if (!stagingDir) throw new MissingArgumentError('stagingDir is required') try { - const nodeId = Templates.extractNodeIdFromPodName(podName) - const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), `${nodeId}-tls-keys-`)) + const data = {} - // rename files appropriately in the tmp directory fs.cpSync(`${stagingDir}/keys/${Templates.renderTLSPemPrivateKeyFile(nodeId)}`, - `${tmpDir}/hedera.key`) -
fs.cpSync(`${stagingDir}/keys/${Templates.renderTLSPemPublicKeyFile(nodeId)}`, - `${tmpDir}/hedera.crt`) + for (const nodeId of nodeIds) { + const srcFiles = [] + srcFiles.push(path.join(stagingDir, 'keys', Templates.renderTLSPemPrivateKeyFile(nodeId))) + srcFiles.push(path.join(stagingDir, 'keys', Templates.renderTLSPemPublicKeyFile(nodeId))) - const srcFiles = [] - srcFiles.push(`${tmpDir}/hedera.key`) - srcFiles.push(`${tmpDir}/hedera.crt`) - - return this.copyFiles(podName, srcFiles, constants.HEDERA_HAPI_PATH) + for (const srcFile of srcFiles) { + const fileContents = fs.readFileSync(srcFile) + const fileName = path.basename(srcFile) + data[fileName] = Base64.encode(fileContents) + } + } + if (!await this.k8.createSecret( + 'network-node-hapi-app-secrets', + this._getNamespace(), 'Opaque', data, + undefined, true)) { + throw new FullstackTestingError('failed to create secret for TLS keys') + } } catch (e) { - throw new FullstackTestingError(`failed to copy TLS keys to pod '${podName}': ${e.message}`, e) + this.logger.error('failed to copy TLS keys to secret', e) + throw new FullstackTestingError('failed to copy TLS keys to secret', e) } } + /** + * @param {string} podName + * @param {string} destPath + * @param {string} [mode] + * @param {boolean} [recursive] + * @param {string} [container] + * @returns {Promise} + */ async setPathPermission (podName, destPath, mode = '0755', recursive = true, container = constants.ROOT_CONTAINER) { if (!podName) throw new MissingArgumentError('podName is required') if (!destPath) throw new MissingArgumentError('destPath is required') @@ -249,6 +244,10 @@ export class PlatformInstaller { return true } + /** + * @param {string} podName + * @returns {Promise} + */ async setPlatformDirPermissions (podName) { const self = this if (!podName) throw new MissingArgumentError('podName is required') @@ -269,113 +268,18 @@ export class PlatformInstaller { } /** - * Prepares config.txt file for the node - * @param nodeIDs node IDs - * @param destPath path where config.txt should be written - * @param releaseTag release tag e.g. 
v0.42.0 - * @param template path to the confit.template file - * @param chainId chain ID (298 for local network) - * @returns {Promise} - */ - async prepareConfigTxt (nodeIDs, destPath, releaseTag, chainId = constants.HEDERA_CHAIN_ID, template = `${constants.RESOURCES_DIR}/templates/config.template`, appName = constants.HEDERA_APP_NAME) { - if (!nodeIDs || nodeIDs.length === 0) throw new MissingArgumentError('list of node IDs is required') - if (!destPath) throw new MissingArgumentError('destPath is required') - if (!template) throw new MissingArgumentError('config templatePath is required') - if (!releaseTag) throw new MissingArgumentError('release tag is required') - - if (!fs.existsSync(path.dirname(destPath))) throw new IllegalArgumentError(`destPath does not exist: ${destPath}`, destPath) - if (!fs.existsSync(template)) throw new IllegalArgumentError(`config templatePath does not exist: ${template}`, destPath) - - // init variables - const internalPort = constants.HEDERA_NODE_INTERNAL_GOSSIP_PORT - const externalPort = constants.HEDERA_NODE_EXTERNAL_GOSSIP_PORT - const nodeStakeAmount = constants.HEDERA_NODE_DEFAULT_STAKE_AMOUNT - - const releaseVersion = semver.parse(releaseTag, { includePrerelease: true }) - - try { - const networkNodeServicesMap = await this.accountManager.getNodeServiceMap(this._getNamespace()) - /** @type {string[]} */ - const configLines = [] - configLines.push(`swirld, ${chainId}`) - configLines.push(`app, ${appName}`) - - let nodeSeq = 0 - for (const nodeId of nodeIDs) { - const networkNodeServices = networkNodeServicesMap.get(nodeId) - const nodeName = nodeId - const nodeNickName = nodeId - - const internalIP = Templates.renderFullyQualifiedNetworkPodName(this._getNamespace(), nodeId) - const externalIP = Templates.renderFullyQualifiedNetworkSvcName(this._getNamespace(), nodeId) - - const account = networkNodeServices.accountId - if (releaseVersion.minor >= 40) { - configLines.push(`address, ${nodeSeq}, ${nodeNickName}, ${nodeName}, ${nodeStakeAmount}, ${internalIP}, ${internalPort}, ${externalIP}, ${externalPort}, ${account}`) - } else { - configLines.push(`address, ${nodeSeq}, ${nodeName}, ${nodeStakeAmount}, ${internalIP}, ${internalPort}, ${externalIP}, ${externalPort}, ${account}`) - } - - nodeSeq += 1 - } - - if (releaseVersion.minor >= 41) { - configLines.push(`nextNodeId, ${nodeSeq}`) - } - - fs.writeFileSync(destPath, configLines.join('\n')) - - return configLines - } catch (e) { - throw new FullstackTestingError('failed to generate config.txt', e) - } - } - - /** - * Return a list of task to perform node installation - * - * It assumes the staging directory has the following files and resources: - * ${staging}/keys/s-<nodeId>.key: signing key for a node - * ${staging}/keys/s-<nodeId>.crt: signing cert for a node - * ${staging}/keys/a-<nodeId>.key: agreement key for a node - * ${staging}/keys/a-<nodeId>.crt: agreement cert for a node - * ${staging}/keys/hedera-<nodeId>.key: gRPC TLS key for a node - * ${staging}/keys/hedera-<nodeId>.crt: gRPC TLS cert for a node - * ${staging}/properties: contains all properties files - * ${staging}/log4j2.xml: LOG4J file - * ${staging}/settings.txt: settings.txt file for the network - * ${staging}/config.txt: config.txt file for the network + * Return a list of tasks to perform node directory setup * * @param podName name of the pod - * @param buildZipFile path to the platform build.zip file - * @param stagingDir staging directory path - * @param nodeIds list of node ids - * @param keyFormat key format (pfx or pem) - * @param force force flag * @returns {Listr} */
- taskInstall (podName, buildZipFile, stagingDir, nodeIds, keyFormat = constants.KEY_FORMAT_PEM, force = false) { + taskSetup (podName) { const self = this return new Listr([ - { - title: 'Copy Gossip keys', - task: (_, task) => - self.copyGossipKeys(podName, stagingDir, nodeIds, keyFormat) - }, - { - title: 'Copy TLS keys', - task: (_, task) => - self.copyTLSKeys(podName, stagingDir, keyFormat) - }, - { - title: 'Copy configuration files', - task: (_, task) => - self.copyPlatformConfigFiles(podName, stagingDir) - }, { title: 'Set file permissions', - task: (_, task) => - self.setPlatformDirPermissions(podName) + task: async (_, task) => + await self.setPlatformDirPermissions(podName) } ], { @@ -383,7 +287,50 @@ rendererOptions: { collapseSubtasks: false } + }) + } + + /** + * Return a list of tasks to copy the node keys to the staging directory + * + * It assumes the staging directory has the following files and resources: + * ${staging}/keys/s-public-<nodeId>.pem: public signing key for a node + * ${staging}/keys/s-private-<nodeId>.pem: private signing key for a node + * ${staging}/keys/a-public-<nodeId>.pem: public agreement key for a node + * ${staging}/keys/a-private-<nodeId>.pem: private agreement key for a node + * ${staging}/keys/hedera-<nodeId>.key: gRPC TLS key for a node + * ${staging}/keys/hedera-<nodeId>.crt: gRPC TLS cert for a node + * + * @param stagingDir staging directory path + * @param nodeIds list of node ids + * @returns {Listr} + */ + copyNodeKeys (stagingDir, nodeIds) { + const self = this + const subTasks = [] + subTasks.push({ + title: 'Copy TLS keys', + task: async (_, task) => + await self.copyTLSKeys(nodeIds, stagingDir) + }) + + for (const nodeId of nodeIds) { + subTasks.push({ + title: `Node: ${chalk.yellow(nodeId)}`, + task: () => new Listr([{ + title: 'Copy Gossip keys', + task: async (_, task) => + await self.copyGossipKeys(nodeId, stagingDir, nodeIds) + } + ], + { + concurrent: false, + rendererOptions: { + collapseSubtasks: false + } + }) + }) } - ) + return subTasks } } diff --git a/src/core/profile_manager.mjs b/src/core/profile_manager.mjs index 96f427743..bc399de3e 100644 --- a/src/core/profile_manager.mjs +++ b/src/core/profile_manager.mjs @@ -14,24 +14,26 @@ * limitations under the License. * */ +'use strict' import fs from 'fs' import path from 'path' import { FullstackTestingError, IllegalArgumentError, MissingArgumentError } from './errors.mjs' import * as yaml from 'js-yaml' import { flags } from '../commands/index.mjs' -import { constants, helpers } from './index.mjs' +import { constants, helpers, Templates } from './index.mjs' import dot from 'dot-object' import { getNodeAccountMap } from './helpers.mjs' +import * as semver from 'semver' +import { readFile, writeFile } from 'fs/promises' const consensusSidecars = [ 'recordStreamUploader', 'eventStreamUploader', 'backupUploader', 'accountBalanceUploader', 'otelCollector'] export class ProfileManager { /** - * Constructor - * @param logger an instance of core/Logger - * @param configManager an instance of core/ConfigManager - * @param cacheDir cache directory where the values file will be written. A yaml file named <profileName>.yaml is created. + * @param {Logger} logger - an instance of core/Logger + * @param {ConfigManager} configManager - an instance of core/ConfigManager + * @param {string} cacheDir - cache directory where the values file will be written. A yaml file named <profileName>.yaml is created.
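Before moving into ProfileManager, one aside on the key-copy methods above: copyGossipKeys and copyTLSKeys share a single pattern, reading each staged PEM file and storing its base64-encoded contents in a Secret data map keyed by file name. A minimal sketch of that step (the helper name is assumed, and key files are treated as text):

```js
import fs from 'fs'
import path from 'path'
import * as Base64 from 'js-base64'

// Illustrative helper (not part of the diff): build the { fileName: base64(contents) }
// map that k8.createSecret() receives as the Secret's data payload.
function secretDataFromFiles (srcFiles) {
  const data = {}
  for (const srcFile of srcFiles) {
    const fileContents = fs.readFileSync(srcFile, 'utf8') // PEM/CRT key files are text
    data[path.basename(srcFile)] = Base64.encode(fileContents)
  }
  return data
}
```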
*/ constructor (logger, configManager, cacheDir = constants.SOLO_VALUES_DIR) { if (!logger) throw new MissingArgumentError('An instance of core/Logger is required') @@ -39,12 +41,18 @@ export class ProfileManager { this.logger = logger this.configManager = configManager + + /** @type {Map} */ this.profiles = new Map() cacheDir = path.resolve(cacheDir) this.cacheDir = cacheDir } + /** + * @param {boolean} [forceReload] + * @returns {Map} + */ loadProfiles (forceReload = false) { const profileFile = this.configManager.getFlag(flags.profileFile) if (!profileFile) throw new MissingArgumentError('profileFile is required') @@ -72,6 +80,10 @@ export class ProfileManager { return this.profiles } + /** + * @param {string} profileName + * @returns {Object} + */ getProfile (profileName) { if (!profileName) throw new MissingArgumentError('profileName is required') if (!this.profiles || this.profiles.size <= 0) { @@ -84,10 +96,10 @@ export class ProfileManager { /** * Set value in the yaml object - * @param itemPath item path in the yaml - * @param value value to be set - * @param yamlRoot root of the yaml object - * @return {*} + * @param {string} itemPath - item path in the yaml + * @param {*} value - value to be set + * @param {Object} yamlRoot - root of the yaml object + * @returns {Object} * @private */ _setValue (itemPath, value, yamlRoot) { @@ -127,9 +139,9 @@ export class ProfileManager { /** * Set items for the chart - * @param itemPath item path in the yaml, if empty then root of the yaml object will be used - * @param items the element object - * @param yamlRoot root of the yaml object to update + * @param {string} itemPath - item path in the yaml, if empty then root of the yaml object will be used + * @param {*} items - the element object + * @param {Object} yamlRoot - root of the yaml object to update * @private */ _setChartItems (itemPath, items, yamlRoot) { @@ -153,6 +165,12 @@ export class ProfileManager { } } + /** + * @param {Object} profile + * @param {string[]} nodeIds + * @param {Object} yamlRoot + * @returns {Object} + */ resourcesForConsensusPod (profile, nodeIds, yamlRoot) { if (!profile) throw new MissingArgumentError('profile is required') @@ -164,6 +182,46 @@ export class ProfileManager { this._setValue(`hedera.nodes.${nodeIndex}.accountId`, accountMap.get(nodeIds[nodeIndex]), yamlRoot) } + const stagingDir = Templates.renderStagingDir( + this.configManager.getFlag(flags.cacheDir), + this.configManager.getFlag(flags.releaseTag) + ) + + if (!fs.existsSync(stagingDir)) { + fs.mkdirSync(stagingDir, { recursive: true }) + } + + const configTxtPath = this.prepareConfigTxt( + this.configManager.getFlag(flags.namespace), + accountMap, + stagingDir, + this.configManager.getFlag(flags.releaseTag), + this.configManager.getFlag(flags.app), + this.configManager.getFlag(flags.chainId)) + + for (const flag of flags.nodeConfigFileFlags.values()) { + const filePath = this.configManager.getFlag(flag) + if (!filePath) { + throw new FullstackTestingError(`Configuration file path is missing for: ${flag.name}`) + } + + const fileName = path.basename(filePath) + const destPath = path.join(stagingDir, 'templates', fileName) + this.logger.debug(`Copying configuration file to staging: ${filePath} -> ${destPath}`) + + fs.cpSync(filePath, destPath, { force: true }) + } + + this._setFileContentsAsValue('hedera.configMaps.configTxt', configTxtPath, yamlRoot) + this._setFileContentsAsValue('hedera.configMaps.log4j2Xml', path.join(stagingDir, 'templates', 'log4j2.xml'), yamlRoot) + 
this._setFileContentsAsValue('hedera.configMaps.settingsTxt', path.join(stagingDir, 'templates', 'settings.txt'), yamlRoot) + this._setFileContentsAsValue('hedera.configMaps.applicationProperties', path.join(stagingDir, 'templates', 'application.properties'), yamlRoot) + this._setFileContentsAsValue('hedera.configMaps.apiPermissionsProperties', path.join(stagingDir, 'templates', 'api-permission.properties'), yamlRoot) + this._setFileContentsAsValue('hedera.configMaps.bootstrapProperties', path.join(stagingDir, 'templates', 'bootstrap.properties'), yamlRoot) + if (this.configManager.getFlag(flags.applicationEnv)) { + this._setFileContentsAsValue('hedera.configMaps.applicationEnv', this.configManager.getFlag(flags.applicationEnv), yamlRoot) + } + if (profile.consensus) { // set default for consensus pod this._setChartItems('defaults.root', profile.consensus.root, yamlRoot) @@ -177,6 +235,11 @@ export class ProfileManager { return yamlRoot } + /** + * @param {Object} profile + * @param {Object} yamlRoot + * @returns {void} + */ resourcesForHaProxyPod (profile, yamlRoot) { if (!profile) throw new MissingArgumentError('profile is required') if (!profile.haproxy) return // use chart defaults @@ -184,18 +247,33 @@ export class ProfileManager { return this._setChartItems('defaults.haproxy', profile.haproxy, yamlRoot) } + /** + * @param {Object} profile + * @param {Object} yamlRoot + * @returns {void} + */ resourcesForEnvoyProxyPod (profile, yamlRoot) { if (!profile) throw new MissingArgumentError('profile is required') if (!profile.envoyProxy) return // use chart defaults return this._setChartItems('defaults.envoyProxy', profile.envoyProxy, yamlRoot) } + /** + * @param {Object} profile + * @param {Object} yamlRoot + * @returns {void} + */ resourcesForHederaExplorerPod (profile, yamlRoot) { if (!profile) throw new MissingArgumentError('profile is required') if (!profile.explorer) return return this._setChartItems('hedera-explorer', profile.explorer, yamlRoot) } + /** + * @param {Object} profile + * @param {Object} yamlRoot + * @returns {Object} + */ resourcesForMinioTenantPod (profile, yamlRoot) { if (!profile) throw new MissingArgumentError('profile is required') if (!profile.minio || !profile.minio.tenant) return // use chart defaults @@ -217,10 +295,9 @@ export class ProfileManager { /** * Prepare a values file for FST Helm chart * @param {string} profileName resource profile name - * @param {string} applicationEnvFilePath path to the application.env file - * @return {Promise} return the full path to the values file + * @returns {Promise} return the full path to the values file */ - prepareValuesForFstChart (profileName, applicationEnvFilePath = '') { + prepareValuesForFstChart (profileName) { if (!profileName) throw new MissingArgumentError('profileName is required') const profile = this.getProfile(profileName) @@ -234,12 +311,50 @@ export class ProfileManager { this.resourcesForEnvoyProxyPod(profile, yamlRoot) this.resourcesForMinioTenantPod(profile, yamlRoot) - if (applicationEnvFilePath) { - this._setFileContentsAsValue('hedera.configMaps.applicationEnv', applicationEnvFilePath, yamlRoot) + // write the yaml + const cachedValuesFile = path.join(this.cacheDir, `fst-${profileName}.yaml`) + return new Promise((resolve, reject) => { + fs.writeFile(cachedValuesFile, yaml.dump(yamlRoot), (err) => { + if (err) { + reject(err) + } + + resolve(cachedValuesFile) + }) + }) + } + + /** + * @param {PathLike|FileHandle} applicationPropertiesPath + * @returns {Promise} + */ + async bumpHederaConfigVersion 
(applicationPropertiesPath) { + const lines = (await readFile(applicationPropertiesPath, 'utf-8')).split('\n') + + for (const line of lines) { + if (line.startsWith('hedera.config.version=')) { + const version = parseInt(line.split('=')[1]) + 1 + lines[lines.indexOf(line)] = `hedera.config.version=${version}` + break + } } + await writeFile(applicationPropertiesPath, lines.join('\n')) + } + + /** + * @param {string} configTxtPath + * @param {string} applicationPropertiesPath + * @returns {Promise} + */ + async prepareValuesForNodeAdd (configTxtPath, applicationPropertiesPath) { + const yamlRoot = {} + this._setFileContentsAsValue('hedera.configMaps.configTxt', configTxtPath, yamlRoot) + await this.bumpHederaConfigVersion(applicationPropertiesPath) + this._setFileContentsAsValue('hedera.configMaps.applicationProperties', applicationPropertiesPath, yamlRoot) + // write the yaml - const cachedValuesFile = path.join(this.cacheDir, `fst-${profileName}.yaml`) + const cachedValuesFile = path.join(this.cacheDir, 'fst-node-add.yaml') return new Promise((resolve, reject) => { fs.writeFile(cachedValuesFile, yaml.dump(yamlRoot), (err) => { if (err) { @@ -253,8 +368,8 @@ export class ProfileManager { /** * Prepare a values file for rpc-relay Helm chart - * @param profileName resource profile name - * @return {Promise} return the full path to the values file + * @param {string} profileName - resource profile name + * @returns {Promise} return the full path to the values file */ prepareValuesForRpcRelayChart (profileName) { if (!profileName) throw new MissingArgumentError('profileName is required') @@ -280,8 +395,8 @@ export class ProfileManager { /** * Prepare a values file for mirror-node Helm chart - * @param profileName resource profile name - * @return {Promise} return the full path to the values file + * @param {string} profileName - resource profile name + * @returns {Promise} return the full path to the values file */ prepareValuesForMirrorNodeChart (profileName) { if (!profileName) throw new MissingArgumentError('profileName is required') @@ -320,13 +435,72 @@ export class ProfileManager { /** * Writes the contents of a file as a value for the given nested item path in the yaml object - * @param {string} itemPath nested item path in the yaml object to store the file contents - * @param {string} valueFilePath path to the file whose contents will be stored in the yaml object - * @param {Object} yamlRoot root of the yaml object + * @param {string} itemPath - nested item path in the yaml object to store the file contents + * @param {string} valueFilePath - path to the file whose contents will be stored in the yaml object + * @param {Object} yamlRoot - root of the yaml object * @private */ _setFileContentsAsValue (itemPath, valueFilePath, yamlRoot) { const fileContents = fs.readFileSync(valueFilePath, 'utf8') this._setValue(itemPath, fileContents, yamlRoot) } + + /** + * Prepares config.txt file for the node + * @param {string} namespace - namespace where the network is deployed + * @param {Map} nodeAccountMap - the map of node IDs to account IDs + * @param {string} destPath - path to the destination directory to write the config.txt file + * @param {string} releaseTag - release tag e.g. 
v0.42.0 + * @param {string} [appName] - the app name (default: HederaNode.jar) + * @param {string} [chainId] - chain ID (298 for local network) + * @returns {string} the config.txt file path + */ + prepareConfigTxt (namespace, nodeAccountMap, destPath, releaseTag, appName = constants.HEDERA_APP_NAME, chainId = constants.HEDERA_CHAIN_ID) { + if (!nodeAccountMap || nodeAccountMap.size === 0) throw new MissingArgumentError('nodeAccountMap the map of node IDs to account IDs is required') + if (!releaseTag) throw new MissingArgumentError('release tag is required') + + if (!fs.existsSync(destPath)) throw new IllegalArgumentError(`config destPath does not exist: ${destPath}`, destPath) + + // init variables + const internalPort = constants.HEDERA_NODE_INTERNAL_GOSSIP_PORT + const externalPort = constants.HEDERA_NODE_EXTERNAL_GOSSIP_PORT + const nodeStakeAmount = constants.HEDERA_NODE_DEFAULT_STAKE_AMOUNT + + const releaseVersion = semver.parse(releaseTag, { includePrerelease: true }) + + try { + /** @type {string[]} */ + const configLines = [] + configLines.push(`swirld, ${chainId}`) + configLines.push(`app, ${appName}`) + + let nodeSeq = 0 + for (const nodeID of nodeAccountMap.keys()) { + const nodeName = nodeID + + const internalIP = Templates.renderFullyQualifiedNetworkPodName(namespace, nodeName) + const externalIP = Templates.renderFullyQualifiedNetworkSvcName(namespace, nodeName) + + const account = nodeAccountMap.get(nodeID) + if (releaseVersion.minor >= 40) { + configLines.push(`address, ${nodeSeq}, ${nodeSeq}, ${nodeName}, ${nodeStakeAmount}, ${internalIP}, ${internalPort}, ${externalIP}, ${externalPort}, ${account}`) + } else { + configLines.push(`address, ${nodeSeq}, ${nodeName}, ${nodeStakeAmount}, ${internalIP}, ${internalPort}, ${externalIP}, ${externalPort}, ${account}`) + } + + nodeSeq += 1 + } + + if (releaseVersion.minor >= 41) { + configLines.push(`nextNodeId, ${nodeSeq}`) + } + + const configFilePath = path.join(destPath, 'config.txt') + fs.writeFileSync(configFilePath, configLines.join('\n')) + + return configFilePath + } catch (e) { + throw new FullstackTestingError('failed to generate config.txt', e) + } + } } diff --git a/src/core/shell_runner.mjs b/src/core/shell_runner.mjs index aac204b88..fe545d7b7 100644 --- a/src/core/shell_runner.mjs +++ b/src/core/shell_runner.mjs @@ -14,10 +14,14 @@ * limitations under the License. * */ +'use strict' import { spawn } from 'child_process' import chalk from 'chalk' export class ShellRunner { + /** + * @param {Logger} logger + */ constructor (logger) { if (!logger) throw new Error('An instance of core/Logger is required') this.logger = logger @@ -25,9 +29,9 @@ export class ShellRunner { /** * Returns a promise that invokes the shell command - * @param {string} cmd shell command string + * @param {string} cmd - shell command string * @param {boolean} verbose - if true, the output will be shown in the console - * @returns {Promise} console output as an array of strings + * @returns {Promise} console output as an array of strings */ async run (cmd, verbose = false) { const self = this diff --git a/src/core/templates.mjs b/src/core/templates.mjs index c7f8e20c7..21214e46d 100644 --- a/src/core/templates.mjs +++ b/src/core/templates.mjs @@ -14,6 +14,7 @@ * limitations under the License. 
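Looping back to prepareConfigTxt above: for a single node on a v0.41+ release, the emitted config.txt takes roughly the shape below. The namespace, stake amount, gossip ports, and account ID are placeholders, not values taken from this diff.

```text
swirld, 298
app, HederaNode.jar
address, 0, 0, node1, <stakeAmount>, network-node1-0.network-node1.<namespace>.svc.cluster.local, <internalGossipPort>, network-node1-svc.<namespace>.svc.cluster.local, <externalGossipPort>, <accountId>
nextNodeId, 1
```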
* */ +'use strict' import * as x509 from '@peculiar/x509' import os from 'os' import path from 'path' @@ -21,62 +22,98 @@ import { DataValidationError, FullstackTestingError, IllegalArgumentError, Missi import { constants } from './index.mjs' export class Templates { + /** + * @param {string} nodeId + * @returns {string} + */ static renderNetworkPodName (nodeId) { return `network-${nodeId}-0` } + /** + * @param {string} nodeId + * @returns {string} + */ static renderNetworkSvcName (nodeId) { return `network-${nodeId}-svc` } + /** + * @param {string} svcName + * @returns {string} + */ static nodeIdFromNetworkSvcName (svcName) { return svcName.split('-').slice(1, -1).join('-') } + /** + * @param {string} nodeId + * @returns {string} + */ static renderNetworkHeadlessSvcName (nodeId) { return `network-${nodeId}` } /** - * Generate pfx node private key file name - * @param nodeId node ID + * @param {string} prefix + * @param {string} nodeId * @returns {string} */ - static renderGossipPfxPrivateKeyFile (nodeId) { - return `private-${nodeId}.pfx` - } - static renderGossipPemPrivateKeyFile (prefix, nodeId) { - // s-node0-key.pem return `${prefix}-private-${nodeId}.pem` } + /** + * @param {string} prefix + * @param {string} nodeId + * @returns {string} + */ static renderGossipPemPublicKeyFile (prefix, nodeId) { - // s-node0-cert.pem return `${prefix}-public-${nodeId}.pem` } + /** + * @param {string} nodeId + * @returns {string} + */ static renderTLSPemPrivateKeyFile (nodeId) { return `hedera-${nodeId}.key` } + /** + * @param {string} nodeId + * @returns {string} + */ static renderTLSPemPublicKeyFile (nodeId) { - // s-node0-cert.pem return `hedera-${nodeId}.crt` } + /** + * @param {string} prefix + * @param {string} nodeId + * @param {string} [suffix] + * @returns {string} + */ static renderNodeFriendlyName (prefix, nodeId, suffix = '') { const parts = [prefix, nodeId] if (suffix) parts.push(suffix) return parts.join('-') } + /** + * @param {string} podName + * @returns {string} + */ static extractNodeIdFromPodName (podName) { const parts = podName.split('-') if (parts.length !== 3) throw new DataValidationError(`pod name is malformed : ${podName}`, 3, parts.length) return parts[1].trim() } + /** + * @param {string} tag + * @returns {string} + */ static prepareReleasePrefix (tag) { if (!tag) throw new MissingArgumentError('tag cannot be empty') @@ -87,7 +124,7 @@ export class Templates { /** * renders the name to be used to store the new account key as a Kubernetes secret - * @param accountId the account ID, string or AccountId type + * @param {AccountId|string} accountId * @returns {string} the name of the Kubernetes secret to store the account key */ static renderAccountKeySecretName (accountId) { @@ -96,7 +133,7 @@ export class Templates { /** * renders the label selector to be used to fetch the new account key from the Kubernetes secret - * @param accountId the account ID, string or AccountId type + * @param {AccountId|string} accountId * @returns {string} the label selector of the Kubernetes secret to retrieve the account key */ static renderAccountKeySecretLabelSelector (accountId) { return `fullstack.hedera.com/account-id=${accountId.toString()}` @@ -104,7 +141,7 @@ export class Templates { /** * renders the label object to be used to store the new account key in the Kubernetes secret - * @param accountId the account ID, string or AccountId type + * @param {AccountId|string} accountId * @returns {{'fullstack.hedera.com/account-id': string}} the label object to be used to * store the new 
account key in the Kubernetes secret */ @@ -114,6 +151,15 @@ export class Templates { } } + /** + * @param {string} nodeId + * @param {string} [state] + * @param {string} [locality] + * @param {string} [org] + * @param {string} [orgUnit] + * @param {string} [country] + * @returns {x509.Name} + */ static renderDistinguishedName (nodeId, state = 'TX', locality = 'Richardson', @@ -124,14 +170,16 @@ export class Templates { return new x509.Name(`CN=${nodeId},ST=${state},L=${locality},O=${org},OU=${orgUnit},C=${country}`) } - static renderStagingDir (configManager, flags) { - if (!configManager) throw new MissingArgumentError('configManager is required') - const cacheDir = configManager.getFlag(flags.cacheDir) + /** + * @param {string} cacheDir + * @param {string} releaseTag + * @returns {string} + */ + static renderStagingDir (cacheDir, releaseTag) { if (!cacheDir) { throw new IllegalArgumentError('cacheDir cannot be empty') } - const releaseTag = configManager.getFlag(flags.releaseTag) if (!releaseTag) { throw new IllegalArgumentError('releaseTag cannot be empty') } @@ -141,9 +189,15 @@ export class Templates { throw new IllegalArgumentError('releasePrefix cannot be empty') } - return path.resolve(`${cacheDir}/${releasePrefix}/staging/${releaseTag}`) + return path.resolve(path.join(cacheDir, releasePrefix, 'staging', releaseTag)) } + /** + * @param {string} dep + * @param {NodeJS.Platform} [osPlatform] + * @param {string} [installationDir] + * @returns {string} + */ static installationPath ( dep, osPlatform = os.platform(), @@ -168,16 +222,52 @@ export class Templates { } } + /** + * @param {string} namespace + * @param {string} nodeId + * @returns {string} + */ static renderFullyQualifiedNetworkPodName (namespace, nodeId) { return `${Templates.renderNetworkPodName(nodeId)}.${Templates.renderNetworkHeadlessSvcName(nodeId)}.${namespace}.svc.cluster.local` } + /** + * @param {string} namespace + * @param {string} nodeId + * @returns {string} + */ static renderFullyQualifiedNetworkSvcName (namespace, nodeId) { return `${Templates.renderNetworkSvcName(nodeId)}.${namespace}.svc.cluster.local` } + /** + * @param {string} svcName + * @returns {string} + */ static nodeIdFromFullyQualifiedNetworkSvcName (svcName) { const parts = svcName.split('.') return this.nodeIdFromNetworkSvcName(parts[0]) } + + /** + * @param {string} nodeId + * @returns {number} + */ + static nodeNumberFromNodeId (nodeId) { + for (let i = nodeId.length - 1; i > 0; i--) { + if (isNaN(nodeId[i])) { + return parseInt(nodeId.substring(i + 1, nodeId.length)) + } + } + } + + static renderGossipKeySecretName (nodeId) { + return `network-${nodeId}-keys-secrets` + } + + static renderGossipKeySecretLabelObject (nodeId) { + return { + 'fullstack.hedera.com/node-name': nodeId + } + } } diff --git a/src/core/zippy.mjs b/src/core/zippy.mjs index 0c362405b..512d9bb08 100644 --- a/src/core/zippy.mjs +++ b/src/core/zippy.mjs @@ -14,6 +14,7 @@ * limitations under the License. 
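A brief behavioral note on Templates.nodeNumberFromNodeId introduced above: it scans the ID from the right and parses the digits that follow the last non-digit character. A small sketch of what that implies (example inputs):

```js
// Behavior sketch for Templates.nodeNumberFromNodeId (example inputs):
Templates.nodeNumberFromNodeId('node1')  // -> 1
Templates.nodeNumberFromNodeId('node12') // -> 12
// Edge cases worth knowing: an ID with no trailing digits yields NaN,
// and an all-digit ID falls out of the loop and yields undefined.
```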
* */ +'use strict' import { FullstackTestingError, IllegalArgumentError, MissingArgumentError } from './errors.mjs' import fs from 'fs' import AdmZip from 'adm-zip' @@ -22,6 +23,9 @@ import chalk from 'chalk' import path from 'path' export class Zippy { + /** + * @param {Logger} logger + */ constructor (logger) { if (!logger) throw new Error('An instance of core/Logger is required') this.logger = logger @@ -29,9 +33,9 @@ export class Zippy { /** * Zip a file or directory - * @param {string} srcPath path to a file or directory - * @param {string} destPath path to the output zip file - * @param {boolean} verbose if true, log the progress + * @param {string} srcPath - path to a file or directory + * @param {string} destPath - path to the output zip file + * @param {boolean} [verbose] - if true, log the progress * @returns {Promise} path to the output zip file */ async zip (srcPath, destPath, verbose = false) { @@ -57,6 +61,12 @@ export class Zippy { } } + /** + * @param {string} srcPath + * @param {string} destPath + * @param {boolean} [verbose] + * @returns {Promise} + */ async unzip (srcPath, destPath, verbose = false) { const self = this @@ -88,6 +98,11 @@ export class Zippy { } } + /** + * @param {string} srcPath + * @param {string} destPath + * @returns {Promise} + */ async tar (srcPath, destPath) { if (!srcPath) throw new MissingArgumentError('srcPath is required') if (!destPath) throw new MissingArgumentError('destPath is required') @@ -107,6 +122,11 @@ export class Zippy { } } + /** + * @param {string} srcPath + * @param {string} destPath + * @returns {Promise} + */ async untar (srcPath, destPath) { if (!srcPath) throw new MissingArgumentError('srcPath is required') if (!destPath) throw new MissingArgumentError('destPath is required') diff --git a/test/data/build-v0.54.0-alpha.4.zip b/test/data/build-v0.54.0-alpha.4.zip new file mode 100644 index 000000000..3e5660cb6 Binary files /dev/null and b/test/data/build-v0.54.0-alpha.4.zip differ diff --git a/test/data/local-ptt-app-values.yaml b/test/data/local-ptt-app-values.yaml deleted file mode 100644 index 808dfca8e..000000000 --- a/test/data/local-ptt-app-values.yaml +++ /dev/null @@ -1,5 +0,0 @@ -defaults: - root: - extraEnv: - - name: JAVA_MAIN_CLASS - value: com.swirlds.platform.Browser diff --git a/test/e2e/commands/account.test.mjs b/test/e2e/commands/account.test.mjs index 20b92fdd4..9211157ee 100644 --- a/test/e2e/commands/account.test.mjs +++ b/test/e2e/commands/account.test.mjs @@ -47,8 +47,7 @@ describe('AccountCommand', () => { const argv = getDefaultArgv() argv[flags.namespace.name] = namespace argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG - argv[flags.keyFormat.name] = constants.KEY_FORMAT_PEM - argv[flags.nodeIDs.name] = 'node0' + argv[flags.nodeIDs.name] = 'node1' argv[flags.generateGossipKeys.name] = true argv[flags.generateTlsKeys.name] = true argv[flags.clusterName.name] = TEST_CLUSTER @@ -56,11 +55,12 @@ describe('AccountCommand', () => { // set the env variable SOLO_FST_CHARTS_DIR if developer wants to use local FST charts argv[flags.chartDirectory.name] = process.env.SOLO_FST_CHARTS_DIR ? 
process.env.SOLO_FST_CHARTS_DIR : undefined const bootstrapResp = bootstrapNetwork(testName, argv) + const accountCmd = new AccountCommand(bootstrapResp.opts, testSystemAccounts) + bootstrapResp.cmd.accountCmd = accountCmd const k8 = bootstrapResp.opts.k8 const accountManager = bootstrapResp.opts.accountManager const configManager = bootstrapResp.opts.configManager const nodeCmd = bootstrapResp.cmd.nodeCmd - const accountCmd = new AccountCommand(bootstrapResp.opts, testSystemAccounts) afterAll(async () => { await getNodeLogs(k8, namespace) @@ -106,6 +106,7 @@ describe('AccountCommand', () => { nodeCmd.logger.info(`Fetching account keys: accountId ${accountId}`) const keys = await accountManager.getAccountKeys(accountId) nodeCmd.logger.info(`Fetched account keys: accountId ${accountId}`) + expect(keys.length).not.toEqual(0) expect(keys[0].toString()).not.toEqual(genesisKey.toString()) }, 20000) } diff --git a/test/e2e/commands/cluster.test.mjs b/test/e2e/commands/cluster.test.mjs index 41b4d0939..36c07572b 100644 --- a/test/e2e/commands/cluster.test.mjs +++ b/test/e2e/commands/cluster.test.mjs @@ -34,7 +34,7 @@ import { logging } from '../../../src/core/index.mjs' import { flags } from '../../../src/commands/index.mjs' -import { getNodeLogs, sleep } from '../../../src/core/helpers.mjs' +import { sleep } from '../../../src/core/helpers.mjs' import * as version from '../../../version.mjs' describe('ClusterCommand', () => { @@ -47,8 +47,7 @@ describe('ClusterCommand', () => { const argv = getDefaultArgv() argv[flags.namespace.name] = namespace argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG - argv[flags.keyFormat.name] = constants.KEY_FORMAT_PEM - argv[flags.nodeIDs.name] = 'node0' + argv[flags.nodeIDs.name] = 'node1' argv[flags.generateGossipKeys.name] = true argv[flags.generateTlsKeys.name] = true argv[flags.clusterName.name] = TEST_CLUSTER @@ -59,16 +58,19 @@ describe('ClusterCommand', () => { const bootstrapResp = bootstrapTestVariables(testName, argv) const k8 = bootstrapResp.opts.k8 - const accountManager = bootstrapResp.opts.accountManager const configManager = bootstrapResp.opts.configManager const chartManager = bootstrapResp.opts.chartManager const clusterCmd = bootstrapResp.cmd.clusterCmd afterAll(async () => { - await getNodeLogs(k8, namespace) await k8.deleteNamespace(namespace) - await accountManager.close() + argv[flags.clusterSetupNamespace.name] = constants.FULLSTACK_SETUP_NAMESPACE + configManager.update(argv, true) + await clusterCmd.setup(argv) // restore fullstack-cluster-setup for other e2e tests to leverage + do { + await sleep(5000) + } while (!await chartManager.isChartInstalled(constants.FULLSTACK_SETUP_NAMESPACE, constants.FULLSTACK_CLUSTER_SETUP_CHART)) }, 180000) beforeEach(() => { diff --git a/test/e2e/commands/mirror_node.test.mjs b/test/e2e/commands/mirror_node.test.mjs index 17dbee580..1d94da56b 100644 --- a/test/e2e/commands/mirror_node.test.mjs +++ b/test/e2e/commands/mirror_node.test.mjs @@ -26,9 +26,7 @@ import { } from '@jest/globals' import { flags } from '../../../src/commands/index.mjs' import { - constants -} from '../../../src/core/index.mjs' -import { + accountCreationShouldSucceed, balanceQueryShouldSucceed, bootstrapNetwork, getDefaultArgv, @@ -48,9 +46,8 @@ describe('MirrorNodeCommand', () => { const argv = getDefaultArgv() argv[flags.namespace.name] = namespace argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG - argv[flags.keyFormat.name] = constants.KEY_FORMAT_PEM - argv[flags.nodeIDs.name] = 'node0' // use a single node to 
reduce resource during e2e tests + argv[flags.nodeIDs.name] = 'node1' // use a single node to reduce resource during e2e tests argv[flags.generateGossipKeys.name] = true argv[flags.generateTlsKeys.name] = true argv[flags.clusterName.name] = TEST_CLUSTER @@ -80,7 +77,7 @@ describe('MirrorNodeCommand', () => { await accountManager.close() bootstrapResp.opts.logger.showUser(`------------------------- END: ${testName} ----------------------------`) - }, 180000) + }, 180_000) afterEach(async () => { await sleep(500) // give a few ticks so that connections can close @@ -89,14 +86,22 @@ describe('MirrorNodeCommand', () => { balanceQueryShouldSucceed(accountManager, mirrorNodeCmd, namespace) it('mirror node deploy should success', async () => { - expect.assertions(1) + expect.assertions(2) try { await expect(mirrorNodeCmd.deploy(argv)).resolves.toBeTruthy() } catch (e) { mirrorNodeCmd.logger.showUserError(e) expect(e).toBeNull() } - }, 600000) + + expect(mirrorNodeCmd.getUnusedConfigs(MirrorNodeCommand.DEPLOY_CONFIGS_NAME)).toEqual([ + flags.hederaExplorerTlsHostName.constName, + flags.hederaExplorerTlsLoadBalancerIp.constName, + flags.profileFile.constName, + flags.profileName.constName, + flags.tlsClusterIssuerType.constName + ]) + }, 600_000) it('mirror node API should be running', async () => { await accountManager.loadNodeClient(namespace) @@ -106,32 +111,32 @@ describe('MirrorNodeCommand', () => { const pods = await k8.getPodsByLabel(['app.kubernetes.io/name=hedera-explorer']) const explorerPod = pods[0] - portForwarder = await k8.portForward(explorerPod.metadata.name, 8080, 8080) - await sleep(2000) + portForwarder = await k8.portForward(explorerPod.metadata.name, 8_080, 8_080) + await sleep(2_000) // check if mirror node api server is running const apiURL = 'http://127.0.0.1:8080/api/v1/transactions' await expect(downloader.urlExists(apiURL)).resolves.toBeTruthy() - await sleep(2000) + await sleep(2_000) } catch (e) { mirrorNodeCmd.logger.showUserError(e) expect(e).toBeNull() } - }, 60000) + }, 60_000) it('Explorer GUI should be running', async () => { expect.assertions(1) try { const guiURL = 'http://127.0.0.1:8080/localnet/dashboard' await expect(downloader.urlExists(guiURL)).resolves.toBeTruthy() - await sleep(2000) + await sleep(2_000) mirrorNodeCmd.logger.debug('mirror node API and explorer GUI are running') } catch (e) { mirrorNodeCmd.logger.showUserError(e) expect(e).toBeNull() } - }, 60000) + }, 60_000) it('Create topic and submit message should success', async () => { expect.assertions(1) @@ -153,7 +158,11 @@ describe('MirrorNodeCommand', () => { mirrorNodeCmd.logger.showUserError(e) expect(e).toBeNull() } - }, 60000) + }, 60_000) + + // trigger some extra transactions to trigger MirrorNode to fetch the transactions + accountCreationShouldSucceed(accountManager, mirrorNodeCmd, namespace) + accountCreationShouldSucceed(accountManager, mirrorNodeCmd, namespace) it('Check submit message result should success', async () => { expect.assertions(1) @@ -187,16 +196,16 @@ describe('MirrorNodeCommand', () => { mirrorNodeCmd.logger.debug(`problem with request: ${e.message}`) }) req.end() // make the request - await sleep(2000) + await sleep(2_000) } - await sleep(1000) + await sleep(1_000) expect(receivedMessage).toBe(testMessage) await k8.stopPortForward(portForwarder) } catch (e) { mirrorNodeCmd.logger.showUserError(e) expect(e).toBeNull() } - }, 300000) + }, 300_000) it('mirror node destroy should success', async () => { expect.assertions(1) @@ -206,5 +215,23 @@ 
describe('MirrorNodeCommand', () => { mirrorNodeCmd.logger.showUserError(e) expect(e).toBeNull() } - }, 60000) + }, 60_000) + + it('should apply the mirror node version from the --mirror-node-version flag', async () => { + const mirrorNodeVersion = '0.111.1' + const customArgv = { [flags.mirrorNodeVersion.constName]: mirrorNodeVersion, ...argv } + + const valuesArg = await mirrorNodeCmd.prepareValuesArg(customArgv) + + expect(valuesArg).toContain(`--set global.image.tag=${mirrorNodeVersion}`) + }, 5_000) + + it('should not apply the mirror node version from the --mirror-node-version flag if left empty', async () => { + const mirrorNodeVersion = '' + const customArgv = { [flags.mirrorNodeVersion.constName]: mirrorNodeVersion, ...argv } + + const valuesArg = await mirrorNodeCmd.prepareValuesArg(customArgv) + + expect(valuesArg).not.toContain('--set global.image.tag=') + }, 5_000) }) diff --git a/test/e2e/commands/network.test.mjs b/test/e2e/commands/network.test.mjs index 966971d9a..80570aa8f 100644 --- a/test/e2e/commands/network.test.mjs +++ b/test/e2e/commands/network.test.mjs @@ -37,6 +37,7 @@ import * as version from '../../../version.mjs' import { getNodeLogs, sleep } from '../../../src/core/helpers.mjs' import path from 'path' import fs from 'fs' +import { NetworkCommand } from '../../../src/commands/network.mjs' describe('NetworkCommand', () => { const testName = 'network-cmd-e2e' @@ -47,8 +48,7 @@ describe('NetworkCommand', () => { const argv = getDefaultArgv() argv[flags.namespace.name] = namespace argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG - argv[flags.keyFormat.name] = constants.KEY_FORMAT_PEM - argv[flags.nodeIDs.name] = 'node0' + argv[flags.nodeIDs.name] = 'node1' argv[flags.generateGossipKeys.name] = true argv[flags.generateTlsKeys.name] = true argv[flags.deployMinio.name] = true @@ -65,6 +65,8 @@ describe('NetworkCommand', () => { const networkCmd = bootstrapResp.cmd.networkCmd const clusterCmd = bootstrapResp.cmd.clusterCmd + const initCmd = bootstrapResp.cmd.initCmd + const nodeCmd = bootstrapResp.cmd.nodeCmd afterAll(async () => { await getNodeLogs(k8, namespace) @@ -73,22 +75,39 @@ describe('NetworkCommand', () => { }, 180000) beforeAll(async () => { + await initCmd.init(argv) await clusterCmd.setup(argv) fs.mkdirSync(applicationEnvParentDirectory, { recursive: true }) fs.writeFileSync(applicationEnvFilePath, applicationEnvFileContents) }) + it('keys should be generated', async () => { + await expect(nodeCmd.keys(argv)).resolves.toBeTruthy() + }) + it('network deploy command should succeed', async () => { - expect.assertions(2) + expect.assertions(3) try { await expect(networkCmd.deploy(argv)).resolves.toBeTruthy() // check pod names should match expected values - await expect(k8.getPodByName('network-node0-0')) - .resolves.toHaveProperty('metadata.name', 'network-node0-0') + await expect(k8.getPodByName('network-node1-0')) + .resolves.toHaveProperty('metadata.name', 'network-node1-0') // get list of pvc using k8 listPvcsByNamespace function and print to log const pvcs = await k8.listPvcsByNamespace(namespace) networkCmd.logger.showList('PVCs', pvcs) + + expect(networkCmd.getUnusedConfigs(NetworkCommand.DEPLOY_CONFIGS_NAME)).toEqual([ + flags.apiPermissionProperties.constName, + flags.applicationEnv.constName, + flags.applicationProperties.constName, + flags.bootstrapProperties.constName, + flags.chainId.constName, + flags.log4j2Xml.constName, + flags.profileFile.constName, + flags.profileName.constName, + flags.settingTxt.constName + ]) } catch (e) { 
networkCmd.logger.showUserError(e) expect(e).toBeNull() @@ -107,6 +126,7 @@ describe('NetworkCommand', () => { it('network destroy should success', async () => { argv[flags.deletePvcs.name] = true argv[flags.deleteSecrets.name] = true + argv[flags.force.name] = true configManager.update(argv, true) expect.assertions(4) diff --git a/test/e2e/commands/node_add.test.mjs b/test/e2e/commands/node_add.test.mjs new file mode 100644 index 000000000..d498348df --- /dev/null +++ b/test/e2e/commands/node_add.test.mjs @@ -0,0 +1,24 @@ +/** + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * @jest-environment steps + */ +import { describe } from '@jest/globals' +import { testNodeAdd } from '../../test_add.mjs' + +describe('Node add with released hedera', () => { + const localBuildPath = '' + testNodeAdd(localBuildPath) +}, 180000) diff --git a/test/e2e/commands/node_add_local.test.mjs b/test/e2e/commands/node_add_local.test.mjs new file mode 100644 index 000000000..abbeea7f1 --- /dev/null +++ b/test/e2e/commands/node_add_local.test.mjs @@ -0,0 +1,24 @@ +/** + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * @jest-environment steps + */ +import { describe } from '@jest/globals' +import { testNodeAdd } from '../../test_add.mjs' + +describe('Node add with hedera local build', () => { + const localBuildPath = 'node1=../hedera-services/hedera-node/data/,../hedera-services/hedera-node/data,node3=../hedera-services/hedera-node/data' + testNodeAdd(localBuildPath) +}, 180000) diff --git a/test/e2e/commands/node_delete.test.mjs b/test/e2e/commands/node_delete.test.mjs new file mode 100644 index 000000000..9687fc168 --- /dev/null +++ b/test/e2e/commands/node_delete.test.mjs @@ -0,0 +1,85 @@ +/** + * Copyright (C) 2024 Hedera Hashgraph, LLC + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ * + * @jest-environment steps + */ +import { afterAll, describe, expect, it } from '@jest/globals' +import { flags } from '../../../src/commands/index.mjs' +import { + accountCreationShouldSucceed, + balanceQueryShouldSucceed, + bootstrapNetwork, + getDefaultArgv, + HEDERA_PLATFORM_VERSION_TAG +} from '../../test_util.js' +import { getNodeLogs, getTmpDir } from '../../../src/core/helpers.mjs' +import { NodeCommand } from '../../../src/commands/node.mjs' +import { HEDERA_HAPI_PATH, ROOT_CONTAINER } from '../../../src/core/constants.mjs' +import fs from 'fs' + +describe('Node delete', () => { + const namespace = 'node-delete' + const nodeId = 'node1' + const argv = getDefaultArgv() + argv[flags.nodeIDs.name] = 'node1,node2,node3,node4' + argv[flags.nodeID.name] = nodeId + argv[flags.generateGossipKeys.name] = true + argv[flags.generateTlsKeys.name] = true + argv[flags.persistentVolumeClaims.name] = true + // set the env variable SOLO_FST_CHARTS_DIR if developer wants to use local FST charts + argv[flags.chartDirectory.name] = process.env.SOLO_FST_CHARTS_DIR ? process.env.SOLO_FST_CHARTS_DIR : undefined + argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG + argv[flags.namespace.name] = namespace + const bootstrapResp = bootstrapNetwork(namespace, argv) + const nodeCmd = bootstrapResp.cmd.nodeCmd + const accountCmd = bootstrapResp.cmd.accountCmd + const k8 = bootstrapResp.opts.k8 + + afterAll(async () => { + await getNodeLogs(k8, namespace) + await k8.deleteNamespace(namespace) + }, 600000) + + it('should succeed with init command', async () => { + const status = await accountCmd.init(argv) + expect(status).toBeTruthy() + }, 450000) + + it('should delete a node from the network successfully', async () => { + await nodeCmd.delete(argv) + expect(nodeCmd.getUnusedConfigs(NodeCommand.DELETE_CONFIGS_NAME)).toEqual([ + flags.app.constName, + flags.devMode.constName, + flags.endpointType.constName + ]) + + await nodeCmd.accountManager.close() + }, 600000) + + balanceQueryShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace) + + accountCreationShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace) + + it('config.txt should no longer contain the removed node ID', async () => { + // read config.txt from the first node line by line; it should not contain the removed nodeId + const pods = await k8.getPodsByLabel(['fullstack.hedera.com/type=network-node']) + const podName = pods[0].metadata.name + const tmpDir = getTmpDir() + await k8.copyFrom(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/config.txt`, tmpDir) + const configTxt = fs.readFileSync(`${tmpDir}/config.txt`, 'utf8') + console.log('config.txt:', configTxt) + expect(configTxt).not.toContain(nodeId) + }, 600000) +}) diff --git a/test/e2e/commands/node-local-hedera.test.mjs b/test/e2e/commands/node_local_hedera.test.mjs similarity index 85% rename from test/e2e/commands/node-local-hedera.test.mjs rename to test/e2e/commands/node_local_hedera.test.mjs index dbb6f5ef6..8a361f3af 100644 --- a/test/e2e/commands/node-local-hedera.test.mjs +++ b/test/e2e/commands/node_local_hedera.test.mjs @@ -19,9 +19,6 @@ import { describe } from '@jest/globals' import { flags } from '../../../src/commands/index.mjs' -import { - constants -} from '../../../src/core/index.mjs' import { bootstrapNetwork, getDefaultArgv, @@ -32,8 +29,7 @@ import { getNodeLogs } from '../../../src/core/helpers.mjs' describe('Node local build', () => { const LOCAL_HEDERA = 'local-hedera-app' const argv = getDefaultArgv() - argv[flags.keyFormat.name] =
constants.KEY_FORMAT_PFX - argv[flags.nodeIDs.name] = 'node0,node1,node2' + argv[flags.nodeIDs.name] = 'node1,node2,node3' argv[flags.generateGossipKeys.name] = true argv[flags.generateTlsKeys.name] = true argv[flags.clusterName.name] = TEST_CLUSTER @@ -44,11 +40,11 @@ describe('Node local build', () => { afterAll(async () => { await getNodeLogs(hederaK8, LOCAL_HEDERA) await hederaK8.deleteNamespace(LOCAL_HEDERA) - }, 120000) + }, 600000) describe('Node for hedera app should start successfully', () => { console.log('Starting local build for Hedera app') - argv[flags.localBuildPath.name] = 'node0=../hedera-services/hedera-node/data/,../hedera-services/hedera-node/data,node2=../hedera-services/hedera-node/data' + argv[flags.localBuildPath.name] = 'node1=../hedera-services/hedera-node/data/,../hedera-services/hedera-node/data,node3=../hedera-services/hedera-node/data' argv[flags.namespace.name] = LOCAL_HEDERA const bootstrapResp = bootstrapNetwork(LOCAL_HEDERA, argv) hederaK8 = bootstrapResp.opts.k8 diff --git a/test/e2e/commands/node-local-ptt.test.mjs b/test/e2e/commands/node_local_ptt.test.mjs similarity index 90% rename from test/e2e/commands/node-local-ptt.test.mjs rename to test/e2e/commands/node_local_ptt.test.mjs index 2ef952b0d..f227152ab 100644 --- a/test/e2e/commands/node-local-ptt.test.mjs +++ b/test/e2e/commands/node_local_ptt.test.mjs @@ -13,15 +13,13 @@ * See the License for the specific language governing permissions and * limitations under the License. * + * @jest-environment steps */ import { afterAll, describe } from '@jest/globals' import { flags } from '../../../src/commands/index.mjs' -import { - constants -} from '../../../src/core/index.mjs' import { bootstrapNetwork, getDefaultArgv, @@ -32,14 +30,12 @@ import { getNodeLogs } from '../../../src/core/helpers.mjs' describe('Node local build', () => { const LOCAL_PTT = 'local-ptt-app' const argv = getDefaultArgv() - argv[flags.keyFormat.name] = constants.KEY_FORMAT_PFX - argv[flags.nodeIDs.name] = 'node0,node1,node2' + argv[flags.nodeIDs.name] = 'node1,node2,node3' argv[flags.generateGossipKeys.name] = true argv[flags.generateTlsKeys.name] = true argv[flags.clusterName.name] = TEST_CLUSTER // set the env variable SOLO_FST_CHARTS_DIR if developer wants to use local FST charts argv[flags.chartDirectory.name] = process.env.SOLO_FST_CHARTS_DIR ? 
process.env.SOLO_FST_CHARTS_DIR : undefined
-  argv[flags.valuesFile.name] = `test/data/${LOCAL_PTT}-values.yaml`
   let pttK8
   afterAll(async () => {
diff --git a/test/e2e/commands/node_pem_stop_add.test.mjs b/test/e2e/commands/node_pem_kill.test.mjs
similarity index 78%
rename from test/e2e/commands/node_pem_stop_add.test.mjs
rename to test/e2e/commands/node_pem_kill.test.mjs
index d65d65d6a..134699a4c 100644
--- a/test/e2e/commands/node_pem_stop_add.test.mjs
+++ b/test/e2e/commands/node_pem_kill.test.mjs
@@ -17,9 +17,8 @@
  */
 import { describe } from '@jest/globals'
-import { constants } from '../../../src/core/index.mjs'
-import { e2eNodeKeyRefreshAddTest } from '../e2e_node_util.js'
+import { e2eNodeKeyRefreshTest } from '../e2e_node_util.js'
 
 describe('NodeCommand', () => {
-  e2eNodeKeyRefreshAddTest(constants.KEY_FORMAT_PEM, 'node-cmd-e2e-pem', 'stop')
+  e2eNodeKeyRefreshTest('node-cmd-e2e-pem-kill', 'kill')
 })
diff --git a/test/e2e/commands/node_pfx_kill_add.test.mjs b/test/e2e/commands/node_pem_stop.test.mjs
similarity index 78%
rename from test/e2e/commands/node_pfx_kill_add.test.mjs
rename to test/e2e/commands/node_pem_stop.test.mjs
index 912b63521..7d1749cd7 100644
--- a/test/e2e/commands/node_pfx_kill_add.test.mjs
+++ b/test/e2e/commands/node_pem_stop.test.mjs
@@ -17,9 +17,8 @@
  */
 import { describe } from '@jest/globals'
-import { constants } from '../../../src/core/index.mjs'
-import { e2eNodeKeyRefreshAddTest } from '../e2e_node_util.js'
+import { e2eNodeKeyRefreshTest } from '../e2e_node_util.js'
 
 describe('NodeCommand', () => {
-  e2eNodeKeyRefreshAddTest(constants.KEY_FORMAT_PFX, 'node-cmd-e2e-pfx', 'kill')
+  e2eNodeKeyRefreshTest('node-cmd-e2e-pem-stop', 'stop')
 })
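The node-update spec that follows passes `--new-admin-key` as a bare hex string. That value is a DER-encoded (PKCS#8) ed25519 private key: `302e020100300506032b657004220420` is the fixed DER prefix and the remaining 64 hex characters are the 32 bytes of key material. If a fresh key is ever needed instead of the checked-in test value, the Hedera SDK these e2e tests already depend on can mint one; a small sketch:

```js
import { PrivateKey } from '@hashgraph/sdk'

// Generate a new ed25519 key pair and print the DER-encoded private key,
// which is the format the newAdminKey flag below expects.
const adminKey = PrivateKey.generateED25519()
console.log(adminKey.toStringDer())           // 302e020100300506032b657004220420<32 bytes hex>
console.log(adminKey.publicKey.toStringDer()) // matching public key, DER-encoded
```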
diff --git a/test/e2e/commands/node_update.test.mjs b/test/e2e/commands/node_update.test.mjs
new file mode 100644
index 000000000..8ef0afe44
--- /dev/null
+++ b/test/e2e/commands/node_update.test.mjs
@@ -0,0 +1,132 @@
+/**
+ * Copyright (C) 2024 Hedera Hashgraph, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * @jest-environment steps
+ */
+import { afterAll, describe, expect, it } from '@jest/globals'
+import { flags } from '../../../src/commands/index.mjs'
+import { constants } from '../../../src/core/index.mjs'
+import {
+  accountCreationShouldSucceed,
+  balanceQueryShouldSucceed,
+  bootstrapNetwork,
+  getDefaultArgv, getNodeIdsPrivateKeysHash, getTmpDir,
+  HEDERA_PLATFORM_VERSION_TAG
+} from '../../test_util.js'
+import { getNodeLogs } from '../../../src/core/helpers.mjs'
+import { NodeCommand } from '../../../src/commands/node.mjs'
+import { HEDERA_HAPI_PATH, ROOT_CONTAINER } from '../../../src/core/constants.mjs'
+import fs from 'fs'
+
+describe('Node update', () => {
+  const defaultTimeout = 120000
+  const namespace = 'node-update'
+  const updateNodeId = 'node2'
+  const newAccountId = '0.0.7'
+  const argv = getDefaultArgv()
+  argv[flags.nodeIDs.name] = 'node1,node2,node3'
+  argv[flags.nodeID.name] = updateNodeId
+
+  argv[flags.newAccountNumber.name] = newAccountId
+  argv[flags.newAdminKey.name] = '302e020100300506032b6570042204200cde8d512569610f184b8b399e91e46899805c6171f7c2b8666d2a417bcc66c2'
+
+  argv[flags.generateGossipKeys.name] = true
+  argv[flags.generateTlsKeys.name] = true
+  // set the env variable SOLO_FST_CHARTS_DIR if developer wants to use local FST charts
+  argv[flags.chartDirectory.name] = process.env.SOLO_FST_CHARTS_DIR ? process.env.SOLO_FST_CHARTS_DIR : undefined
+  argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG
+  argv[flags.namespace.name] = namespace
+  argv[flags.persistentVolumeClaims.name] = true
+  const bootstrapResp = bootstrapNetwork(namespace, argv)
+  const nodeCmd = bootstrapResp.cmd.nodeCmd
+  const accountCmd = bootstrapResp.cmd.accountCmd
+  const k8 = bootstrapResp.opts.k8
+  let existingServiceMap
+  let existingNodeIdsPrivateKeysHash
+
+  afterAll(async () => {
+    await getNodeLogs(k8, namespace)
+    await nodeCmd.stop(argv)
+    await k8.deleteNamespace(namespace)
+  }, 600000)
+
+  it('cache current version of private keys', async () => {
+    existingServiceMap = await nodeCmd.accountManager.getNodeServiceMap(namespace)
+    existingNodeIdsPrivateKeysHash = await getNodeIdsPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir())
+  }, defaultTimeout)
+
+  it('should succeed with init command', async () => {
+    const status = await accountCmd.init(argv)
+    expect(status).toBeTruthy()
+  }, 450000)
+
+  it('should update a node property successfully', async () => {
+    // generate gossip and tls keys for the updated node
+    const tmpDir = getTmpDir()
+
+    const signingKey = await nodeCmd.keyManager.generateSigningKey(updateNodeId)
+    const signingKeyFiles = await nodeCmd.keyManager.storeSigningKey(updateNodeId, signingKey, tmpDir)
+    nodeCmd.logger.debug(`generated test gossip signing keys for node ${updateNodeId} : ${signingKeyFiles.certificateFile}`)
+    argv[flags.gossipPublicKey.name] = signingKeyFiles.certificateFile
+    argv[flags.gossipPrivateKey.name] = signingKeyFiles.privateKeyFile
+
+    const tlsKey = await nodeCmd.keyManager.generateGrpcTLSKey(updateNodeId)
+    const tlsKeyFiles = await nodeCmd.keyManager.storeTLSKey(updateNodeId, tlsKey, tmpDir)
+    nodeCmd.logger.debug(`generated test TLS keys for node ${updateNodeId} : ${tlsKeyFiles.certificateFile}`)
+    argv[flags.tlsPublicKey.name] = tlsKeyFiles.certificateFile
+    argv[flags.tlsPrivateKey.name] = tlsKeyFiles.privateKeyFile
+
+    await nodeCmd.update(argv)
+    expect(nodeCmd.getUnusedConfigs(NodeCommand.UPDATE_CONFIGS_NAME)).toEqual([
+      flags.app.constName,
+      flags.devMode.constName
+    ])
+    await nodeCmd.accountManager.close()
+  }, 1800000)
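The `getUnusedConfigs` assertion above (and its twins in the relay, delete, and add specs) pins down exactly which parsed flags a command never consumed, so a refactor that silently stops reading a flag fails the suite. The bookkeeping reduces to something like this; the tracker below is a hypothetical miniature for illustration, not Solo's actual implementation:

```js
// Hypothetical reduction of the unused-config tracking the tests assert on.
class ConfigTracker {
  constructor (values) {
    this._values = new Map(Object.entries(values))
    this._read = new Set()
  }

  get (name) {
    this._read.add(name) // record every read
    return this._values.get(name)
  }

  // Everything supplied but never read: the list the specs compare against.
  getUnusedConfigs () {
    return [...this._values.keys()].filter(key => !this._read.has(key))
  }
}

const cfg = new ConfigTracker({ profileFile: 'p.yaml', profileName: 'local', nodeIDs: 'node2' })
cfg.get('nodeIDs')
console.log(cfg.getUnusedConfigs()) // ['profileFile', 'profileName']
```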
+
+  balanceQueryShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace)
+
+  accountCreationShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace)
+
+  it('signing key and tls key should not match previous ones', async () => {
+    const currentNodeIdsPrivateKeysHash = await getNodeIdsPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir())
+
+    for (const [nodeId, existingKeyHashMap] of existingNodeIdsPrivateKeysHash.entries()) {
+      const currentNodeKeyHashMap = currentNodeIdsPrivateKeysHash.get(nodeId)
+
+      for (const [keyFileName, existingKeyHash] of existingKeyHashMap.entries()) {
+        if (nodeId === updateNodeId &&
+          (keyFileName.startsWith(constants.SIGNING_KEY_PREFIX) || keyFileName.startsWith('hedera'))) {
+          expect(`${nodeId}:${keyFileName}:${currentNodeKeyHashMap.get(keyFileName)}`).not.toEqual(
+            `${nodeId}:${keyFileName}:${existingKeyHash}`)
+        } else {
+          expect(`${nodeId}:${keyFileName}:${currentNodeKeyHashMap.get(keyFileName)}`).toEqual(
+            `${nodeId}:${keyFileName}:${existingKeyHash}`)
+        }
+      }
+    }
+  }, defaultTimeout)
+
+  it('config.txt should be changed with new account id', async () => {
+    // read config.txt from the first node, line by line; it should contain the new account id
+    const pods = await k8.getPodsByLabel(['fullstack.hedera.com/type=network-node'])
+    const podName = pods[0].metadata.name
+    const tmpDir = getTmpDir()
+    await k8.copyFrom(podName, ROOT_CONTAINER, `${HEDERA_HAPI_PATH}/config.txt`, tmpDir)
+    const configTxt = fs.readFileSync(`${tmpDir}/config.txt`, 'utf8')
+    console.log('config.txt:', configTxt)
+    expect(configTxt).toContain(newAccountId)
+  }, 600000)
+})
diff --git a/test/e2e/commands/relay.test.mjs b/test/e2e/commands/relay.test.mjs
index f32732ac9..ff8b47656 100644
--- a/test/e2e/commands/relay.test.mjs
+++ b/test/e2e/commands/relay.test.mjs
@@ -23,9 +23,6 @@ import { it } from '@jest/globals'
 import { flags } from '../../../src/commands/index.mjs'
-import {
-  constants
-} from '../../../src/core/index.mjs'
 import {
   bootstrapNetwork,
   getDefaultArgv,
@@ -42,9 +39,8 @@ describe('RelayCommand', () => {
   const argv = getDefaultArgv()
   argv[flags.namespace.name] = namespace
   argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG
-  argv[flags.keyFormat.name] = constants.KEY_FORMAT_PEM
-  argv[flags.nodeIDs.name] = 'node0,node1'
+  argv[flags.nodeIDs.name] = 'node1,node2'
   argv[flags.generateGossipKeys.name] = true
   argv[flags.generateTlsKeys.name] = true
   argv[flags.clusterName.name] = TEST_CLUSTER
@@ -67,12 +63,12 @@ describe('RelayCommand', () => {
   })
 
   it.each([
-    { relayNodes: 'node0' },
-    { relayNodes: 'node0,node1' }
+    { relayNodes: 'node1' },
+    { relayNodes: 'node1,node2' }
   ])('relay deploy and destroy should work with different number of relay nodes', async (input) => {
     argv[flags.nodeIDs.name] = input.relayNodes
     configManager.update(argv)
-    expect.assertions(2)
+    expect.assertions(3)
 
     // test relay deploy
     try {
@@ -81,7 +77,10 @@ describe('RelayCommand', () => {
       relayCmd.logger.showUserError(e)
       expect(e).toBeNull()
     }
-
+    expect(relayCmd.getUnusedConfigs(RelayCommand.DEPLOY_CONFIGS_NAME)).toEqual([
+      flags.profileFile.constName,
+      flags.profileName.constName
+    ])
     await sleep(500)
 
     // test relay destroy
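Both the update spec above and the separated-commands add spec below rely on `getNodeIdsPrivateKeysHash` from `test_util.js` to snapshot key material before and after the operation. Its core move (visible in the older in-file copy being deleted from `e2e_node_util.js` further down) is to copy each private-key file out of the pod and hash it, roughly:

```js
import crypto from 'crypto'
import fs from 'fs'
import path from 'path'

// Hash one private-key file after it has been copied out of a node's pod.
// The helper builds a Map<nodeId, Map<keyFileName, hash>> from such hashes,
// which the specs then diff across the update/add operation.
function hashKeyFile (localDir, keyFileName) {
  const keyString = fs.readFileSync(path.join(localDir, keyFileName)).toString()
  return crypto.createHash('sha256').update(keyString).digest('base64')
}
```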
diff --git a/test/e2e/commands/separate_node_add.test.mjs b/test/e2e/commands/separate_node_add.test.mjs
new file mode 100644
index 000000000..b6fc57068
--- /dev/null
+++ b/test/e2e/commands/separate_node_add.test.mjs
@@ -0,0 +1,113 @@
+/**
+ * Copyright (C) 2024 Hedera Hashgraph, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * @jest-environment steps
+ */
+import {
+  accountCreationShouldSucceed,
+  balanceQueryShouldSucceed,
+  bootstrapNetwork,
+  getDefaultArgv,
+  getNodeIdsPrivateKeysHash, getTmpDir,
+  HEDERA_PLATFORM_VERSION_TAG
+} from '../../test_util.js'
+import { flags } from '../../../src/commands/index.mjs'
+import { getNodeLogs } from '../../../src/core/helpers.mjs'
+import { NodeCommand } from '../../../src/commands/node.mjs'
+
+describe('Node add via separated commands should succeed', () => {
+  const defaultTimeout = 120000
+  const namespace = 'node-add-separated'
+  const argv = getDefaultArgv()
+  argv[flags.nodeIDs.name] = 'node1,node2,node3'
+  argv[flags.generateGossipKeys.name] = true
+  argv[flags.generateTlsKeys.name] = true
+  // set the env variable SOLO_FST_CHARTS_DIR if developer wants to use local FST charts
+  argv[flags.chartDirectory.name] = process.env.SOLO_FST_CHARTS_DIR ? process.env.SOLO_FST_CHARTS_DIR : undefined
+  argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG
+  argv[flags.namespace.name] = namespace
+  argv[flags.force.name] = true
+  argv[flags.persistentVolumeClaims.name] = true
+
+  const argvPrepare = Object.assign({}, argv)
+
+  const tempDir = 'contextDir'
+  argvPrepare[flags.outputDir.name] = tempDir
+
+  const argvExecute = getDefaultArgv()
+  argvExecute[flags.inputDir.name] = tempDir
+
+  const bootstrapResp = bootstrapNetwork(namespace, argv)
+  const nodeCmd = bootstrapResp.cmd.nodeCmd
+  const accountCmd = bootstrapResp.cmd.accountCmd
+  const networkCmd = bootstrapResp.cmd.networkCmd
+  const k8 = bootstrapResp.opts.k8
+  let existingServiceMap
+  let existingNodeIdsPrivateKeysHash
+
+  afterAll(async () => {
+    await getNodeLogs(k8, namespace)
+    await nodeCmd.accountManager.close()
+    await nodeCmd.stop(argv)
+    await networkCmd.destroy(argv)
+    await k8.deleteNamespace(namespace)
+  }, 600000)
+
+  it('cache current version of private keys', async () => {
+    existingServiceMap = await nodeCmd.accountManager.getNodeServiceMap(namespace)
+    existingNodeIdsPrivateKeysHash = await getNodeIdsPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir())
+  }, defaultTimeout)
+
+  it('should succeed with init command', async () => {
+    const status = await accountCmd.init(argv)
+    expect(status).toBeTruthy()
+  }, 450000)
+
+  it('should add a new node to the network via the separated commands successfully', async () => {
+    await nodeCmd.addPrepare(argvPrepare)
+    await nodeCmd.addSubmitTransactions(argvExecute)
+    await nodeCmd.addExecute(argvExecute)
+    expect(nodeCmd.getUnusedConfigs(NodeCommand.ADD_CONFIGS_NAME)).toEqual([
+      flags.app.constName,
+      flags.chainId.constName,
+      flags.devMode.constName,
+      flags.generateGossipKeys.constName,
+      flags.generateTlsKeys.constName,
+      flags.gossipEndpoints.constName,
+      flags.grpcEndpoints.constName,
+      flags.adminKey.constName,
+      'curDate',
+      'freezeAdminPrivateKey'
+    ])
+    await nodeCmd.accountManager.close()
+  }, 800000)
+
+  balanceQueryShouldSucceed(nodeCmd.accountManager, nodeCmd,
namespace) + + accountCreationShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace) + + it('existing nodes private keys should not have changed', async () => { + const currentNodeIdsPrivateKeysHash = await getNodeIdsPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir()) + + for (const [nodeId, existingKeyHashMap] of existingNodeIdsPrivateKeysHash.entries()) { + const currentNodeKeyHashMap = currentNodeIdsPrivateKeysHash.get(nodeId) + + for (const [keyFileName, existingKeyHash] of existingKeyHashMap.entries()) { + expect(`${nodeId}:${keyFileName}:${currentNodeKeyHashMap.get(keyFileName)}`).toEqual( + `${nodeId}:${keyFileName}:${existingKeyHash}`) + } + } + }, defaultTimeout) +}, 180000) diff --git a/test/e2e/core/account_manager.test.mjs b/test/e2e/core/account_manager.test.mjs index 406c21e97..b4c91aee4 100644 --- a/test/e2e/core/account_manager.test.mjs +++ b/test/e2e/core/account_manager.test.mjs @@ -14,21 +14,37 @@ * limitations under the License. * */ -import { describe, expect, it } from '@jest/globals' -import path from 'path' import { flags } from '../../../src/commands/index.mjs' -import { AccountManager } from '../../../src/core/account_manager.mjs' -import { ConfigManager, constants, K8 } from '../../../src/core/index.mjs' -import { getTestCacheDir, testLogger } from '../../test_util.js' +import { + bootstrapNetwork, + getDefaultArgv, + TEST_CLUSTER +} from '../../test_util.js' +import * as version from '../../../version.mjs' describe('AccountManager', () => { - const configManager = new ConfigManager(testLogger, path.join(getTestCacheDir('accountCmd'), 'solo.config')) - configManager.setFlag(flags.namespace, 'solo-e2e') + const namespace = 'account-mngr-e2e' + const argv = getDefaultArgv() + argv[flags.namespace.name] = namespace + argv[flags.nodeIDs.name] = 'node1' + argv[flags.clusterName.name] = TEST_CLUSTER + argv[flags.fstChartVersion.name] = version.FST_CHART_VERSION + argv[flags.generateGossipKeys.name] = true + argv[flags.generateTlsKeys.name] = true + // set the env variable SOLO_FST_CHARTS_DIR if developer wants to use local FST charts + argv[flags.chartDirectory.name] = process.env.SOLO_FST_CHARTS_DIR ? 
process.env.SOLO_FST_CHARTS_DIR : undefined
+  const bootstrapResp = bootstrapNetwork(namespace, argv, undefined, undefined, undefined, undefined, undefined, undefined, false)
+  const k8 = bootstrapResp.opts.k8
+  const accountManager = bootstrapResp.opts.accountManager
+  const configManager = bootstrapResp.opts.configManager
 
-  const k8 = new K8(configManager, testLogger)
-  const accountManager = new AccountManager(testLogger, k8, constants)
+  afterAll(async () => {
+    await k8.deleteNamespace(namespace)
+    await accountManager.close()
+  }, 180000)
 
   it('should be able to stop port forwards', async () => {
+    await accountManager.close()
     expect.assertions(4)
 
     const localHost = '127.0.0.1'
@@ -36,22 +52,22 @@
     const podPort = 9090
     const localPort = 19090
 
-    expect(accountManager._portForwards.length).toStrictEqual(0)
+    expect(accountManager._portForwards.length, 'starting accountManager port forwards length should be zero').toStrictEqual(0)
 
     // ports should be opened
     accountManager._portForwards.push(await k8.portForward(podName, localPort, podPort))
     const status = await k8.testConnection(localHost, localPort)
-    expect(status).toBeTruthy()
+    expect(status, 'test connection status should be true').toBeTruthy()
 
     // ports should be closed
     await accountManager.close()
     try {
       await k8.testConnection(localHost, localPort)
     } catch (e) {
-      expect(e.message.includes(`failed to connect to '${localHost}:${localPort}'`)).toBeTruthy()
+      expect(e.message.includes(`failed to connect to '${localHost}:${localPort}'`), 'expect failed test connection').toBeTruthy()
     }
 
-    expect(accountManager._portForwards.length).toStrictEqual(0)
+    expect(accountManager._portForwards.length, 'expect that the closed account manager should have no port forwards').toStrictEqual(0)
   })
 
   it('should be able to load a new client', async () => {
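Two notes on the rewritten spec above. The two-argument `expect(value, message)` form is not core Jest; it presumes a message add-on such as jest-expect-message is loaded in the project's Jest setup (an assumption, since that wiring is outside this diff). And the port-forward contract being tested, namely that `close()` must drain `_portForwards`, reduces to this usage sketch (`podName` comes from the spec's elided context):

```js
// Minimal reduction of the lifecycle the spec asserts: open a forward,
// prove it connects, then prove accountManager.close() tears it down.
async function checkPortForwardLifecycle (k8, accountManager, podName) {
  const localHost = '127.0.0.1'
  const localPort = 19090

  accountManager._portForwards.push(await k8.portForward(podName, localPort, 9090))
  console.log(await k8.testConnection(localHost, localPort)) // true while the forward is open

  await accountManager.close() // stops each forward server and empties _portForwards
  try {
    await k8.testConnection(localHost, localPort)
  } catch (e) {
    console.log(e.message) // failed to connect to '127.0.0.1:19090'
  }
  console.log(accountManager._portForwards.length) // 0
}
```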
diff --git a/test/e2e/core/chart_manager.test.mjs b/test/e2e/core/chart_manager.test.mjs
index 5761ccc92..58faf3f61 100644
--- a/test/e2e/core/chart_manager.test.mjs
+++ b/test/e2e/core/chart_manager.test.mjs
@@ -14,31 +14,24 @@
  * limitations under the License.
  *
  */
-import { beforeAll, describe, expect, it } from '@jest/globals'
-import { flags } from '../../../src/commands/index.mjs'
-import { ChartManager, ConfigManager, Helm, constants } from '../../../src/core/index.mjs'
+import { ChartManager, Helm, constants } from '../../../src/core/index.mjs'
 import { testLogger } from '../../test_util.js'
 
 describe('ChartManager', () => {
   const helm = new Helm(testLogger)
   const chartManager = new ChartManager(helm, testLogger)
-  const configManager = new ConfigManager(testLogger)
-
-  beforeAll(() => {
-    configManager.load()
-  })
 
   it('should be able to list installed charts', async () => {
-    const ns = configManager.getFlag(flags.namespace)
-    expect(ns).not.toBeNull()
+    const ns = constants.FULLSTACK_SETUP_NAMESPACE
+    expect(ns, 'namespace should not be null').not.toBeNull()
     const list = await chartManager.getInstalledCharts(ns)
-    expect(list.length).not.toBe(0)
+    expect(list.length, 'should have at least one installed chart').not.toBe(0)
   })
 
   it('should be able to check if a chart is installed', async () => {
-    const ns = configManager.getFlag(flags.namespace)
-    expect(ns).not.toBeNull()
-    const isInstalled = await chartManager.isChartInstalled(ns, constants.FULLSTACK_DEPLOYMENT_CHART)
-    expect(isInstalled).toBeTruthy()
+    const ns = constants.FULLSTACK_SETUP_NAMESPACE
+    expect(ns, 'namespace should not be null').not.toBeNull()
+    const isInstalled = await chartManager.isChartInstalled(ns, constants.FULLSTACK_CLUSTER_SETUP_CHART)
+    expect(isInstalled, `${constants.FULLSTACK_CLUSTER_SETUP_CHART} should be installed`).toBeTruthy()
   })
 })
diff --git a/test/e2e/core/k8_e2e.test.mjs b/test/e2e/core/k8_e2e.test.mjs
index 981f6f614..0622dc6fd 100644
--- a/test/e2e/core/k8_e2e.test.mjs
+++ b/test/e2e/core/k8_e2e.test.mjs
@@ -14,7 +14,6 @@
 * limitations under the License.
* */ -import { beforeAll, describe, expect, it } from '@jest/globals' import fs from 'fs' import net from 'net' import os from 'os' @@ -23,16 +22,90 @@ import { v4 as uuid4 } from 'uuid' import { FullstackTestingError } from '../../../src/core/errors.mjs' import { ConfigManager, constants, logging, Templates } from '../../../src/core/index.mjs' import { K8 } from '../../../src/core/k8.mjs' +import { flags } from '../../../src/commands/index.mjs' +import { + V1Container, + V1ExecAction, + V1ObjectMeta, + V1PersistentVolumeClaim, + V1PersistentVolumeClaimSpec, + V1Pod, + V1PodSpec, + V1Probe, + V1Service, + V1ServicePort, + V1ServiceSpec, + V1VolumeResourceRequirements +} from '@kubernetes/client-node' +import crypto from 'crypto' -const defaultTimeout = 20000 +const defaultTimeout = 120000 describe('K8', () => { const testLogger = logging.NewLogger('debug', true) const configManager = new ConfigManager(testLogger) const k8 = new K8(configManager, testLogger) + const testNamespace = 'k8-e2e' + const argv = [] + const podName = `test-pod-${uuid4()}` + const containerName = 'alpine' + const podLabelValue = `test-${uuid4()}` + const serviceName = `test-service-${uuid4()}` - beforeAll(() => { - configManager.load() + beforeAll(async () => { + try { + argv[flags.namespace.name] = testNamespace + configManager.update(argv) + if (!await k8.hasNamespace(testNamespace)) { + await k8.createNamespace(testNamespace) + } + const v1Pod = new V1Pod() + const v1Metadata = new V1ObjectMeta() + v1Metadata.name = podName + v1Metadata.namespace = testNamespace + v1Metadata.labels = { app: podLabelValue } + v1Pod.metadata = v1Metadata + const v1Container = new V1Container() + v1Container.name = containerName + v1Container.image = 'alpine:latest' + v1Container.command = ['/bin/sh', '-c', 'apk update && apk upgrade && apk add --update bash && sleep 7200'] + const v1Probe = new V1Probe() + const v1ExecAction = new V1ExecAction() + v1ExecAction.command = ['bash', '-c', 'exit 0'] + v1Probe.exec = v1ExecAction + v1Container.startupProbe = v1Probe + const v1Spec = new V1PodSpec() + v1Spec.containers = [v1Container] + v1Pod.spec = v1Spec + await k8.kubeClient.createNamespacedPod(testNamespace, v1Pod) + const v1Svc = new V1Service() + const v1SvcMetadata = new V1ObjectMeta() + v1SvcMetadata.name = serviceName + v1SvcMetadata.namespace = testNamespace + v1SvcMetadata.labels = { app: 'svc-test' } + v1Svc.metadata = v1SvcMetadata + const v1SvcSpec = new V1ServiceSpec() + const v1SvcPort = new V1ServicePort() + v1SvcPort.port = 80 + v1SvcPort.targetPort = 80 + v1SvcSpec.ports = [v1SvcPort] + v1Svc.spec = v1SvcSpec + await k8.kubeClient.createNamespacedService(testNamespace, v1Svc) + } catch (e) { + console.log(`${e}, ${e.stack}`) + throw e + } + }, defaultTimeout) + + afterAll(async () => { + try { + await k8.kubeClient.deleteNamespacedPod(podName, testNamespace, undefined, undefined, 1) + argv[flags.namespace.name] = constants.FULLSTACK_SETUP_NAMESPACE + configManager.update(argv) + } catch (e) { + console.log(e) + throw e + } }, defaultTimeout) it('should be able to list clusters', async () => { @@ -57,46 +130,86 @@ describe('K8', () => { await expect(k8.deleteNamespace(name)).resolves.toBeTruthy() }, defaultTimeout) + it('should be able to run wait for pod', async () => { + const labels = [`app=${podLabelValue}`] + + const pods = await k8.waitForPods([constants.POD_PHASE_RUNNING], labels, 1, 30) + expect(pods.length).toStrictEqual(1) + }, defaultTimeout) + + it('should be able to run wait for pod ready', async () => { + 
const labels = [`app=${podLabelValue}`] + + const pods = await k8.waitForPodReady(labels, 1, 100) + expect(pods.length).toStrictEqual(1) + }, defaultTimeout) + + it('should be able to run wait for pod conditions', async () => { + const labels = [`app=${podLabelValue}`] + + const conditions = new Map() + .set(constants.POD_CONDITION_INITIALIZED, constants.POD_CONDITION_STATUS_TRUE) + .set(constants.POD_CONDITION_POD_SCHEDULED, constants.POD_CONDITION_STATUS_TRUE) + .set(constants.POD_CONDITION_READY, constants.POD_CONDITION_STATUS_TRUE) + const pods = await k8.waitForPodConditions(conditions, labels, 1) + expect(pods.length).toStrictEqual(1) + }, defaultTimeout) + it('should be able to detect pod IP of a pod', async () => { - const podName = Templates.renderNetworkPodName('node0') + const pods = await k8.getPodsByLabel([`app=${podLabelValue}`]) + const podName = pods[0].metadata.name await expect(k8.getPodIP(podName)).resolves.not.toBeNull() await expect(k8.getPodIP('INVALID')).rejects.toThrow(FullstackTestingError) }, defaultTimeout) it('should be able to detect cluster IP', async () => { - const svcName = Templates.renderNetworkSvcName('node0') - await expect(k8.getClusterIP(svcName)).resolves.not.toBeNull() + await expect(k8.getClusterIP(serviceName)).resolves.not.toBeNull() await expect(k8.getClusterIP('INVALID')).rejects.toThrow(FullstackTestingError) }, defaultTimeout) it('should be able to check if a path is directory inside a container', async () => { - const podName = Templates.renderNetworkPodName('node0') - await expect(k8.hasDir(podName, constants.ROOT_CONTAINER, constants.HEDERA_USER_HOME_DIR)).resolves.toBeTruthy() + const pods = await k8.getPodsByLabel([`app=${podLabelValue}`]) + const podName = pods[0].metadata.name + await expect(k8.hasDir(podName, containerName, '/tmp')).resolves.toBeTruthy() }, defaultTimeout) - it('should be able to copy a file to and from a container', async () => { - const podName = Templates.renderNetworkPodName('node0') - const containerName = constants.ROOT_CONTAINER - - const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'k8-')) - const destDir = constants.HEDERA_USER_HOME_DIR - const srcPath = 'test/data/pem/keys/a-private-node0.pem' - const destPath = `${destDir}/a-private-node0.pem` + describe.each([ + { localFilePath: 'test/data/pem/keys/a-private-node0.pem' }, + { localFilePath: 'test/data/build-v0.54.0-alpha.4.zip' } + ])('test copyTo and copyFrom', (input) => { + it('should be able to copy a file to and from a container', async () => { + const pods = await k8.waitForPodReady([`app=${podLabelValue}`], 1, 20) + expect(pods.length).toStrictEqual(1) + const localTmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'k8-test')) + const remoteTmpDir = '/tmp' + const localFilePath = input.localFilePath + const fileName = path.basename(localFilePath) + const remoteFilePath = `${remoteTmpDir}/${fileName}` + const originalFileData = fs.readFileSync(localFilePath) + const originalFileHash = crypto.createHash('sha384').update(originalFileData).digest('hex') + const originalStat = fs.statSync(localFilePath) - // upload the file - await expect(k8.copyTo(podName, containerName, srcPath, destDir)).resolves.toBeTruthy() + // upload the file + await expect(k8.copyTo(podName, containerName, localFilePath, remoteTmpDir)).resolves.toBeTruthy() - // download the same file - await expect(k8.copyFrom(podName, containerName, destPath, tmpDir)).resolves.toBeTruthy() + // download the same file + await expect(k8.copyFrom(podName, containerName, remoteFilePath, 
localTmpDir)).resolves.toBeTruthy() + const downloadedFilePath = path.join(localTmpDir, fileName) + const downloadedFileData = fs.readFileSync(downloadedFilePath) + const downloadedFileHash = crypto.createHash('sha384').update(downloadedFileData).digest('hex') + const downloadedStat = fs.statSync(downloadedFilePath) - // rm file inside the container - await expect(k8.execContainer(podName, containerName, ['rm', '-f', destPath])).resolves + expect(downloadedStat.size, 'downloaded file size should match original file size').toEqual(originalStat.size) + expect(downloadedFileHash, 'downloaded file hash should match original file hash').toEqual(originalFileHash) + // rm file inside the container + await expect(k8.execContainer(podName, containerName, ['rm', '-f', remoteFilePath])).resolves - fs.rmdirSync(tmpDir, { recursive: true }) - }, defaultTimeout) + fs.rmdirSync(localTmpDir, { recursive: true }) + }, defaultTimeout) + }) it('should be able to port forward gossip port', (done) => { - const podName = Templates.renderNetworkPodName('node0') + const podName = Templates.renderNetworkPodName('node1') const localPort = constants.HEDERA_NODE_INTERNAL_GOSSIP_PORT try { k8.portForward(podName, localPort, constants.HEDERA_NODE_INTERNAL_GOSSIP_PORT).then((server) => { @@ -122,58 +235,37 @@ describe('K8', () => { testLogger.showUserError(e) expect(e).toBeNull() } + // TODO enhance this test to do something with the port, this pod isn't even running, but it is still passing }, defaultTimeout) - it('should be able to run wait for pod', async () => { - const labels = [ - 'fullstack.hedera.com/type=network-node' - ] - - const pods = await k8.waitForPods([constants.POD_PHASE_RUNNING], labels, 1) - expect(pods.length).toStrictEqual(1) - }, defaultTimeout) - - it('should be able to run wait for pod ready', async () => { - const labels = [ - 'fullstack.hedera.com/type=network-node' - ] - - const pods = await k8.waitForPodReady(labels, 1) - expect(pods.length).toStrictEqual(1) - }, defaultTimeout) - - it('should be able to run wait for pod conditions', async () => { - const labels = [ - 'fullstack.hedera.com/type=network-node' - ] - - const conditions = new Map() - .set(constants.POD_CONDITION_INITIALIZED, constants.POD_CONDITION_STATUS_TRUE) - .set(constants.POD_CONDITION_POD_SCHEDULED, constants.POD_CONDITION_STATUS_TRUE) - .set(constants.POD_CONDITION_READY, constants.POD_CONDITION_STATUS_TRUE) - const pods = await k8.waitForPodConditions(conditions, labels, 1) - expect(pods.length).toStrictEqual(1) - }, defaultTimeout) - - it('should be able to cat a log file inside the container', async () => { - const podName = Templates.renderNetworkPodName('node0') - const containerName = constants.ROOT_CONTAINER - const testFileName = 'test.txt' - const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'k8-')) - const tmpFile = path.join(tmpDir, testFileName) - const destDir = constants.HEDERA_USER_HOME_DIR - const destPath = `${destDir}/${testFileName}` - fs.writeFileSync(tmpFile, 'TEST\nNow current platform status = ACTIVE') - - await expect(k8.copyTo(podName, containerName, tmpFile, destDir)).resolves.toBeTruthy() - const output = await k8.execContainer(podName, containerName, ['tail', '-10', destPath]) - expect(output.indexOf('Now current platform status = ACTIVE')).toBeGreaterThan(0) - - fs.rmdirSync(tmpDir, { recursive: true }) + it('should be able to cat a file inside the container', async () => { + const pods = await k8.getPodsByLabel([`app=${podLabelValue}`]) + const podName = pods[0].metadata.name + const 
output = await k8.execContainer(podName, containerName, ['cat', '/etc/hostname']) + expect(output.indexOf(podName)).toEqual(0) }, defaultTimeout) it('should be able to list persistent volume claims', async () => { - const pvcs = await k8.listPvcsByNamespace(k8._getNamespace()) - expect(pvcs.length).toBeGreaterThan(0) + const v1Pvc = new V1PersistentVolumeClaim() + try { + v1Pvc.name = `test-pvc-${uuid4()}` + const v1Spec = new V1PersistentVolumeClaimSpec() + v1Spec.accessModes = ['ReadWriteOnce'] + const v1ResReq = new V1VolumeResourceRequirements() + v1ResReq.requests = { storage: '50Mi' } + v1Spec.resources = v1ResReq + v1Pvc.spec = v1Spec + const v1Metadata = new V1ObjectMeta() + v1Metadata.name = v1Pvc.name + v1Pvc.metadata = v1Metadata + await k8.kubeClient.createNamespacedPersistentVolumeClaim(testNamespace, v1Pvc) + const pvcs = await k8.listPvcsByNamespace(testNamespace) + expect(pvcs.length).toBeGreaterThan(0) + } catch (e) { + console.error(e) + throw e + } finally { + await k8.deletePvc(v1Pvc.name, testNamespace) + } }, defaultTimeout) }) diff --git a/test/e2e/core/platform_installer_e2e.test.mjs b/test/e2e/core/platform_installer_e2e.test.mjs index 97458a9c8..0c2ec6813 100644 --- a/test/e2e/core/platform_installer_e2e.test.mjs +++ b/test/e2e/core/platform_installer_e2e.test.mjs @@ -14,30 +14,48 @@ * limitations under the License. * */ -import { beforeAll, describe, expect, it } from '@jest/globals' -import { - PlatformInstaller, - constants, - Templates, - ConfigManager, Templates as Template -} from '../../../src/core/index.mjs' +import { afterAll, beforeAll, describe, expect, it } from '@jest/globals' +import { constants } from '../../../src/core/index.mjs' import * as fs from 'fs' -import { K8 } from '../../../src/core/k8.mjs' -import { getTestCacheDir, getTmpDir, testLogger } from '../../test_util.js' -import { AccountManager } from '../../../src/core/account_manager.mjs' +import { + bootstrapNetwork, + getDefaultArgv, + getTestCacheDir, + TEST_CLUSTER, + testLogger +} from '../../test_util.js' +import { flags } from '../../../src/commands/index.mjs' +import * as version from '../../../version.mjs' const defaultTimeout = 20000 describe('PackageInstallerE2E', () => { - const configManager = new ConfigManager(testLogger) - const k8 = new K8(configManager, testLogger) - const accountManager = new AccountManager(testLogger, k8) - const installer = new PlatformInstaller(testLogger, k8, configManager, accountManager) + const namespace = 'pkg-installer-e2e' + const argv = getDefaultArgv() const testCacheDir = getTestCacheDir() - const podName = 'network-node0-0' + argv[flags.cacheDir.name] = testCacheDir + argv[flags.namespace.name] = namespace + argv[flags.nodeIDs.name] = 'node1' + argv[flags.clusterName.name] = TEST_CLUSTER + argv[flags.fstChartVersion.name] = version.FST_CHART_VERSION + argv[flags.generateGossipKeys.name] = true + argv[flags.generateTlsKeys.name] = true + // set the env variable SOLO_FST_CHARTS_DIR if developer wants to use local FST charts + argv[flags.chartDirectory.name] = process.env.SOLO_FST_CHARTS_DIR ? 
process.env.SOLO_FST_CHARTS_DIR : undefined + const bootstrapResp = bootstrapNetwork(namespace, argv, undefined, undefined, undefined, undefined, undefined, undefined, false) + const k8 = bootstrapResp.opts.k8 + const accountManager = bootstrapResp.opts.accountManager + const configManager = bootstrapResp.opts.configManager + const installer = bootstrapResp.opts.platformInstaller + const podName = 'network-node1-0' const packageVersion = 'v0.42.5' + afterAll(async () => { + await k8.deleteNamespace(namespace) + await accountManager.close() + }, 180000) + beforeAll(async () => { if (!fs.existsSync(testCacheDir)) { fs.mkdirSync(testCacheDir) @@ -79,111 +97,4 @@ describe('PackageInstallerE2E', () => { testLogger.showUser(outputs) }, 60000) }) - - describe('prepareConfigTxt', () => { - it('should succeed in generating config.txt', async () => { - const tmpDir = getTmpDir() - const configPath = `${tmpDir}/config.txt` - const nodeIDs = ['node0'] - const chainId = '299' - - const configLines = await installer.prepareConfigTxt(nodeIDs, configPath, packageVersion, chainId) - - // verify format is correct - expect(configLines.length).toBe(4) - expect(configLines[0]).toBe(`swirld, ${chainId}`) - expect(configLines[1]).toBe(`app, ${constants.HEDERA_APP_NAME}`) - expect(configLines[2]).toContain('address, 0, node0, node0, 1') - expect(configLines[3]).toBe('nextNodeId, 1') - - // verify the file exists - expect(fs.existsSync(configPath)).toBeTruthy() - const fileContents = fs.readFileSync(configPath).toString() - - // verify file content matches - expect(fileContents).toBe(configLines.join('\n')) - - fs.rmSync(tmpDir, { recursive: true }) - }, defaultTimeout) - }) - - describe('copyGossipKeys', () => { - it('should succeed to copy legacy pfx gossip keys for node0', async () => { - const podName = 'network-node0-0' - const nodeId = 'node0' - - // generate pfx keys - const pfxDir = 'test/data/pfx' - await k8.execContainer(podName, constants.ROOT_CONTAINER, ['bash', '-c', `rm -f ${constants.HEDERA_HAPI_PATH}/data/keys/*`]) - const fileList = await installer.copyGossipKeys(podName, pfxDir, ['node0'], constants.KEY_FORMAT_PFX) - - const destDir = `${constants.HEDERA_HAPI_PATH}/data/keys` - expect(fileList.length).toBe(2) - expect(fileList).toContain(`${destDir}/${Templates.renderGossipPfxPrivateKeyFile(nodeId)}`) - expect(fileList).toContain(`${destDir}/public.pfx`) - }, 60000) - - it('should succeed to copy pem gossip keys for node0', async () => { - const podName = 'network-node0-0' - - const pemDir = 'test/data/pem' - await k8.execContainer(podName, constants.ROOT_CONTAINER, ['bash', '-c', `rm -f ${constants.HEDERA_HAPI_PATH}/data/keys/*`]) - const fileList = await installer.copyGossipKeys(podName, pemDir, ['node0'], constants.KEY_FORMAT_PEM) - - const destDir = `${constants.HEDERA_HAPI_PATH}/data/keys` - expect(fileList.length).toBe(4) - expect(fileList).toContain(`${destDir}/${Templates.renderGossipPemPrivateKeyFile(constants.SIGNING_KEY_PREFIX, 'node0')}`) - expect(fileList).toContain(`${destDir}/${Templates.renderGossipPemPrivateKeyFile(constants.AGREEMENT_KEY_PREFIX, 'node0')}`) - - // public keys - expect(fileList).toContain(`${destDir}/${Templates.renderGossipPemPublicKeyFile(constants.SIGNING_KEY_PREFIX, 'node0')}`) - expect(fileList).toContain(`${destDir}/${Templates.renderGossipPemPublicKeyFile(constants.AGREEMENT_KEY_PREFIX, 'node0')}`) - }, 60000) - }) - - describe('copyTLSKeys', () => { - it('should succeed to copy TLS keys for node0', async () => { - const nodeId = 'node0' - const podName = 
Template.renderNetworkPodName(nodeId) - const tmpDir = getTmpDir() - - // create mock files - const pemDir = 'test/data/pem' - await k8.execContainer(podName, constants.ROOT_CONTAINER, ['bash', '-c', `rm -f ${constants.HEDERA_HAPI_PATH}/hedera.*`]) - const fileList = await installer.copyTLSKeys(podName, pemDir) - - expect(fileList.length).toBe(2) // [data , hedera.crt, hedera.key] - expect(fileList.length).toBeGreaterThanOrEqual(2) - expect(fileList).toContain(`${constants.HEDERA_HAPI_PATH}/hedera.crt`) - expect(fileList).toContain(`${constants.HEDERA_HAPI_PATH}/hedera.key`) - - fs.rmSync(tmpDir, { recursive: true }) - }, defaultTimeout) - }) - - describe('copyPlatformConfigFiles', () => { - it('should succeed to copy platform config files for node0', async () => { - const podName = 'network-node0-0' - await k8.execContainer(podName, constants.ROOT_CONTAINER, ['bash', '-c', `rm -f ${constants.HEDERA_HAPI_PATH}/*.txt`]) - await k8.execContainer(podName, constants.ROOT_CONTAINER, ['bash', '-c', `rm -f ${constants.HEDERA_HAPI_PATH}/*.xml`]) - await k8.execContainer(podName, constants.ROOT_CONTAINER, ['bash', '-c', `rm -f ${constants.HEDERA_HAPI_PATH}/data/config/*.properties`]) - - const tmpDir = getTmpDir() - const nodeIDs = ['node0'] - const releaseTag = 'v0.42.0' - - fs.cpSync(`${constants.RESOURCES_DIR}/templates`, `${tmpDir}/templates`, { recursive: true }) - await installer.prepareConfigTxt(nodeIDs, `${tmpDir}/config.txt`, releaseTag, constants.HEDERA_CHAIN_ID, `${tmpDir}/templates/config.template`) - - const fileList = await installer.copyPlatformConfigFiles(podName, tmpDir) - expect(fileList.length).toBeGreaterThanOrEqual(6) - expect(fileList).toContain(`${constants.HEDERA_HAPI_PATH}/config.txt`) - expect(fileList).toContain(`${constants.HEDERA_HAPI_PATH}/log4j2.xml`) - expect(fileList).toContain(`${constants.HEDERA_HAPI_PATH}/settings.txt`) - expect(fileList).toContain(`${constants.HEDERA_HAPI_PATH}/data/config/api-permission.properties`) - expect(fileList).toContain(`${constants.HEDERA_HAPI_PATH}/data/config/application.properties`) - expect(fileList).toContain(`${constants.HEDERA_HAPI_PATH}/data/config/bootstrap.properties`) - fs.rmSync(tmpDir, { recursive: true }) - }, defaultTimeout) - }) }) diff --git a/test/e2e/e2e_node_util.js b/test/e2e/e2e_node_util.js index 5970c724c..1ff6f283f 100644 --- a/test/e2e/e2e_node_util.js +++ b/test/e2e/e2e_node_util.js @@ -16,12 +16,6 @@ * @jest-environment steps */ -import { - AccountCreateTransaction, - Hbar, - HbarUnit, - PrivateKey -} from '@hashgraph/sdk' import { afterAll, afterEach, @@ -32,288 +26,171 @@ import { } from '@jest/globals' import { flags } from '../../src/commands/index.mjs' import { - constants, Templates -} from '../../src/core/index.mjs' -import { + accountCreationShouldSucceed, balanceQueryShouldSucceed, bootstrapNetwork, getDefaultArgv, getTestConfigManager, - getTmpDir, HEDERA_PLATFORM_VERSION_TAG, TEST_CLUSTER } from '../test_util.js' import { getNodeLogs, sleep } from '../../src/core/helpers.mjs' -import path from 'path' -import fs from 'fs' -import crypto from 'crypto' -import { ROOT_CONTAINER } from '../../src/core/constants.mjs' +import { NodeCommand } from '../../src/commands/node.mjs' -export function e2eNodeKeyRefreshAddTest (keyFormat, testName, mode, releaseTag = HEDERA_PLATFORM_VERSION_TAG) { +export function e2eNodeKeyRefreshTest (testName, mode, releaseTag = HEDERA_PLATFORM_VERSION_TAG) { const defaultTimeout = 120000 - describe(`NodeCommand [testName ${testName}, mode ${mode}, keyFormat: ${keyFormat}, 
release ${releaseTag}]`, () => { - const namespace = testName - const argv = getDefaultArgv() - argv[flags.namespace.name] = namespace - argv[flags.releaseTag.name] = releaseTag - argv[flags.keyFormat.name] = keyFormat - argv[flags.nodeIDs.name] = 'node0,node1,node2,node3' - argv[flags.generateGossipKeys.name] = true - argv[flags.generateTlsKeys.name] = true - argv[flags.clusterName.name] = TEST_CLUSTER - // set the env variable SOLO_FST_CHARTS_DIR if developer wants to use local FST charts - argv[flags.chartDirectory.name] = process.env.SOLO_FST_CHARTS_DIR ? process.env.SOLO_FST_CHARTS_DIR : undefined - - const bootstrapResp = bootstrapNetwork(testName, argv) - const accountManager = bootstrapResp.opts.accountManager - const k8 = bootstrapResp.opts.k8 - const nodeCmd = bootstrapResp.cmd.nodeCmd - - afterEach(async () => { - await nodeCmd.close() - await accountManager.close() - }, defaultTimeout) - - afterAll(async () => { - await getNodeLogs(k8, namespace) - await k8.deleteNamespace(namespace) - }, 180000) - - describe(`Node should have started successfully [mode ${mode}, release ${releaseTag}, keyFormat: ${keyFormat}]`, () => { - balanceQueryShouldSucceed(accountManager, nodeCmd, namespace) - - accountCreationShouldSucceed(accountManager, nodeCmd, namespace) - - it(`Node Proxy should be UP [mode ${mode}, release ${releaseTag}, keyFormat: ${keyFormat}`, async () => { - expect.assertions(1) - - try { - await expect(k8.waitForPodReady( - ['app=haproxy-node0', 'fullstack.hedera.com/type=haproxy'], - 1, 300, 1000)).resolves.toBeTruthy() - } catch (e) { - nodeCmd.logger.showUserError(e) - expect(e).toBeNull() - } finally { + describe( + `NodeCommand [testName ${testName}, mode ${mode}, release ${releaseTag}]`, + () => { + const namespace = testName + const argv = getDefaultArgv() + argv[flags.namespace.name] = namespace + argv[flags.releaseTag.name] = releaseTag + argv[flags.nodeIDs.name] = 'node1,node2,node3' + argv[flags.generateGossipKeys.name] = true + argv[flags.generateTlsKeys.name] = true + argv[flags.clusterName.name] = TEST_CLUSTER + argv[flags.devMode.name] = true + // set the env variable SOLO_FST_CHARTS_DIR if developer wants to use local FST charts + argv[flags.chartDirectory.name] = process.env.SOLO_FST_CHARTS_DIR + ? 
process.env.SOLO_FST_CHARTS_DIR + : undefined + + const bootstrapResp = bootstrapNetwork(testName, argv) + const accountManager = bootstrapResp.opts.accountManager + const k8 = bootstrapResp.opts.k8 + const nodeCmd = bootstrapResp.cmd.nodeCmd + + afterEach(async () => { await nodeCmd.close() + await accountManager.close() + }, defaultTimeout) + + afterAll(async () => { + await getNodeLogs(k8, namespace) + await k8.deleteNamespace(namespace) + }, 600000) + + describe( + `Node should have started successfully [mode ${mode}, release ${releaseTag}]`, + () => { + balanceQueryShouldSucceed(accountManager, nodeCmd, namespace) + + accountCreationShouldSucceed(accountManager, nodeCmd, namespace) + + it(`Node Proxy should be UP [mode ${mode}, release ${releaseTag}`, + async () => { + expect.assertions(1) + + try { + await expect(k8.waitForPodReady( + ['app=haproxy-node1', + 'fullstack.hedera.com/type=haproxy'], + 1, 300, 1000)).resolves.toBeTruthy() + } catch (e) { + nodeCmd.logger.showUserError(e) + expect(e).toBeNull() + } finally { + await nodeCmd.close() + } + }, defaultTimeout) + }) + + describe( + `Node should refresh successfully [mode ${mode}, release ${releaseTag}]`, + () => { + const nodeId = 'node1' + + beforeAll(async () => { + const podName = await nodeRefreshTestSetup(argv, testName, k8, + nodeId) + if (mode === 'kill') { + const resp = await k8.kubeClient.deleteNamespacedPod(podName, + namespace) + expect(resp.response.statusCode).toEqual(200) + await sleep(20000) // sleep to wait for pod to finish terminating + } else if (mode === 'stop') { + await expect(nodeCmd.stop(argv)).resolves.toBeTruthy() + await sleep(20000) // give time for node to stop and update its logs + } else { + throw new Error(`invalid mode: ${mode}`) + } + }, 120000) + + nodePodShouldBeRunning(nodeCmd, namespace, nodeId) + + nodeShouldNotBeActive(nodeCmd, nodeId) + + nodeRefreshShouldSucceed(nodeId, nodeCmd, argv) + + balanceQueryShouldSucceed(accountManager, nodeCmd, namespace) + + accountCreationShouldSucceed(accountManager, nodeCmd, namespace) + }) + + function nodePodShouldBeRunning (nodeCmd, namespace, nodeId) { + it(`${nodeId} should be running`, async () => { + try { + await expect(nodeCmd.checkNetworkNodePod(namespace, + nodeId)).resolves.toBeTruthy() + } catch (e) { + nodeCmd.logger.showUserError(e) + expect(e).toBeNull() + } finally { + await nodeCmd.close() + } + }, defaultTimeout) } - }, defaultTimeout) - }) - describe(`Node should refresh successfully [mode ${mode}, release ${releaseTag}, keyFormat: ${keyFormat}]`, () => { - const nodeId = 'node0' - - beforeAll(async () => { - const podName = await nodeRefreshTestSetup(argv, testName, k8, nodeId) - if (mode === 'kill') { - const resp = await k8.kubeClient.deleteNamespacedPod(podName, namespace) - expect(resp.response.statusCode).toEqual(200) - await sleep(20000) // sleep to wait for pod to finish terminating - } else if (mode === 'stop') { - await expect(nodeCmd.stop(argv)).resolves.toBeTruthy() - await sleep(20000) // give time for node to stop and update its logs - } else { - throw new Error(`invalid mode: ${mode}`) + function nodeRefreshShouldSucceed (nodeId, nodeCmd, argv) { + it(`${nodeId} refresh should succeed`, async () => { + try { + await expect(nodeCmd.refresh(argv)).resolves.toBeTruthy() + expect(nodeCmd.getUnusedConfigs( + NodeCommand.REFRESH_CONFIGS_NAME)).toEqual( + [flags.devMode.constName]) + } catch (e) { + nodeCmd.logger.showUserError(e) + expect(e).toBeNull() + } finally { + await nodeCmd.close() + await sleep(10000) // sleep to 
wait for node to finish starting + } + }, 1200000) } - }, 120000) - - nodePodShouldBeRunning(nodeCmd, namespace, nodeId) - nodeShouldNotBeActive(nodeCmd, nodeId) - - nodeRefreshShouldSucceed(nodeId, nodeCmd, argv) - - balanceQueryShouldSucceed(accountManager, nodeCmd, namespace) - - accountCreationShouldSucceed(accountManager, nodeCmd, namespace) - }) - - describe(`Should add a new node to the network [release ${releaseTag}, keyFormat: ${keyFormat}]`, () => { - const nodeId = 'node4' - let existingServiceMap - let existingNodeIdsPrivateKeysHash - - beforeAll(async () => { - argv[flags.nodeIDs.name] = nodeId - const configManager = getTestConfigManager(`${testName}-solo.config`) - configManager.update(argv, true) - existingServiceMap = await accountManager.getNodeServiceMap(namespace) - existingNodeIdsPrivateKeysHash = await getNodeIdsPrivateKeysHash(existingServiceMap, namespace, keyFormat, k8, getTmpDir()) - }, defaultTimeout) - - it(`${nodeId} should not exist`, async () => { - try { - await expect(nodeCmd.checkNetworkNodePod(namespace, nodeId, 10, 50)).rejects.toThrowError(`no pod found for nodeId: ${nodeId}`) - } catch (e) { - nodeCmd.logger.showUserError(e) - expect(e).toBeNull() - } finally { - await nodeCmd.close() + function nodeShouldNotBeActive (nodeCmd, nodeId) { + it(`${nodeId} should not be ACTIVE`, async () => { + expect(2) + try { + await expect( + nodeCmd.checkNetworkNodeActiveness(namespace, nodeId, { title: '' }, '', 44, undefined, 15) + ).rejects.toThrowError() + } catch (e) { + expect(e).not.toBeNull() + } finally { + await nodeCmd.close() + } + }, defaultTimeout) } - }, 180000) - - balanceQueryShouldSucceed(accountManager, nodeCmd, namespace) - accountCreationShouldSucceed(accountManager, nodeCmd, namespace) - - it(`add ${nodeId} to the network`, async () => { - try { - await expect(nodeCmd.add(argv)).resolves.toBeTruthy() - } catch (e) { - nodeCmd.logger.showUserError(e) - expect(e).toBeNull() - } finally { - await nodeCmd.close() - await sleep(10000) // sleep to wait for node to finish starting - } - }, 600000) - - balanceQueryShouldSucceed(accountManager, nodeCmd, namespace) - - accountCreationShouldSucceed(accountManager, nodeCmd, namespace) - - it('existing nodes private keys should not have changed', async () => { - const currentNodeIdsPrivateKeysHash = await getNodeIdsPrivateKeysHash(existingServiceMap, namespace, keyFormat, k8, getTmpDir()) - - for (const [nodeId, existingKeyHashMap] of existingNodeIdsPrivateKeysHash.entries()) { - const currentNodeKeyHashMap = currentNodeIdsPrivateKeysHash.get(nodeId) - - for (const [keyFileName, existingKeyHash] of existingKeyHashMap.entries()) { - expect(`${nodeId}:${keyFileName}:${currentNodeKeyHashMap.get(keyFileName)}`).toEqual( - `${nodeId}:${keyFileName}:${existingKeyHash}`) + async function nodeRefreshTestSetup (argv, testName, k8, nodeId) { + argv[flags.nodeIDs.name] = nodeId + const configManager = getTestConfigManager(`${testName}-solo.config`) + configManager.update(argv, true) + + const podArray = await k8.getPodsByLabel( + [`app=network-${nodeId}`, + 'fullstack.hedera.com/type=network-node']) + + if (podArray.length > 0) { + const podName = podArray[0].metadata.name + k8.logger.info(`nodeRefreshTestSetup: podName: ${podName}`) + return podName + } else { + throw new Error(`pod for ${nodeId} not found`) } } - }, defaultTimeout) - }) - }) - - function accountCreationShouldSucceed (accountManager, nodeCmd, namespace) { - it('Account creation should succeed', async () => { - expect.assertions(3) - - try { - await 
accountManager.loadNodeClient(namespace) - expect(accountManager._nodeClient).not.toBeNull() - const privateKey = PrivateKey.generate() - const amount = 100 - - const newAccount = await new AccountCreateTransaction() - .setKey(privateKey) - .setInitialBalance(Hbar.from(amount, HbarUnit.Hbar)) - .execute(accountManager._nodeClient) - - // Get the new account ID - const getReceipt = await newAccount.getReceipt(accountManager._nodeClient) - const accountInfo = { - accountId: getReceipt.accountId.toString(), - privateKey: privateKey.toString(), - publicKey: privateKey.publicKey.toString(), - balance: amount - } - - expect(accountInfo.accountId).not.toBeNull() - expect(accountInfo.balance).toEqual(amount) - } catch (e) { - nodeCmd.logger.showUserError(e) - expect(e).toBeNull() - } - }, defaultTimeout) - } - - function nodePodShouldBeRunning (nodeCmd, namespace, nodeId) { - it(`${nodeId} should be running`, async () => { - try { - await expect(nodeCmd.checkNetworkNodePod(namespace, nodeId)).resolves.toBeTruthy() - } catch (e) { - nodeCmd.logger.showUserError(e) - expect(e).toBeNull() - } finally { - await nodeCmd.close() - } - }, defaultTimeout) - } - - function nodeRefreshShouldSucceed (nodeId, nodeCmd, argv) { - it(`${nodeId} refresh should succeed`, async () => { - try { - await expect(nodeCmd.refresh(argv)).resolves.toBeTruthy() - } catch (e) { - nodeCmd.logger.showUserError(e) - expect(e).toBeNull() - } finally { - await nodeCmd.close() - await sleep(10000) // sleep to wait for node to finish starting - } - }, 1200000) - } - - function nodeShouldNotBeActive (nodeCmd, nodeId) { - it(`${nodeId} should not be ACTIVE`, async () => { - expect(2) - try { - await expect(nodeCmd.checkNetworkNodeState(nodeId, 5)).rejects.toThrowError() - } catch (e) { - expect(e).not.toBeNull() - } finally { - await nodeCmd.close() - } - }, defaultTimeout) - } - - async function nodeRefreshTestSetup (argv, testName, k8, nodeId) { - argv[flags.nodeIDs.name] = nodeId - const configManager = getTestConfigManager(`${testName}-solo.config`) - configManager.update(argv, true) - - const podArray = await k8.getPodsByLabel( - [`app=network-${nodeId}`, 'fullstack.hedera.com/type=network-node']) - - if (podArray.length > 0) { - const podName = podArray[0].metadata.name - k8.logger.info(`nodeRefreshTestSetup: podName: ${podName}`) - return podName - } else { - throw new Error(`pod for ${nodeId} not found`) - } - } - - async function getNodeIdsPrivateKeysHash (networkNodeServicesMap, namespace, keyFormat, k8, destDir) { - const dataKeysDir = `${constants.HEDERA_HAPI_PATH}/data/keys` - const tlsKeysDir = constants.HEDERA_HAPI_PATH - const nodeKeyHashMap = new Map() - for (const networkNodeServices of networkNodeServicesMap.values()) { - const keyHashMap = new Map() - const nodeId = networkNodeServices.nodeName - const uniqueNodeDestDir = path.join(destDir, nodeId) - if (!fs.existsSync(uniqueNodeDestDir)) { - fs.mkdirSync(uniqueNodeDestDir, { recursive: true }) - } - switch (keyFormat) { - case constants.KEY_FORMAT_PFX: - await addKeyHashToMap(k8, nodeId, dataKeysDir, uniqueNodeDestDir, keyHashMap, Templates.renderGossipPfxPrivateKeyFile(nodeId)) - break - case constants.KEY_FORMAT_PEM: - await addKeyHashToMap(k8, nodeId, dataKeysDir, uniqueNodeDestDir, keyHashMap, Templates.renderGossipPemPrivateKeyFile(constants.SIGNING_KEY_PREFIX, nodeId)) - await addKeyHashToMap(k8, nodeId, dataKeysDir, uniqueNodeDestDir, keyHashMap, Templates.renderGossipPemPrivateKeyFile(constants.AGREEMENT_KEY_PREFIX, nodeId)) - break - default: - throw new 
Error(`invalid keyFormat: ${keyFormat}`) - } - await addKeyHashToMap(k8, nodeId, tlsKeysDir, uniqueNodeDestDir, keyHashMap, 'hedera.key') - nodeKeyHashMap.set(nodeId, keyHashMap) - } - return nodeKeyHashMap - } - - async function addKeyHashToMap (k8, nodeId, keyDir, uniqueNodeDestDir, keyHashMap, privateKeyFileName) { - await k8.copyFrom( - Templates.renderNetworkPodName(nodeId), - ROOT_CONTAINER, - path.join(keyDir, privateKeyFileName), - uniqueNodeDestDir) - const keyBytes = await fs.readFileSync(path.join(uniqueNodeDestDir, privateKeyFileName)) - const keyString = keyBytes.toString() - keyHashMap.set(privateKeyFileName, crypto.createHash('sha256').update(keyString).digest('base64')) - } + }) } diff --git a/test/e2e/setup-e2e.sh b/test/e2e/setup-e2e.sh index 57da9e452..89a7420a2 100755 --- a/test/e2e/setup-e2e.sh +++ b/test/e2e/setup-e2e.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash readonly KIND_IMAGE="kindest/node:v1.27.3@sha256:3966ac761ae0136263ffdb6cfd4db23ef8a83cba8a463690e98317add2c9ba72" echo "SOLO_FST_CHARTS_DIR: ${SOLO_FST_CHARTS_DIR}" +export PATH=${PATH}:~/.solo/bin SOLO_CLUSTER_NAME=solo-e2e SOLO_NAMESPACE=solo-e2e @@ -17,7 +18,7 @@ kind create cluster -n "${SOLO_CLUSTER_NAME}" --image "${KIND_IMAGE}" || exit 1 # Init and deploy a network for e2e tests in (test/e2e/core) # -d ${SOLO_CHARTS_DIR} is optional, if you want to use a local chart, it will be ignored if not set # ********************************************************************************************************************** -solo init --namespace "${SOLO_NAMESPACE}" -i node0 -s "${SOLO_CLUSTER_SETUP_NAMESPACE}" -d "${SOLO_FST_CHARTS_DIR}" --dev || exit 1 # cache args for subsequent commands +solo init --namespace "${SOLO_NAMESPACE}" -i node1 -s "${SOLO_CLUSTER_SETUP_NAMESPACE}" -d "${SOLO_FST_CHARTS_DIR}" --dev || exit 1 # cache args for subsequent commands solo cluster setup || exit 1 helm list --all-namespaces -solo network deploy || exit 1 +sleep 10 # give time for fullstack-setup to finish deploying diff --git a/test/scripts/gen-legacy-keys.sh b/test/scripts/gen-legacy-keys.sh index dd5671e7b..bd3306fc6 100755 --- a/test/scripts/gen-legacy-keys.sh +++ b/test/scripts/gen-legacy-keys.sh @@ -17,7 +17,7 @@ # keysDir="${HOME}/.solo/cache/keys" -ids="node0,node1,node2" +ids="node1,node2,node3" validity=36524 # number of days keytool_path="${HOME}/.solo/bin/jre/bin/keytool" diff --git a/test/scripts/gen-openssl-keys.sh b/test/scripts/gen-openssl-keys.sh index ef8b1dd49..d13a33160 100755 --- a/test/scripts/gen-openssl-keys.sh +++ b/test/scripts/gen-openssl-keys.sh @@ -1,9 +1,8 @@ #!/bin/bash keysDir="${HOME}/.solo/cache/keys" -ids="node0,node1,node2" +ids="node1,node2,node3" validity=36524 # number of days -generate_pfx=false if [ "$#" -gt 0 ]; then ids="${1}" @@ -37,7 +36,6 @@ function backup() { # make a backup of old *.pem files backup "*.pem" -backup "*.pfx" # Generate RSA:3072 key to for signing function generate_signing_key() { @@ -47,8 +45,6 @@ function generate_signing_key() { local s_key="${prefix}-private-${n}.pem" local s_csr="${prefix}-csr-${n}.pem" local s_cert="${prefix}-public-${n}.pem" - local s_key_pfx="${prefix}-private-${n}.pfx" - local s_cert_pfx="${prefix}-public-${n}.pfx" local s_friendly_name="${prefix}-${n}" echo "------------------------------------------------------------------------------------" @@ -57,8 +53,6 @@ function generate_signing_key() { echo "key_file: ${s_key}" echo "csr_file: ${s_csr}" echo "cert_file: ${s_cert}" - echo "key_pfx: ${s_key_pfx}" - echo "cert_pfx: ${s_cert_pfx}" echo 
"------------------------------------------------------------------------------------" # Generate: s_key, s_csr @@ -73,10 +67,6 @@ function generate_signing_key() { echo "------------------------------------------------------------------------------------" openssl x509 -in "${s_cert}" -text -noout - if [[ "${generate_pfx}" == "true" ]]; then - generate_pfx_files "${s_key}" "${s_cert}" "${s_key_pfx}" "${s_cert_pfx}" "${friendly_name}" - fi - # remove csr rm "${s_csr}" @@ -94,8 +84,6 @@ function generate_signed_key() { local key_file="${prefix}-private-${n}.pem" local csr_file="${prefix}-csr-${n}.pem" local cert_file="${prefix}-public-${n}.pem" - local key_pfx="${prefix}-private-${n}.pfx" - local cert_pfx="${prefix}-public-${n}.pfx" local friendly_name="${prefix}-${n}" echo "------------------------------------------------------------------------------------" @@ -104,8 +92,6 @@ function generate_signed_key() { echo "key_file: ${key_file}" echo "csr_file: ${csr_file}" echo "cert_file: ${cert_file}" - echo "key_pfx: ${key_pfx}" - echo "cert_pfx: ${cert_pfx}" echo "s_key: ${s_key}" echo "s_cert: ${s_cert}" echo "------------------------------------------------------------------------------------" @@ -131,41 +117,12 @@ function generate_signed_key() { echo "------------------------------------------------------------------------------------" openssl storeutl -noout -text -certs "${cert_file}" - if [[ "${generate_pfx}" == "true" ]]; then - generate_pfx_files "${key_file}" "${cert_file}" "${key_pfx}" "${cert_pfx}" "${friendly_name}" - fi - # remove csr rm "${csr_file}" return 0 } -function generate_pfx_files() { - let key_file="${1}" - let cert_file="${2}" - let key_pfx="${3}" - let cert_pfx="${4}" - let friendly_name="${5}" - - # generate private.pfx - openssl pkcs12 -export -out "${key_pfx}" -inkey "${key_file}" -in "${cert_file}" -iter 10000 \ - -name "${friendly_name}" -macsaltlen 20 -password pass:"${dummy_password}" || return 1 - echo "------------------------------------------------------------------------------------" - echo "Generated: ${key_pfx}" - echo "------------------------------------------------------------------------------------" - openssl pkcs12 -info -in "${key_pfx}" -passin pass:"${dummy_password}" -passout pass:"${dummy_password}" -nokeys # do not output private key - - # generate public.pfx - openssl pkcs12 -export -nokeys -out "${cert_pfx}" -in "${cert_file}" -iter 10000 \ - -name "${friendly_name}" -macsaltlen 20 -password pass:"${dummy_password}" -CAfile "${s_cert}" -chain || return 1 - echo "------------------------------------------------------------------------------------" - echo "Generated: ${cert_pfx}" - echo "------------------------------------------------------------------------------------" - #openssl pkcs12 -info -in a-public-node0.pfx -passin pass:password -passout pass:password -nokeys - openssl pkcs12 -info -in "${cert_pfx}" -passin pass:"${dummy_password}" -passout pass:"${dummy_password}" -nokeys -} - for nm in "${names[@]}"; do n="$(echo "${nm}" | tr '[A-Z]' '[a-z]')" s_key="${s_key_prefix}-private-${n}.pem" diff --git a/test/testSequencer.mjs b/test/testSequencer.mjs index 2a3be4e47..05eb5ea6f 100644 --- a/test/testSequencer.mjs +++ b/test/testSequencer.mjs @@ -18,6 +18,7 @@ import Sequencer from '@jest/test-sequencer' import Seedrandom from 'seedrandom' import { NewLogger } from '../src/core/logging.mjs' import chalk from 'chalk' +import path from 'path' export default class testSequencer extends Sequencer.default { logger = NewLogger('debug') @@ -34,9 
diff --git a/test/test_add.mjs b/test/test_add.mjs
new file mode 100644
index 000000000..72ea442ba
--- /dev/null
+++ b/test/test_add.mjs
@@ -0,0 +1,104 @@
+/**
+ * Copyright (C) 2024 Hedera Hashgraph, LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ * @jest-environment steps
+ */
+import { afterAll, describe, expect, it } from '@jest/globals'
+import {
+  accountCreationShouldSucceed,
+  balanceQueryShouldSucceed,
+  bootstrapNetwork,
+  getDefaultArgv,
+  getNodeIdsPrivateKeysHash,
+  getTmpDir,
+  HEDERA_PLATFORM_VERSION_TAG
+} from './test_util.js'
+import { flags } from '../src/commands/index.mjs'
+import { getNodeLogs } from '../src/core/helpers.mjs'
+import { NodeCommand } from '../src/commands/node.mjs'
+
+export function testNodeAdd (localBuildPath
+) {
+  describe('Node add should succeed', () => {
+    const suffix = localBuildPath.substring(0, 5)
+    const defaultTimeout = 120000
+    const namespace = 'node-add' + suffix
+    const argv = getDefaultArgv()
+    argv[flags.nodeIDs.name] = 'node1,node2,node3'
+    argv[flags.generateGossipKeys.name] = true
+    argv[flags.generateTlsKeys.name] = true
+    // set the env variable SOLO_FST_CHARTS_DIR if developer wants to use local FST charts
+    argv[flags.chartDirectory.name] = process.env.SOLO_FST_CHARTS_DIR ? process.env.SOLO_FST_CHARTS_DIR : undefined
+    argv[flags.releaseTag.name] = HEDERA_PLATFORM_VERSION_TAG
+    argv[flags.namespace.name] = namespace
+    argv[flags.force.name] = true
+    argv[flags.persistentVolumeClaims.name] = true
+    argv[flags.localBuildPath.name] = localBuildPath
+
+    const bootstrapResp = bootstrapNetwork(namespace, argv)
+    const nodeCmd = bootstrapResp.cmd.nodeCmd
+    const accountCmd = bootstrapResp.cmd.accountCmd
+    const networkCmd = bootstrapResp.cmd.networkCmd
+    const k8 = bootstrapResp.opts.k8
+    let existingServiceMap
+    let existingNodeIdsPrivateKeysHash
+
+    afterAll(async () => {
+      await getNodeLogs(k8, namespace)
+      await nodeCmd.accountManager.close()
+      await nodeCmd.stop(argv)
+      await networkCmd.destroy(argv)
+      await k8.deleteNamespace(namespace)
+    }, 600000)
+
+    it('cache current version of private keys', async () => {
+      existingServiceMap = await nodeCmd.accountManager.getNodeServiceMap(namespace)
+      existingNodeIdsPrivateKeysHash = await getNodeIdsPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir())
+    }, defaultTimeout)
+
+    it('should succeed with init command', async () => {
+      const status = await accountCmd.init(argv)
+      expect(status).toBeTruthy()
+    }, 450000)
+
+    it('should add a new node to the network successfully', async () => {
+      await nodeCmd.add(argv)
+      expect(nodeCmd.getUnusedConfigs(NodeCommand.ADD_CONFIGS_NAME)).toEqual([
+        flags.app.constName,
+        flags.chainId.constName,
+        flags.devMode.constName,
+        flags.adminKey.constName
+      ])
+      await nodeCmd.accountManager.close()
+    }, 800000)
+
+    balanceQueryShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace)
+
+    accountCreationShouldSucceed(nodeCmd.accountManager, nodeCmd, namespace)
+
+    it('existing nodes private keys should not have changed', async () => {
+      const currentNodeIdsPrivateKeysHash = await getNodeIdsPrivateKeysHash(existingServiceMap, namespace, k8, getTmpDir())
+
+      for (const [nodeId, existingKeyHashMap] of existingNodeIdsPrivateKeysHash.entries()) {
+        const currentNodeKeyHashMap = currentNodeIdsPrivateKeysHash.get(nodeId)
+
+        for (const [keyFileName, existingKeyHash] of existingKeyHashMap.entries()) {
+          expect(`${nodeId}:${keyFileName}:${currentNodeKeyHashMap.get(keyFileName)}`).toEqual(
+            `${nodeId}:${keyFileName}:${existingKeyHash}`)
+        }
+      }
+    }, defaultTimeout)
+  })
+}
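Note that test_add.mjs runs nothing by itself; it exports a parameterized suite so the same node-add flow can be driven against different platform builds. A hypothetical caller (file name and build path invented here for illustration only) would look like:

```js
// Hypothetical wrapper spec; the path below is an assumption, not part of this diff.
import { testNodeAdd } from './test_add.mjs'

// drives the whole 'Node add' suite against a local platform build
testNodeAdd('../hedera-services/hedera-node/data')
```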
diff --git a/test/test_util.js b/test/test_util.js
index bb2345279..9987d3bad 100644
--- a/test/test_util.js
+++ b/test/test_util.js
@@ -13,6 +13,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  *
+ * @jest-environment steps
  */
 import { afterAll, beforeAll, describe, expect, it } from '@jest/globals'
 import fs from 'fs'
@@ -39,14 +40,21 @@ import {
   KeyManager,
   logging,
   PackageDownloader,
-  PlatformInstaller, ProfileManager,
+  PlatformInstaller, ProfileManager, Templates,
   Zippy
 } from '../src/core/index.mjs'
-import { AccountBalanceQuery } from '@hashgraph/sdk'
+import {
+  AccountBalanceQuery,
+  AccountCreateTransaction, Hbar, HbarUnit,
+  PrivateKey
+} from '@hashgraph/sdk'
+import { ROOT_CONTAINER } from '../src/core/constants.mjs'
+import crypto from 'crypto'
+import { AccountCommand } from '../src/commands/account.mjs'

 export const testLogger = logging.NewLogger('debug', true)
 export const TEST_CLUSTER = 'solo-e2e'
-export const HEDERA_PLATFORM_VERSION_TAG = 'v0.49.0-alpha.2'
+export const HEDERA_PLATFORM_VERSION_TAG = 'v0.54.0-alpha.4'

 export function getTestCacheDir (testName) {
   const baseDir = 'test/data/tmp'
@@ -93,13 +101,15 @@ export function getDefaultArgv () {
  * @param clusterCmdArg an instance of command/ClusterCommand
  * @param networkCmdArg an instance of command/NetworkCommand
  * @param nodeCmdArg an instance of command/NodeCommand
+ * @param accountCmdArg an instance of command/AccountCommand
  */
 export function bootstrapTestVariables (testName, argv,
   k8Arg = null,
   initCmdArg = null,
   clusterCmdArg = null,
   networkCmdArg = null,
-  nodeCmdArg = null
+  nodeCmdArg = null,
+  accountCmdArg = null
 ) {
   const namespace = argv[flags.namespace.name] || 'bootstrap-ns'
   const cacheDir = argv[flags.cacheDir.name] || getTestCacheDir(testName)
@@ -139,6 +149,7 @@ export function bootstrapTestVariables (testName, argv,
   const clusterCmd = clusterCmdArg || new ClusterCommand(opts)
   const networkCmd = networkCmdArg || new NetworkCommand(opts)
   const nodeCmd = nodeCmdArg || new NodeCommand(opts)
+  const accountCmd = accountCmdArg || new AccountCommand(opts, constants.SHORTER_SYSTEM_ACCOUNTS)
   return {
     namespace,
     opts,
@@ -146,7 +157,8 @@ export function bootstrapTestVariables (testName, argv,
     initCmd,
     clusterCmd,
     networkCmd,
-    nodeCmd
+    nodeCmd,
+    accountCmd
   }
  }
 }
@@ -161,15 +173,19 @@
  * @param clusterCmdArg an instance of command/ClusterCommand
  * @param networkCmdArg an instance of command/NetworkCommand
  * @param nodeCmdArg an instance of command/NodeCommand
+ * @param accountCmdArg an instance of command/AccountCommand
+ * @param startNodes start nodes after deployment, default is true
  */
 export function bootstrapNetwork (testName, argv,
   k8Arg = null,
   initCmdArg = null,
   clusterCmdArg = null,
   networkCmdArg = null,
-  nodeCmdArg = null
+  nodeCmdArg = null,
+  accountCmdArg = null,
+  startNodes = true
 ) {
-  const bootstrapResp = bootstrapTestVariables(testName, argv, k8Arg, initCmdArg, clusterCmdArg, networkCmdArg, nodeCmdArg)
+  const bootstrapResp = bootstrapTestVariables(testName, argv, k8Arg, initCmdArg, clusterCmdArg, networkCmdArg, nodeCmdArg, accountCmdArg)
   const namespace = bootstrapResp.namespace
   const initCmd = bootstrapResp.cmd.initCmd
   const k8 = bootstrapResp.opts.k8
@@ -178,7 +194,7 @@ export function bootstrapNetwork (testName, argv,
   const nodeCmd = bootstrapResp.cmd.nodeCmd
   const chartManager = bootstrapResp.opts.chartManager

-  describe(`Bootstrap network for test [release ${argv[flags.releaseTag.name]}, keyFormat: ${argv[flags.keyFormat.name]}]`, () => {
+  describe(`Bootstrap network for test [release ${argv[flags.releaseTag.name]}]`, () => {
     beforeAll(() => {
       bootstrapResp.opts.logger.showUser(`------------------------- START: bootstrap (${testName}) ----------------------------`)
     })
@@ -204,29 +220,58 @@ export function bootstrapNetwork (testName, argv,
       }
     }, 120000)

+    it('generate key files', async () => {
+      await expect(nodeCmd.keys(argv)).resolves.toBeTruthy()
+      expect(nodeCmd.getUnusedConfigs(NodeCommand.KEYS_CONFIGS_NAME)).toEqual([
+        flags.cacheDir.constName,
+        flags.devMode.constName
+      ])
+    }, 120000)
+
     it('should succeed with network deploy', async () => {
+      expect.assertions(1)
       await networkCmd.deploy(argv)
+
+      expect(networkCmd.getUnusedConfigs(NetworkCommand.DEPLOY_CONFIGS_NAME)).toEqual([
+        flags.apiPermissionProperties.constName,
+        flags.applicationEnv.constName,
+        flags.applicationProperties.constName,
+        flags.bootstrapProperties.constName,
+        flags.chainId.constName,
+        flags.log4j2Xml.constName,
+        flags.profileFile.constName,
+        flags.profileName.constName,
+        flags.settingTxt.constName
+      ])
     }, 180000)

-    it('should succeed with node setup command', async () => {
-      expect.assertions(1)
-      try {
-        await expect(nodeCmd.setup(argv)).resolves.toBeTruthy()
-      } catch (e) {
-        nodeCmd.logger.showUserError(e)
-        expect(e).toBeNull()
-      }
-    }, 240000)
+    if (startNodes) {
+      it('should succeed with node setup command', async () => {
+        expect.assertions(2)
+        // cache this, because `solo node setup.finalize()` will reset it to false
+        try {
+          await expect(nodeCmd.setup(argv)).resolves.toBeTruthy()
+          expect(nodeCmd.getUnusedConfigs(NodeCommand.SETUP_CONFIGS_NAME)).toEqual([
+            flags.app.constName,
+            flags.appConfig.constName,
+            flags.devMode.constName
+          ])
+        } catch (e) {
+          nodeCmd.logger.showUserError(e)
+          expect(e).toBeNull()
+        }
+      }, 240000)

-    it('should succeed with node start command', async () => {
-      expect.assertions(1)
-      try {
-        await expect(nodeCmd.start(argv)).resolves.toBeTruthy()
-      } catch (e) {
-        nodeCmd.logger.showUserError(e)
-        expect(e).toBeNull()
-      }
-    }, 1800000)
+      it('should succeed with node start command', async () => {
+        expect.assertions(1)
+        try {
+          await expect(nodeCmd.start(argv)).resolves.toBeTruthy()
+        } catch (e) {
+          nodeCmd.logger.showUserError(e)
+          expect(e).toBeNull()
+        }
+      }, 1800000)
+    }
   })

   return bootstrapResp
@@ -253,3 +298,65 @@ export function balanceQueryShouldSucceed (accountManager, cmd, namespace) {
     await sleep(1000)
   }, 120000)
 }
+
+export function accountCreationShouldSucceed (accountManager, nodeCmd, namespace) {
+  it('Account creation should succeed', async () => {
+    expect.assertions(3)
+
+    try {
+      await accountManager.loadNodeClient(namespace)
+      expect(accountManager._nodeClient).not.toBeNull()
+      const privateKey = PrivateKey.generate()
+      const amount = 100
+
+      const newAccount = await new AccountCreateTransaction()
+        .setKey(privateKey)
+        .setInitialBalance(Hbar.from(amount, HbarUnit.Hbar))
+        .execute(accountManager._nodeClient)
+
+      // Get the new account ID
+      const getReceipt = await newAccount.getReceipt(accountManager._nodeClient)
+      const accountInfo = {
+        accountId: getReceipt.accountId.toString(),
+        privateKey: privateKey.toString(),
+        publicKey: privateKey.publicKey.toString(),
+        balance: amount
+      }
+
+      expect(accountInfo.accountId).not.toBeNull()
+      expect(accountInfo.balance).toEqual(amount)
+    } catch (e) {
+      nodeCmd.logger.showUserError(e)
+      expect(e).toBeNull()
+    }
+  }, 120000)
+}
+
+export async function getNodeIdsPrivateKeysHash (networkNodeServicesMap, namespace, k8, destDir) {
+  const dataKeysDir = path.join(constants.HEDERA_HAPI_PATH, 'data', 'keys')
+  const tlsKeysDir = constants.HEDERA_HAPI_PATH
+  const nodeKeyHashMap = new Map()
+  for (const networkNodeServices of networkNodeServicesMap.values()) {
+    const keyHashMap = new Map()
+    const nodeId = networkNodeServices.nodeName
+    const uniqueNodeDestDir = path.join(destDir, nodeId)
+    if (!fs.existsSync(uniqueNodeDestDir)) {
+      fs.mkdirSync(uniqueNodeDestDir, { recursive: true })
+    }
+    await addKeyHashToMap(k8, nodeId, dataKeysDir, uniqueNodeDestDir, keyHashMap, Templates.renderGossipPemPrivateKeyFile(constants.SIGNING_KEY_PREFIX, nodeId))
+    await addKeyHashToMap(k8, nodeId, tlsKeysDir, uniqueNodeDestDir, keyHashMap, 'hedera.key')
+    nodeKeyHashMap.set(nodeId, keyHashMap)
+  }
+  return nodeKeyHashMap
+}
+
+async function addKeyHashToMap (k8, nodeId, keyDir, uniqueNodeDestDir, keyHashMap, privateKeyFileName) {
+  await k8.copyFrom(
+    Templates.renderNetworkPodName(nodeId),
+    ROOT_CONTAINER,
+    path.join(keyDir, privateKeyFileName),
+    uniqueNodeDestDir)
+  const keyBytes = await fs.readFileSync(path.join(uniqueNodeDestDir, privateKeyFileName))
+  const keyString = keyBytes.toString()
+  keyHashMap.set(privateKeyFileName, crypto.createHash('sha256').update(keyString).digest('base64'))
+}
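The `addKeyHashToMap` helper added above reduces each copied key file to a base64-encoded SHA-256 digest, so the before/after comparison in test_add.mjs only has to compare short strings instead of key material. The digest step in isolation, using nothing but Node's standard library (the file name is an example taken from the diff):

```js
// Standalone illustration of the digest used by addKeyHashToMap.
import crypto from 'crypto'
import fs from 'fs'

function keyFileHash (filePath) {
  const keyString = fs.readFileSync(filePath).toString()
  // base64-encoded SHA-256, matching what addKeyHashToMap stores in keyHashMap
  return crypto.createHash('sha256').update(keyString).digest('base64')
}

// equal digests before and after `node add` mean the key file was untouched
console.log(keyFileHash('hedera.key'))
```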
diff --git a/test/unit/commands/base.test.mjs b/test/unit/commands/base.test.mjs
index a91e5b3de..943671dd4 100644
--- a/test/unit/commands/base.test.mjs
+++ b/test/unit/commands/base.test.mjs
@@ -25,6 +25,7 @@ import {
 } from '../../../src/core/index.mjs'
 import { BaseCommand } from '../../../src/commands/base.mjs'
 import { K8 } from '../../../src/core/k8.mjs'
+import * as flags from '../../../src/commands/flags.mjs'

 const testLogger = logging.NewLogger('debug', true)
@@ -57,5 +58,68 @@ describe('BaseCommand', () => {
   it('should succeed during valid program check', async () => {
     await expect(baseCmd.run('echo')).resolves.not.toBeNull()
   })
+
+  it('getConfig tracks property usage', async () => {
+    const flagsList = [
+      flags.releaseTag,
+      flags.tlsClusterIssuerType,
+      flags.valuesFile
+    ]
+    const argv = {}
+    argv[flags.releaseTag.name] = 'releaseTag1'
+    argv[flags.tlsClusterIssuerType.name] = 'type2'
+    argv[flags.valuesFile.name] = 'file3'
+    configManager.update(argv)
+
+    const extraVars = ['var1', 'var2']
+
+    /**
+     * @typedef {Object} newClassInstance
+     * @property {string} releaseTag
+     * @property {string} tlsClusterIssuerType
+     * @property {string} valuesFile
+     * @property {string} var1
+     * @property {string} var2
+     * @property {getUnusedConfigs} getUnusedConfigs
+     */
+    /**
+     * @callback getUnusedConfigs
+     * @returns {string[]}
+     */
+
+    const NEW_CLASS1_NAME = 'newClassInstance1'
+    const newClassInstance1 = /** @type {newClassInstance} **/ baseCmd.getConfig(NEW_CLASS1_NAME, flagsList, extraVars)
+    expect(newClassInstance1.releaseTag).toBe('releaseTag1')
+    expect(newClassInstance1.tlsClusterIssuerType).toBe('type2')
+    expect(newClassInstance1.valuesFile).toBe('file3')
+    expect(newClassInstance1.var1).toBe('')
+    expect(newClassInstance1.var2).toBe('')
+    expect(baseCmd.getUnusedConfigs(NEW_CLASS1_NAME)).toEqual([])
+
+    const NEW_CLASS2_NAME = 'newClassInstance2'
+    const newClassInstance2 = /** @type {newClassInstance} **/ baseCmd.getConfig(NEW_CLASS2_NAME, flagsList, extraVars)
+    newClassInstance2.var1 = 'var1'
+    newClassInstance2.var2 = 'var2'
+    expect(newClassInstance2.var1).toBe('var1')
+    expect(newClassInstance2.var2).toBe('var2')
+    expect(baseCmd.getUnusedConfigs(NEW_CLASS2_NAME)).toEqual([
+      flags.releaseTag.constName,
+      flags.tlsClusterIssuerType.constName,
+      flags.valuesFile.constName
+    ])
+
+    const NEW_CLASS3_NAME = 'newClassInstance3'
+    const newClassInstance3 = /** @type {newClassInstance} **/ baseCmd.getConfig(NEW_CLASS3_NAME, flagsList, extraVars)
+    newClassInstance3.var1 = 'var1'
+    expect(newClassInstance3.var1).toBe('var1')
+    expect(newClassInstance3.tlsClusterIssuerType).toBe('type2')
+    expect(baseCmd.getUnusedConfigs(NEW_CLASS3_NAME)).toEqual([
+      flags.releaseTag.constName,
+      flags.valuesFile.constName,
+      'var2'
+    ])
+
+    const newClassInstance4 = baseCmd.getConfig('newClassInstance4', [])
+    expect(newClassInstance4.getUnusedConfigs()).toEqual([])
+  })
 })
})
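The new base.test.mjs case pins down the contract of `BaseCommand.getConfig`: every requested flag and extra var becomes a property, and whatever the command never touches is later reported by `getUnusedConfigs`. The implementation itself is not part of this diff; one way to picture the behaviour is a Proxy that records property access. A sketch of the idea only, not the real code:

```js
// Illustrative only: BaseCommand.getConfig's real implementation is not shown in this diff.
function trackedConfig (values) {
  const used = new Set()
  const config = new Proxy({ ...values }, {
    get (target, prop) {
      if (prop in target) used.add(prop) // reading a property marks it as used
      return target[prop]
    },
    set (target, prop, value) {
      used.add(prop) // assigning also counts as usage
      target[prop] = value
      return true
    }
  })
  const getUnusedConfigs = () => Object.keys(values).filter(k => !used.has(k))
  return { config, getUnusedConfigs }
}

const { config, getUnusedConfigs } = trackedConfig({ releaseTag: 'releaseTag1', var1: '' })
config.var1 = 'var1'
console.log(getUnusedConfigs()) // ['releaseTag'], the same shape the test asserts
```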
diff --git a/test/unit/core/helpers.test.mjs b/test/unit/core/helpers.test.mjs
index d8a935559..913f149b3 100644
--- a/test/unit/core/helpers.test.mjs
+++ b/test/unit/core/helpers.test.mjs
@@ -25,12 +25,12 @@ describe('Helpers', () => {
       output: []
     },
     {
-      input: 'node0',
-      output: ['node0']
+      input: 'node1',
+      output: ['node1']
     },
     {
-      input: 'node0,node2',
-      output: ['node0', 'node2']
+      input: 'node1,node3',
+      output: ['node1', 'node3']
     }
   ])('should be able to parse node ID', (t) => {
     expect(helpers.parseNodeIds(t.input)).toStrictEqual(t.output)
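The helpers.test.mjs table above fixes the expected behaviour of `helpers.parseNodeIds`: an empty string maps to an empty list, and comma-separated IDs split into individual entries. An assumed equivalent for exactly those tabled cases (the real helper may handle more edge cases):

```js
// Assumed stand-in for helpers.parseNodeIds, covering only the cases in the test table.
const parseNodeIds = (input) => input ? input.split(',').map(id => id.trim()) : []

console.log(parseNodeIds(''))            // []
console.log(parseNodeIds('node1'))       // ['node1']
console.log(parseNodeIds('node1,node3')) // ['node1', 'node3']
```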
diff --git a/test/unit/core/key_manager.test.mjs b/test/unit/core/key_manager.test.mjs
index 07a9c1db4..5abee45a6 100644
--- a/test/unit/core/key_manager.test.mjs
+++ b/test/unit/core/key_manager.test.mjs
@@ -18,9 +18,7 @@ import { describe, expect, it } from '@jest/globals'
 import fs from 'fs'
 import os from 'os'
 import path from 'path'
-import { KeytoolDependencyManager } from '../../../src/core/dependency_managers/index.mjs'
-import { constants, Keytool, logging, PackageDownloader, Zippy, KeyManager } from '../../../src/core/index.mjs'
-import { getTmpDir, testLogger } from '../../test_util.js'
+import { constants, logging, KeyManager } from '../../../src/core/index.mjs'

 describe('KeyManager', () => {
   const logger = logging.NewLogger('debug', true)
@@ -28,7 +26,7 @@ describe('KeyManager', () => {
   it('should generate signing key', async () => {
     const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'keys-'))
-    const nodeId = 'node0'
+    const nodeId = 'node1'
     const keyPrefix = constants.SIGNING_KEY_PREFIX

     const signingKey = await keyManager.generateSigningKey(nodeId)
@@ -53,34 +51,9 @@ describe('KeyManager', () => {
     fs.rmSync(tmpDir, { recursive: true })
   })

-  it('should generate agreement key', async () => {
-    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'keys-'))
-    const nodeId = 'node0'
-
-    const signingKeyFiles = keyManager.prepareNodeKeyFilePaths(nodeId, 'test/data', constants.SIGNING_KEY_PREFIX)
-    const signignKey = await keyManager.loadNodeKey(nodeId, 'test/data', KeyManager.SigningKeyAlgo, signingKeyFiles)
-    const agreementKey = await keyManager.generateAgreementKey(nodeId, signignKey)
-
-    const files = await keyManager.storeAgreementKey(nodeId, agreementKey, tmpDir)
-    expect(files.privateKeyFile).not.toBeNull()
-    expect(files.certificateFile).not.toBeNull()
-
-    const nodeKey = await keyManager.loadAgreementKey(nodeId, tmpDir)
-    expect(nodeKey.certificate).toStrictEqual(agreementKey.certificate)
-    expect(nodeKey.privateKey.algorithm).toStrictEqual(agreementKey.privateKey.algorithm)
-    expect(nodeKey.privateKey.type).toStrictEqual(agreementKey.privateKey.type)
-
-    await expect(agreementKey.certificate.verify({
-      publicKey: signignKey.certificate.publicKey,
-      signatureOnly: true
-    })).resolves.toBeTruthy()
-
-    fs.rmSync(tmpDir, { recursive: true })
-  })
-
   it('should generate TLS key', async () => {
     const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'keys-'))
-    const nodeId = 'node0'
+    const nodeId = 'node1'
     const keyName = 'TLS'

     const tlsKey = await keyManager.generateGrpcTLSKey(nodeId)
@@ -107,33 +80,4 @@ describe('KeyManager', () => {
     fs.rmSync(tmpDir, { recursive: true })
   }, 20000)
-
-  it('should generate pfx keys', async () => {
-    const keysDir = getTmpDir()
-    const tmpDir = getTmpDir()
-    const nodeIds = ['node0', 'node1', 'node2']
-    const downloader = new PackageDownloader(testLogger)
-    const zippy = new Zippy(testLogger)
-    const keytoolDepManager = new KeytoolDependencyManager(downloader, zippy, testLogger)
-    await keytoolDepManager.checkVersion()
-    const keytool = new Keytool(testLogger)
-    for (const nodeId of nodeIds) {
-      const result = await keyManager.generatePrivatePfxKeys(keytool, nodeId, keysDir, tmpDir)
-      const expectedPrivatePfx = path.join(keysDir, `private-${nodeId}.pfx`)
-      expect(result).toStrictEqual(expectedPrivatePfx)
-      expect(fs.existsSync(expectedPrivatePfx)).toBeTruthy()
-      const output = await keytool.list(`-storetype pkcs12 -storepass password -keystore ${expectedPrivatePfx}`)
-      expect(output.includes('Your keystore contains 3 entries')).toBeTruthy()
-    }
-
-    const result = await keyManager.updatePublicPfxKey(keytool, nodeIds, keysDir, tmpDir)
-    const expectedPublicPfx = path.join(keysDir, constants.PUBLIC_PFX)
-    expect(result).toStrictEqual(expectedPublicPfx)
-    expect(fs.existsSync(expectedPublicPfx)).toBeTruthy()
-
-    const output = await keytool.list(`-storetype pkcs12 -storepass password -keystore ${expectedPublicPfx}`)
-    expect(output.includes('Your keystore contains 9 entries')).toBeTruthy()
-    fs.rmSync(keysDir, { recursive: true })
-    fs.rmSync(tmpDir, { recursive: true })
-  }, 120000)
 })
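The surviving KeyManager tests all follow the same scratch-directory discipline: generate keys into a unique temp dir, assert, then remove the dir so runs never interfere with each other. That pattern on its own, with a placeholder file standing in for the generated keys:

```js
// The per-test scratch-directory pattern used by the KeyManager tests.
import fs from 'fs'
import os from 'os'
import path from 'path'

const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'keys-')) // unique dir per run
try {
  fs.writeFileSync(path.join(tmpDir, 'example.pem'), '...') // stand-in for generated key files
} finally {
  fs.rmSync(tmpDir, { recursive: true }) // always clean up, even if an assertion throws
}
```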
diff --git a/test/unit/core/keytool.test.mjs b/test/unit/core/keytool.test.mjs
index 162ad4fd9..7cd44bc85 100644
--- a/test/unit/core/keytool.test.mjs
+++ b/test/unit/core/keytool.test.mjs
@@ -29,32 +29,27 @@ describe.each([
   const keytoolPath = Templates.installationPath(constants.KEYTOOL, input.osPlatform)

   it(`should run keytool -genkeypair [${input.osPlatform}]`, async () => {
-    await keytool.genKeyPair('-alias s-node0')
-    expect(shellSpy).toHaveBeenCalledWith(`${keytoolPath} -genkeypair -alias s-node0`, true)
+    await keytool.genKeyPair('-alias s-node1')
+    expect(shellSpy).toHaveBeenCalledWith(`${keytoolPath} -genkeypair -alias s-node1`, true)
   })

   it(`should run keytool -certreq [${input.osPlatform}]`, async () => {
-    await keytool.certReq('-alias s-node0')
-    expect(shellSpy).toHaveBeenCalledWith(`${keytoolPath} -certreq -alias s-node0`, true)
+    await keytool.certReq('-alias s-node1')
+    expect(shellSpy).toHaveBeenCalledWith(`${keytoolPath} -certreq -alias s-node1`, true)
   })

   it(`should run keytool -gencert [${input.osPlatform}]`, async () => {
-    await keytool.genCert('-alias s-node0')
-    expect(shellSpy).toHaveBeenCalledWith(`${keytoolPath} -gencert -alias s-node0`, true)
+    await keytool.genCert('-alias s-node1')
+    expect(shellSpy).toHaveBeenCalledWith(`${keytoolPath} -gencert -alias s-node1`, true)
   })

   it(`should run keytool -importcert [${input.osPlatform}]`, async () => {
-    await keytool.importCert('-alias s-node0')
-    expect(shellSpy).toHaveBeenCalledWith(`${keytoolPath} -importcert -alias s-node0`, true)
+    await keytool.importCert('-alias s-node1')
+    expect(shellSpy).toHaveBeenCalledWith(`${keytoolPath} -importcert -alias s-node1`, true)
   })

   it(`should run keytool -exportcert [${input.osPlatform}]`, async () => {
-    await keytool.exportCert('-alias s-node0')
-    expect(shellSpy).toHaveBeenCalledWith(`${keytoolPath} -exportcert -alias s-node0`, true)
-  })
-
-  it(`should run keytool -list [${input.osPlatform}]`, async () => {
-    await keytool.list('-keystore private-node0.pfx')
-    expect(shellSpy).toHaveBeenCalledWith(`${keytoolPath} -list -keystore private-node0.pfx`, true)
+    await keytool.exportCert('-alias s-node1')
+    expect(shellSpy).toHaveBeenCalledWith(`${keytoolPath} -exportcert -alias s-node1`, true)
   })
 })
diff --git a/test/unit/core/platform_installer.test.mjs b/test/unit/core/platform_installer.test.mjs
index 6f2ba75ed..b3fc21dd2 100644
--- a/test/unit/core/platform_installer.test.mjs
+++ b/test/unit/core/platform_installer.test.mjs
@@ -99,25 +99,7 @@ describe('PackageInstaller', () => {
     })

     it('should fail for missing tag', async () => {
       expect.assertions(1)
-      await expect(installer.fetchPlatform('network-node0-0', '')).rejects.toThrow(MissingArgumentError)
-    })
-  })
-
-  describe('prepareConfigTxt', () => {
-    it('should fail for missing nodeIDs', async () => {
-      await expect(installer.prepareConfigTxt([], './test', '0.42.0')).rejects.toThrow(MissingArgumentError)
-    })
-
-    it('should fail for missing destPath', async () => {
-      await expect(installer.prepareConfigTxt(['node0'], '', '0.42.0')).rejects.toThrow(MissingArgumentError)
-    })
-
-    it('should fail for missing release tag', async () => {
-      await expect(installer.prepareConfigTxt(['node0'], `${os.tmpdir()}/config.txt`, '')).rejects.toThrow(MissingArgumentError)
-    })
-
-    it('should fail for invalid destPath', async () => {
-      await expect(installer.prepareConfigTxt(['node0'], '/INVALID/config.txt', '0.42.0')).rejects.toThrow(IllegalArgumentError)
+      await expect(installer.fetchPlatform('network-node1-0', '')).rejects.toThrow(MissingArgumentError)
     })
   })
@@ -127,7 +109,7 @@ describe('PackageInstaller', () => {
     })

     it('should fail for missing stagingDir path', async () => {
-      await expect(installer.copyGossipKeys('network-node0-0', '')).rejects.toThrow(MissingArgumentError)
+      await expect(installer.copyGossipKeys('node1', '')).rejects.toThrow(MissingArgumentError)
     })
   })
 })
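The keytool tests above never execute the real binary: `shellSpy` intercepts the shell runner, and each assertion checks only the composed command line. A self-contained sketch of that spy pattern, with a made-up `runner` object standing in for the project's Keytool wrapper:

```js
// Illustrative jest spy pattern; 'runner' is a stand-in, not the real Keytool class.
import { describe, expect, it, jest } from '@jest/globals'

const runner = {
  async run (cmd, verbose) { return '' } // would shell out in the real wrapper
}

describe('command composition', () => {
  it('builds the expected keytool command line', async () => {
    const shellSpy = jest.spyOn(runner, 'run').mockResolvedValue('')
    await runner.run('keytool -genkeypair -alias s-node1', true)
    expect(shellSpy).toHaveBeenCalledWith('keytool -genkeypair -alias s-node1', true)
  })
})
```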
diff --git a/test/unit/core/profile_manager.test.mjs b/test/unit/core/profile_manager.test.mjs
index a09b1e42a..e70be5072 100644
--- a/test/unit/core/profile_manager.test.mjs
+++ b/test/unit/core/profile_manager.test.mjs
@@ -19,15 +19,28 @@ import fs from 'fs'
 import * as yaml from 'js-yaml'
 import path from 'path'
 import { flags } from '../../../src/commands/index.mjs'
-import { ConfigManager, ProfileManager } from '../../../src/core/index.mjs'
-import { getTmpDir, testLogger } from '../../test_util.js'
+import {
+  ConfigManager,
+  constants,
+  ProfileManager
+} from '../../../src/core/index.mjs'
+import { getTestCacheDir, getTmpDir, testLogger } from '../../test_util.js'
+import * as version from '../../../version.mjs'

 const tmpDir = getTmpDir()
 const configFile = path.join(tmpDir, 'resource-manager.config')
 const configManager = new ConfigManager(testLogger, configFile)
 const profileManager = new ProfileManager(testLogger, configManager, tmpDir)
-configManager.setFlag(flags.nodeIDs, 'node0,node1,node3')
-const testProfileFile = path.resolve('test/data/test-profiles.yaml')
+configManager.setFlag(flags.nodeIDs, 'node1,node2,node4')
+const testProfileFile = path.join('test', 'data', 'test-profiles.yaml')
+configManager.setFlag(flags.cacheDir, getTestCacheDir('ProfileManager'))
+configManager.setFlag(flags.releaseTag, version.HEDERA_PLATFORM_VERSION)
+const cacheDir = configManager.getFlag(flags.cacheDir)
+configManager.setFlag(flags.apiPermissionProperties, path.join(cacheDir, 'templates', 'api-permission.properties'))
+configManager.setFlag(flags.applicationProperties, path.join(cacheDir, 'templates', 'application.properties'))
+configManager.setFlag(flags.bootstrapProperties, path.join(cacheDir, 'templates', 'bootstrap.properties'))
+configManager.setFlag(flags.log4j2Xml, path.join(cacheDir, 'templates', 'log4j2.xml'))
+configManager.setFlag(flags.settingTxt, path.join(cacheDir, 'templates', 'settings.txt'))

 describe('ProfileManager', () => {
   afterAll(() => {
@@ -62,6 +75,20 @@ describe('ProfileManager', () => {
   ])('determine chart values for a profile', (input) => {
     it(`should determine FST chart values [profile = ${input.profileName}]`, async () => {
       configManager.setFlag(flags.profileFile, input.profileFile)
+
+      const resources = ['templates', 'profiles']
+      for (const dirName of resources) {
+        const srcDir = path.resolve(path.join(constants.RESOURCES_DIR, dirName))
+        if (!fs.existsSync(srcDir)) continue
+
+        const destDir = path.resolve(path.join(cacheDir, dirName))
+        if (!fs.existsSync(destDir)) {
+          fs.mkdirSync(destDir, { recursive: true })
+        }
+
+        fs.cpSync(srcDir, destDir, { recursive: true })
+      }
+
       profileManager.loadProfiles(true)
       const valuesFile = await profileManager.prepareValuesForFstChart(input.profileName)
       expect(valuesFile).not.toBeNull()
@@ -93,6 +120,8 @@ describe('ProfileManager', () => {
     it(`should determine mirror-node chart values [profile = ${input.profileName}]`, async () => {
       configManager.setFlag(flags.profileFile, input.profileFile)
+      configManager.setFlag(flags.cacheDir, getTestCacheDir('ProfileManager'))
+      configManager.setFlag(flags.releaseTag, version.HEDERA_PLATFORM_VERSION)
       profileManager.loadProfiles(true)
       const valuesFile = await profileManager.prepareValuesForMirrorNodeChart(input.profileName)
       expect(fs.existsSync(valuesFile)).toBeTruthy()
@@ -126,12 +155,68 @@ describe('ProfileManager', () => {
   it('prepareValuesForFstChart should set the value of a key to the contents of a file', async () => {
     configManager.setFlag(flags.profileFile, testProfileFile)
+    // profileManager.loadProfiles(true)
     const file = path.join(tmpDir, '_setFileContentsAsValue.txt')
     const fileContents = '# row 1\n# row 2\n# row 3'
     fs.writeFileSync(file, fileContents)

-    const cachedValuesFile = await profileManager.prepareValuesForFstChart('test', file)
+    configManager.setFlag(flags.applicationEnv, file)
+    const cachedValuesFile = await profileManager.prepareValuesForFstChart('test')
     const valuesYaml = yaml.load(fs.readFileSync(cachedValuesFile).toString())
     expect(valuesYaml.hedera.configMaps.applicationEnv).toEqual(fileContents)
   })
+
+  describe('prepareConfigText', () => {
+    it('should write and return the path to the config.txt file', () => {
+      const nodeAccountMap = /** @type {Map} */ new Map()
+      nodeAccountMap.set('node1', '0.0.3')
+      nodeAccountMap.set('node2', '0.0.4')
+      nodeAccountMap.set('node3', '0.0.5')
+      const destPath = path.join(tmpDir, 'staging')
+      fs.mkdirSync(destPath, { recursive: true })
+      const namespace = 'test-namespace'
+      profileManager.prepareConfigTxt(namespace, nodeAccountMap, destPath, version.HEDERA_PLATFORM_VERSION)
+
+      // expect that the config.txt file was created and exists
+      const configFile = path.join(destPath, 'config.txt')
+      expect(fs.existsSync(configFile)).toBeTruthy()
+
+      const configText = fs.readFileSync(configFile).toString()
+
+      // expect that the config.txt file contains the namespace
+      expect(configText.includes(namespace)).toBeTruthy()
+      // expect that the config.txt file contains the node account IDs
+      expect(configText.includes('0.0.3')).toBeTruthy()
+      expect(configText.includes('0.0.4')).toBeTruthy()
+      expect(configText.includes('0.0.5')).toBeTruthy()
+      // expect the config.txt file to contain the node IDs
+      expect(configText.includes('node1')).toBeTruthy()
+      expect(configText.includes('node2')).toBeTruthy()
+      expect(configText.includes('node3')).toBeTruthy()
+    })
+
+    it('should fail when no nodeIDs', () => {
+      const nodeAccountMap = /** @type {Map} */ new Map()
+      expect(() => profileManager.prepareConfigTxt('', nodeAccountMap, '', version.HEDERA_PLATFORM_VERSION)).toThrow('nodeAccountMap the map of node IDs to account IDs is required')
+    })
+
+    it('should fail when no releaseTag is provided', () => {
+      const nodeAccountMap = /** @type {Map} */ new Map()
+      nodeAccountMap.set('node1', '0.0.3')
+      expect(() => profileManager.prepareConfigTxt('', nodeAccountMap, '', undefined)).toThrow('release tag is required')
+    })
+
+    it('should fail when destPath does not exist', () => {
+      expect.assertions(2)
+      const nodeAccountMap = /** @type {Map} */ new Map()
+      nodeAccountMap.set('node1', '0.0.3')
+      const destPath = path.join(tmpDir, 'missing-directory')
+      try {
+        profileManager.prepareConfigTxt('', nodeAccountMap, destPath, version.HEDERA_PLATFORM_VERSION)
+      } catch (e) {
+        expect(e.message).toContain('config destPath does not exist')
+        expect(e.message).toContain(destPath)
+      }
+    })
+  })
 })
diff --git a/version.mjs b/version.mjs
index d7a48ab59..34a14e4ae 100644
--- a/version.mjs
+++ b/version.mjs
@@ -21,5 +21,5 @@
 export const JAVA_VERSION = '21.0.1+12'
 export const HELM_VERSION = 'v3.14.2'
-export const FST_CHART_VERSION = 'v0.28.2'
-export const HEDERA_PLATFORM_VERSION = 'v0.49.0-alpha.2'
+export const FST_CHART_VERSION = 'v0.30.0'
+export const HEDERA_PLATFORM_VERSION = 'v0.54.0-alpha.4'