From c8b857c191d0a986b07528058279b130aa5d84f3 Mon Sep 17 00:00:00 2001 From: Kevin Puthusseri Date: Tue, 4 Jan 2022 12:02:29 -0800 Subject: [PATCH 001/243] Setting up a basic directory --- sdks/node-ts/package-lock.json | 33 +++++++++++++++++++++++++++++++++ sdks/node-ts/package.json | 5 +++++ sdks/node-ts/src/app.ts | 1 + sdks/node-ts/tsconfig.json | 22 ++++++++++++++++++++++ 4 files changed, 61 insertions(+) create mode 100644 sdks/node-ts/package-lock.json create mode 100644 sdks/node-ts/package.json create mode 100644 sdks/node-ts/src/app.ts create mode 100644 sdks/node-ts/tsconfig.json diff --git a/sdks/node-ts/package-lock.json b/sdks/node-ts/package-lock.json new file mode 100644 index 000000000000..c6f606415785 --- /dev/null +++ b/sdks/node-ts/package-lock.json @@ -0,0 +1,33 @@ +{ + "name": "node-ts", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "devDependencies": { + "typescript": "^4.5.4" + } + }, + "node_modules/typescript": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.4.tgz", + "integrity": "sha512-VgYs2A2QIRuGphtzFV7aQJduJ2gyfTljngLzjpfW9FoYZF6xuw1W0vW9ghCKLfcWrCFxK81CSGRAvS1pn4fIUg==", + "dev": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + } + }, + "dependencies": { + "typescript": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.4.tgz", + "integrity": "sha512-VgYs2A2QIRuGphtzFV7aQJduJ2gyfTljngLzjpfW9FoYZF6xuw1W0vW9ghCKLfcWrCFxK81CSGRAvS1pn4fIUg==", + "dev": true + } + } +} diff --git a/sdks/node-ts/package.json b/sdks/node-ts/package.json new file mode 100644 index 000000000000..ea20c48097fc --- /dev/null +++ b/sdks/node-ts/package.json @@ -0,0 +1,5 @@ +{ + "devDependencies": { + "typescript": "^4.5.4" + } +} diff --git a/sdks/node-ts/src/app.ts b/sdks/node-ts/src/app.ts new file mode 100644 index 000000000000..8ab8adc055be --- /dev/null +++ b/sdks/node-ts/src/app.ts 
@@ -0,0 +1 @@ +console.log("Setting up a barebones structure"); \ No newline at end of file diff --git a/sdks/node-ts/tsconfig.json b/sdks/node-ts/tsconfig.json new file mode 100644 index 000000000000..d859330ac450 --- /dev/null +++ b/sdks/node-ts/tsconfig.json @@ -0,0 +1,22 @@ +{ + "compilerOptions": { + "module": "commonjs", + "esModuleInterop": true, + "allowSyntheticDefaultImports": true, + "target": "es6", + "noImplicitAny": true, + "moduleResolution": "node", + "sourceMap": true, + "outDir": "dist/", + "baseUrl": ".", + "paths": { + "*": [ + "node_modules/*", + "src/types/*" + ] + } + }, + "include": [ + "src/**/*" + ] +} \ No newline at end of file From f55bc27b9bdaf84010ffd83ef8c00f7ea04c2206 Mon Sep 17 00:00:00 2001 From: Kevin Puthusseri Date: Tue, 4 Jan 2022 12:45:30 -0800 Subject: [PATCH 002/243] Mirroring Python SDK's directory structure --- sdks/node-ts/src/apache_beam/coders/README.md | 0 sdks/node-ts/src/apache_beam/examples/README.md | 0 sdks/node-ts/src/apache_beam/internal/README.md | 0 sdks/node-ts/src/apache_beam/io/README.md | 0 sdks/node-ts/src/apache_beam/metrics/README.md | 0 sdks/node-ts/src/apache_beam/options/README.md | 0 sdks/node-ts/src/apache_beam/portability/README.md | 0 sdks/node-ts/src/apache_beam/runenrs/README.md | 0 sdks/node-ts/src/apache_beam/testing/README.md | 0 sdks/node-ts/src/apache_beam/tools/README.md | 0 sdks/node-ts/src/apache_beam/transforms/README.md | 0 sdks/node-ts/src/apache_beam/utils/README.md | 0 sdks/node-ts/src/container/README.md | 0 13 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 sdks/node-ts/src/apache_beam/coders/README.md create mode 100644 sdks/node-ts/src/apache_beam/examples/README.md create mode 100644 sdks/node-ts/src/apache_beam/internal/README.md create mode 100644 sdks/node-ts/src/apache_beam/io/README.md create mode 100644 sdks/node-ts/src/apache_beam/metrics/README.md create mode 100644 sdks/node-ts/src/apache_beam/options/README.md create mode 100644 
sdks/node-ts/src/apache_beam/portability/README.md create mode 100644 sdks/node-ts/src/apache_beam/runenrs/README.md create mode 100644 sdks/node-ts/src/apache_beam/testing/README.md create mode 100644 sdks/node-ts/src/apache_beam/tools/README.md create mode 100644 sdks/node-ts/src/apache_beam/transforms/README.md create mode 100644 sdks/node-ts/src/apache_beam/utils/README.md create mode 100644 sdks/node-ts/src/container/README.md diff --git a/sdks/node-ts/src/apache_beam/coders/README.md b/sdks/node-ts/src/apache_beam/coders/README.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdks/node-ts/src/apache_beam/examples/README.md b/sdks/node-ts/src/apache_beam/examples/README.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdks/node-ts/src/apache_beam/internal/README.md b/sdks/node-ts/src/apache_beam/internal/README.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdks/node-ts/src/apache_beam/io/README.md b/sdks/node-ts/src/apache_beam/io/README.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdks/node-ts/src/apache_beam/metrics/README.md b/sdks/node-ts/src/apache_beam/metrics/README.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdks/node-ts/src/apache_beam/options/README.md b/sdks/node-ts/src/apache_beam/options/README.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdks/node-ts/src/apache_beam/portability/README.md b/sdks/node-ts/src/apache_beam/portability/README.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdks/node-ts/src/apache_beam/runenrs/README.md b/sdks/node-ts/src/apache_beam/runenrs/README.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdks/node-ts/src/apache_beam/testing/README.md b/sdks/node-ts/src/apache_beam/testing/README.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdks/node-ts/src/apache_beam/tools/README.md 
b/sdks/node-ts/src/apache_beam/tools/README.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdks/node-ts/src/apache_beam/transforms/README.md b/sdks/node-ts/src/apache_beam/transforms/README.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdks/node-ts/src/apache_beam/utils/README.md b/sdks/node-ts/src/apache_beam/utils/README.md new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/sdks/node-ts/src/container/README.md b/sdks/node-ts/src/container/README.md new file mode 100644 index 000000000000..e69de29bb2d1 From 6c651191cd623034bce6e4b773baa5a4c910ba14 Mon Sep 17 00:00:00 2001 From: Kerry Donny-Clark Date: Tue, 4 Jan 2022 15:50:54 -0500 Subject: [PATCH 003/243] Adds initial tests --- sdks/node-ts/package-lock.json | 1562 ++++++++++++++++++++++++++++++++ sdks/node-ts/package.json | 4 + sdks/node-ts/test/test.js | 8 + 3 files changed, 1574 insertions(+) create mode 100644 sdks/node-ts/test/test.js diff --git a/sdks/node-ts/package-lock.json b/sdks/node-ts/package-lock.json index c6f606415785..d242213ed8e0 100644 --- a/sdks/node-ts/package-lock.json +++ b/sdks/node-ts/package-lock.json @@ -5,9 +5,815 @@ "packages": { "": { "devDependencies": { + "mocha": "^9.1.3", "typescript": "^4.5.4" } }, + "node_modules/@ungap/promise-all-settled": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz", + "integrity": "sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q==", + "dev": true + }, + "node_modules/ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + "integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "dependencies": { + "fill-range": "^7.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "node_modules/camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chalk/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + 
"dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/chokidar": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.5.2.tgz", + "integrity": "sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ==", + "dev": true, + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "node_modules/debug": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": 
"sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/debug/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": 
"sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, + "bin": { + "flat": "cli.js" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": 
"https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/glob": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true, + "engines": { + "node": ">=4.x" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true, + "bin": { + "he": "bin/he" + } + }, + 
"node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": 
"sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": 
"sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/mocha": { + "version": "9.1.3", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-9.1.3.tgz", + "integrity": "sha512-Xcpl9FqXOAYqI3j79pEtHBBnQgVXIhpULjGQa7DVb0Po+VzmSIK9kanAiWLHoRR/dbZ2qpdPshuXr8l1VaHCzw==", + "dev": true, + "dependencies": { + "@ungap/promise-all-settled": "1.1.2", + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.5.2", + "debug": "4.3.2", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.1.7", + "growl": "1.10.5", + "he": "1.2.0", + "js-yaml": "4.1.0", + "log-symbols": "4.1.0", + "minimatch": "3.0.4", + "ms": "2.1.3", + "nanoid": "3.1.25", + "serialize-javascript": "6.0.0", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "which": "2.0.2", + "workerpool": "6.1.5", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" + }, + "bin": { + "_mocha": "bin/_mocha", + "mocha": "bin/mocha" + }, + "engines": { + "node": ">= 12.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/mochajs" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + 
"node_modules/nanoid": { + "version": "3.1.25", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.25.tgz", + "integrity": "sha512-rdwtIXaXCLFAQbnfqDRnI6jaRHp9fTcYBjtFKE8eezcZ7LuLjhUaQGNeMXf1HmRoCH32CLz6XwX0TtxEOS/A3Q==", + "dev": true, + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + 
"dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", 
+ "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + 
}, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, "node_modules/typescript": { "version": "4.5.4", "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.4.tgz", @@ -20,14 +826,770 @@ "engines": { "node": ">=4.2.0" } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/workerpool": { + "version": "6.1.5", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.1.5.tgz", + "integrity": "sha512-XdKkCK0Zqc6w3iTxLckiuJ81tiD/o5rBE/m+nXpRCB+/Sq4DqkfXZ/x0jW02DG1tGsfUGXbTJyZDP+eu67haSw==", + "dev": true + }, + "node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "node_modules/y18n": { + "version": 
"5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "dependencies": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } } }, "dependencies": { + "@ungap/promise-all-settled": { + "version": "1.1.2", + "resolved": 
"https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz", + "integrity": "sha512-sL/cEvJWAnClXw0wHk85/2L0G6Sj8UB0Ctc1TEMbKSsmpRosqhwj9gWgFRZSrBr2f9tiXISwNhCPmlfqUqyb9Q==", + "dev": true + }, + "ansi-colors": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz", + "integrity": "sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==", + "dev": true + }, + "ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true + }, + "ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "requires": { + "color-convert": "^2.0.1" + } + }, + "anymatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.2.tgz", + "integrity": "sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg==", + "dev": true, + "requires": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + } + }, + "argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "binary-extensions": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.2.0.tgz", + 
"integrity": "sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==", + "dev": true + }, + "brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "requires": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "braces": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", + "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "dev": true, + "requires": { + "fill-range": "^7.0.1" + } + }, + "browser-stdout": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz", + "integrity": "sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw==", + "dev": true + }, + "camelcase": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true + }, + "chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "requires": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "dependencies": { + "supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + } + } + }, + "chokidar": { + "version": "3.5.2", + "resolved": 
"https://registry.npmjs.org/chokidar/-/chokidar-3.5.2.tgz", + "integrity": "sha512-ekGhOnNVPgT77r4K/U3GDhu+FQ2S8TnK/s2KbIGXi0SZWuwkZ2QNyfWdZW+TVfn84DpEP7rLeCt2UI6bJ8GwbQ==", + "dev": true, + "requires": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "fsevents": "~2.3.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + } + }, + "cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", + "dev": true + }, + "debug": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.2.tgz", + "integrity": "sha512-mOp8wKcvj7XxC78zLgw/ZA+6TSgkoE2C/ienthhRD298T7UNwAg9diBpLRxC0mOezLl4B0xV7M0cCO6P/O0Xhw==", + "dev": true, + "requires": { + "ms": "2.1.2" + }, + "dependencies": { + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + 
"dev": true + } + } + }, + "decamelize": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz", + "integrity": "sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ==", + "dev": true + }, + "diff": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.0.0.tgz", + "integrity": "sha512-/VTCrvm5Z0JGty/BWHljh+BAiw3IK+2j87NGMu8Nwc/f48WoDAC395uomO9ZD117ZOBaHmkX1oyLvkVM/aIT3w==", + "dev": true + }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true + }, + "escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true + }, + "fill-range": { + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", + "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "dev": true, + "requires": { + "to-regex-range": "^5.0.1" + } + }, + "find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "requires": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + } + }, + "flat": { + "version": "5.0.2", + "resolved": 
"https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", + "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true + }, + "fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8=", + "dev": true + }, + "fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "optional": true + }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true + }, + "glob": { + "version": "7.1.7", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.7.tgz", + "integrity": "sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ==", + "dev": true, + "requires": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.0.4", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + } + }, + "glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, + "growl": { + "version": "1.10.5", + "resolved": "https://registry.npmjs.org/growl/-/growl-1.10.5.tgz", + "integrity": "sha512-qBr4OuELkhPenW6goKVXiv47US3clb3/IbuWF9KNKEijAy9oeHxU9IgzjvJhHkUzhaj7rOUD7+YGWqUjLp5oSA==", + "dev": true + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": 
"sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true + }, + "he": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz", + "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==", + "dev": true + }, + "inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk=", + "dev": true, + "requires": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", + "dev": true + }, + "is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "requires": { + "binary-extensions": "^2.0.0" + } + }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, + "is-number": { + "version": "7.0.0", + "resolved": 
"https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true + }, + "is-plain-obj": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz", + "integrity": "sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA==", + "dev": true + }, + "is-unicode-supported": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz", + "integrity": "sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw==", + "dev": true + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=", + "dev": true + }, + "js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "requires": { + "argparse": "^2.0.1" + } + }, + "locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "requires": { + "p-locate": "^5.0.0" + } + }, + "log-symbols": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz", + "integrity": "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg==", + "dev": true, + "requires": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + } + }, + "minimatch": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz", + "integrity": 
"sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==", + "dev": true, + "requires": { + "brace-expansion": "^1.1.7" + } + }, + "mocha": { + "version": "9.1.3", + "resolved": "https://registry.npmjs.org/mocha/-/mocha-9.1.3.tgz", + "integrity": "sha512-Xcpl9FqXOAYqI3j79pEtHBBnQgVXIhpULjGQa7DVb0Po+VzmSIK9kanAiWLHoRR/dbZ2qpdPshuXr8l1VaHCzw==", + "dev": true, + "requires": { + "@ungap/promise-all-settled": "1.1.2", + "ansi-colors": "4.1.1", + "browser-stdout": "1.3.1", + "chokidar": "3.5.2", + "debug": "4.3.2", + "diff": "5.0.0", + "escape-string-regexp": "4.0.0", + "find-up": "5.0.0", + "glob": "7.1.7", + "growl": "1.10.5", + "he": "1.2.0", + "js-yaml": "4.1.0", + "log-symbols": "4.1.0", + "minimatch": "3.0.4", + "ms": "2.1.3", + "nanoid": "3.1.25", + "serialize-javascript": "6.0.0", + "strip-json-comments": "3.1.1", + "supports-color": "8.1.1", + "which": "2.0.2", + "workerpool": "6.1.5", + "yargs": "16.2.0", + "yargs-parser": "20.2.4", + "yargs-unparser": "2.0.0" + } + }, + "ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true + }, + "nanoid": { + "version": "3.1.25", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.1.25.tgz", + "integrity": "sha512-rdwtIXaXCLFAQbnfqDRnI6jaRHp9fTcYBjtFKE8eezcZ7LuLjhUaQGNeMXf1HmRoCH32CLz6XwX0TtxEOS/A3Q==", + "dev": true + }, + "normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true + }, + "once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha1-WDsap3WWHUsROsF9nFC6753Xa9E=", + "dev": true, + "requires": { + "wrappy": "1" + } + }, + "p-limit": { + "version": 
"3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "requires": { + "p-limit": "^3.0.2" + } + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18=", + "dev": true + }, + "picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "requires": { + "picomatch": "^2.2.1" + } + }, + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + 
"integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "dev": true + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true + }, + "serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "requires": { + "randombytes": "^2.1.0" + } + }, + "string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + } + }, + "strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.1" + } + }, + "strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true + }, + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "to-regex-range": { + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "requires": { + "is-number": "^7.0.0" + } + }, "typescript": { "version": "4.5.4", "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.5.4.tgz", "integrity": "sha512-VgYs2A2QIRuGphtzFV7aQJduJ2gyfTljngLzjpfW9FoYZF6xuw1W0vW9ghCKLfcWrCFxK81CSGRAvS1pn4fIUg==", "dev": true + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "workerpool": { + "version": "6.1.5", + "resolved": "https://registry.npmjs.org/workerpool/-/workerpool-6.1.5.tgz", + "integrity": "sha512-XdKkCK0Zqc6w3iTxLckiuJ81tiD/o5rBE/m+nXpRCB+/Sq4DqkfXZ/x0jW02DG1tGsfUGXbTJyZDP+eu67haSw==", + "dev": true + }, + "wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "requires": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + } + }, + "wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", + "dev": true + }, + "y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true + }, + "yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + 
"dev": true, + "requires": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + } + }, + "yargs-parser": { + "version": "20.2.4", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.4.tgz", + "integrity": "sha512-WOkpgNhPTlE73h4VFAFsOnomJVaovO8VqLDzy5saChRBFQFBoMYirowyW+Q9HB4HFF4Z7VZTiG3iSzJJA29yRA==", + "dev": true + }, + "yargs-unparser": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz", + "integrity": "sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA==", + "dev": true, + "requires": { + "camelcase": "^6.0.0", + "decamelize": "^4.0.0", + "flat": "^5.0.2", + "is-plain-obj": "^2.1.0" + } + }, + "yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true } } } diff --git a/sdks/node-ts/package.json b/sdks/node-ts/package.json index ea20c48097fc..99048d57ecac 100644 --- a/sdks/node-ts/package.json +++ b/sdks/node-ts/package.json @@ -1,5 +1,9 @@ { "devDependencies": { + "mocha": "^9.1.3", "typescript": "^4.5.4" + }, + "scripts": { + "test": "mocha" } } diff --git a/sdks/node-ts/test/test.js b/sdks/node-ts/test/test.js new file mode 100644 index 000000000000..9306fc5eef47 --- /dev/null +++ b/sdks/node-ts/test/test.js @@ -0,0 +1,8 @@ +var assert = require('assert'); +describe('Array', function() { + describe('#indexOf()', function() { + it('should return -1 when the value is not present', function() { + assert.equal([1, 2, 3].indexOf(4), -1); + }); + }); +}); From b66d6250e6960a2db7d7848eaaa25f771c0e603e Mon Sep 17 00:00:00 2001 From: Kevin Puthusseri Date: Tue, 4 Jan 2022 12:52:35 -0800 Subject: [PATCH 004/243] 'runners' is 
the correct directory name --- sdks/node-ts/src/apache_beam/{runenrs => runners}/README.md | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename sdks/node-ts/src/apache_beam/{runenrs => runners}/README.md (100%) diff --git a/sdks/node-ts/src/apache_beam/runenrs/README.md b/sdks/node-ts/src/apache_beam/runners/README.md similarity index 100% rename from sdks/node-ts/src/apache_beam/runenrs/README.md rename to sdks/node-ts/src/apache_beam/runners/README.md From 1c9aeff1ce3715bc22a3b4eebb8bc5bb7109ade4 Mon Sep 17 00:00:00 2001 From: Pablo Estrada Date: Tue, 4 Jan 2022 13:21:05 -0800 Subject: [PATCH 005/243] sketching the core API for JS SDK --- sdks/node-ts/src/apache_beam/core.ts | 39 ++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 sdks/node-ts/src/apache_beam/core.ts diff --git a/sdks/node-ts/src/apache_beam/core.ts b/sdks/node-ts/src/apache_beam/core.ts new file mode 100644 index 000000000000..b98c41808657 --- /dev/null +++ b/sdks/node-ts/src/apache_beam/core.ts @@ -0,0 +1,39 @@ +class PValue { + constructor() { + } + + apply(transform: PTransform): PValue { + return transform.expand(this); + } + + map(callable): PValue { + return this.apply(new ParDo(callable)); + } + +} + +class Pipeline extends PValue { + +} + +class PCollection extends PValue { + +} + +class PTransform { + expand(input: PValue): PValue { + throw new Error('Method expand has not been implemented.'); + } +} + +class ParDo extends PTransform { + private doFn; + constructor(callableOrDoFn) { + super() + this.doFn = callableOrDoFn; + } +} + +class DoFn { + +} \ No newline at end of file From 4f86a957ab250f5e645358f570d26d15eeaabd82 Mon Sep 17 00:00:00 2001 From: Jonathan Lui Date: Tue, 4 Jan 2022 13:37:21 -0800 Subject: [PATCH 006/243] add .gitignore for node/ts project --- sdks/node-ts/.gitignore | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 sdks/node-ts/.gitignore diff --git a/sdks/node-ts/.gitignore 
b/sdks/node-ts/.gitignore new file mode 100644 index 000000000000..d8697235848d --- /dev/null +++ b/sdks/node-ts/.gitignore @@ -0,0 +1,41 @@ +lib-cov +*.seed +*.log +*.csv +*.dat +*.out +*.pid +*.gz +*.swp + +pids +logs +results +tmp + +# Build +public/css/main.css + +# Coverage reports +coverage + +# API keys and secrets +.env + +# Dependency directory +node_modules +bower_components + +# Editors +.idea +*.iml + +# OS metadata +.DS_Store +Thumbs.db + +# Ignore built ts files +dist/**/* + +# ignore yarn.lock +yarn.lock From 90989139113d44b14cce7777e656d1adc0cdee81 Mon Sep 17 00:00:00 2001 From: Robert Bradshaw Date: Tue, 4 Jan 2022 13:49:12 -0800 Subject: [PATCH 007/243] Worker directory. --- sdks/node-ts/src/apache_beam/worker/README.md | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 sdks/node-ts/src/apache_beam/worker/README.md diff --git a/sdks/node-ts/src/apache_beam/worker/README.md b/sdks/node-ts/src/apache_beam/worker/README.md new file mode 100644 index 000000000000..e69de29bb2d1 From 5afeb9bcc985b4ad565f0f3775ede9c3d7f87689 Mon Sep 17 00:00:00 2001 From: Robert Bradshaw Date: Tue, 4 Jan 2022 13:54:24 -0800 Subject: [PATCH 008/243] Fix complile errors with explicit any for callables. 
--- sdks/node-ts/src/apache_beam/core.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sdks/node-ts/src/apache_beam/core.ts b/sdks/node-ts/src/apache_beam/core.ts index b98c41808657..5794754919ef 100644 --- a/sdks/node-ts/src/apache_beam/core.ts +++ b/sdks/node-ts/src/apache_beam/core.ts @@ -6,7 +6,7 @@ class PValue { return transform.expand(this); } - map(callable): PValue { + map(callable: any): PValue { return this.apply(new ParDo(callable)); } @@ -28,12 +28,12 @@ class PTransform { class ParDo extends PTransform { private doFn; - constructor(callableOrDoFn) { + constructor(callableOrDoFn: any) { super() this.doFn = callableOrDoFn; } } class DoFn { - + } \ No newline at end of file From 2ddd5b01c77a0664b020c2e8f7c65c4e436f0db0 Mon Sep 17 00:00:00 2001 From: Robert Bradshaw Date: Tue, 4 Jan 2022 13:58:16 -0800 Subject: [PATCH 009/243] Add worker entry point. --- sdks/node-ts/package.json | 1 + sdks/node-ts/src/apache_beam/worker/worker.ts | 1 + 2 files changed, 2 insertions(+) create mode 100644 sdks/node-ts/src/apache_beam/worker/worker.ts diff --git a/sdks/node-ts/package.json b/sdks/node-ts/package.json index 99048d57ecac..c7176fdc166c 100644 --- a/sdks/node-ts/package.json +++ b/sdks/node-ts/package.json @@ -4,6 +4,7 @@ "typescript": "^4.5.4" }, "scripts": { + "worker": "node dist/apache_beam/worker/worker.js", "test": "mocha" } } diff --git a/sdks/node-ts/src/apache_beam/worker/worker.ts b/sdks/node-ts/src/apache_beam/worker/worker.ts new file mode 100644 index 000000000000..807165859dad --- /dev/null +++ b/sdks/node-ts/src/apache_beam/worker/worker.ts @@ -0,0 +1 @@ +console.log("Starting the worker."); From fa94f13d55588a3182f3bb2be69ed3cd15c3feb9 Mon Sep 17 00:00:00 2001 From: Robert Bradshaw Date: Tue, 4 Jan 2022 14:17:19 -0800 Subject: [PATCH 010/243] Add proto generation code. 
--- sdks/node-ts/package-lock.json | 126 ++++++++++++++++++ sdks/node-ts/package.json | 3 + sdks/node-ts/src/apache_beam/proto/README.md | 5 + .../src/apache_beam/proto/gen_protos.sh | 22 +++ 4 files changed, 156 insertions(+) create mode 100644 sdks/node-ts/src/apache_beam/proto/README.md create mode 100644 sdks/node-ts/src/apache_beam/proto/gen_protos.sh diff --git a/sdks/node-ts/package-lock.json b/sdks/node-ts/package-lock.json index d242213ed8e0..8713f8246a10 100644 --- a/sdks/node-ts/package-lock.json +++ b/sdks/node-ts/package-lock.json @@ -4,11 +4,84 @@ "requires": true, "packages": { "": { + "dependencies": { + "@protobuf-ts/plugin": "^2.1.0" + }, "devDependencies": { "mocha": "^9.1.3", "typescript": "^4.5.4" } }, + "node_modules/@protobuf-ts/plugin": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.1.0.tgz", + "integrity": "sha512-eAgw03TjJdpYz78wITlePB1vUQiJURf+o8lromgUo2v2LVyuQN7xzLOZGh1aIyVk0JBOYFYiaNED4gn682AP6g==", + "dependencies": { + "@protobuf-ts/plugin-framework": "^2.1.0", + "@protobuf-ts/protoc": "^2.1.0", + "@protobuf-ts/runtime": "^2.1.0", + "@protobuf-ts/runtime-rpc": "^2.1.0", + "typescript": "^3.9" + }, + "bin": { + "protoc-gen-dump": "bin/protoc-gen-dump", + "protoc-gen-ts": "bin/protoc-gen-ts" + } + }, + "node_modules/@protobuf-ts/plugin-framework": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.1.0.tgz", + "integrity": "sha512-GuDKdJqlSZtQYn6LgDh1J95ZnO1pgGDCMKYAIcosFDTI++EuwON6eje1ppI03O8GJ2cF2EoqjgqsXhC6kXhbPQ==", + "dependencies": { + "@protobuf-ts/runtime": "^2.1.0", + "typescript": "^3.9" + } + }, + "node_modules/@protobuf-ts/plugin-framework/node_modules/typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", + "bin": { + "tsc": "bin/tsc", + 
"tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/@protobuf-ts/plugin/node_modules/typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=4.2.0" + } + }, + "node_modules/@protobuf-ts/protoc": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.1.0.tgz", + "integrity": "sha512-lAoGOqE+qy52iL44iE7drkXkmiFXx9VwedaaDf3GPXPPwxjfxN6Ke3vl2MavG+TmgtYPvgpGXy/D5Aey+vecmw==", + "bin": { + "protoc": "protoc.js" + } + }, + "node_modules/@protobuf-ts/runtime": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.1.0.tgz", + "integrity": "sha512-HZwkgJW9SGiE9+0lWKr1X997tmG01/40j+hr9yBVk+hTQcm7Hsf77XhMNtsDjWUOcspG6GBXu8o3g4i3kD5/zQ==" + }, + "node_modules/@protobuf-ts/runtime-rpc": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.1.0.tgz", + "integrity": "sha512-i/q2sV2s3quJ0I+WY5mXKHiiabvkggOuyb+m0sDqN1MQXcJ/S9mZo/KZ0Dc5RcH4SFhg7NiFzsw6O1xOvG7GrQ==", + "dependencies": { + "@protobuf-ts/runtime": "^2.1.0" + } + }, "node_modules/@ungap/promise-all-settled": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz", @@ -936,6 +1009,59 @@ } }, "dependencies": { + "@protobuf-ts/plugin": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin/-/plugin-2.1.0.tgz", + "integrity": "sha512-eAgw03TjJdpYz78wITlePB1vUQiJURf+o8lromgUo2v2LVyuQN7xzLOZGh1aIyVk0JBOYFYiaNED4gn682AP6g==", + "requires": { + "@protobuf-ts/plugin-framework": "^2.1.0", + "@protobuf-ts/protoc": "^2.1.0", + "@protobuf-ts/runtime": "^2.1.0", + "@protobuf-ts/runtime-rpc": 
"^2.1.0", + "typescript": "^3.9" + }, + "dependencies": { + "typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==" + } + } + }, + "@protobuf-ts/plugin-framework": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@protobuf-ts/plugin-framework/-/plugin-framework-2.1.0.tgz", + "integrity": "sha512-GuDKdJqlSZtQYn6LgDh1J95ZnO1pgGDCMKYAIcosFDTI++EuwON6eje1ppI03O8GJ2cF2EoqjgqsXhC6kXhbPQ==", + "requires": { + "@protobuf-ts/runtime": "^2.1.0", + "typescript": "^3.9" + }, + "dependencies": { + "typescript": { + "version": "3.9.10", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.9.10.tgz", + "integrity": "sha512-w6fIxVE/H1PkLKcCPsFqKE7Kv7QUwhU8qQY2MueZXWx5cPZdwFupLgKK3vntcK98BtNHZtAF4LA/yl2a7k8R6Q==" + } + } + }, + "@protobuf-ts/protoc": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@protobuf-ts/protoc/-/protoc-2.1.0.tgz", + "integrity": "sha512-lAoGOqE+qy52iL44iE7drkXkmiFXx9VwedaaDf3GPXPPwxjfxN6Ke3vl2MavG+TmgtYPvgpGXy/D5Aey+vecmw==" + }, + "@protobuf-ts/runtime": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime/-/runtime-2.1.0.tgz", + "integrity": "sha512-HZwkgJW9SGiE9+0lWKr1X997tmG01/40j+hr9yBVk+hTQcm7Hsf77XhMNtsDjWUOcspG6GBXu8o3g4i3kD5/zQ==" + }, + "@protobuf-ts/runtime-rpc": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@protobuf-ts/runtime-rpc/-/runtime-rpc-2.1.0.tgz", + "integrity": "sha512-i/q2sV2s3quJ0I+WY5mXKHiiabvkggOuyb+m0sDqN1MQXcJ/S9mZo/KZ0Dc5RcH4SFhg7NiFzsw6O1xOvG7GrQ==", + "requires": { + "@protobuf-ts/runtime": "^2.1.0" + } + }, "@ungap/promise-all-settled": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@ungap/promise-all-settled/-/promise-all-settled-1.1.2.tgz", diff --git a/sdks/node-ts/package.json b/sdks/node-ts/package.json index 
c7176fdc166c..497b5b0bf7e2 100644 --- a/sdks/node-ts/package.json +++ b/sdks/node-ts/package.json @@ -6,5 +6,8 @@ "scripts": { "worker": "node dist/apache_beam/worker/worker.js", "test": "mocha" + }, + "dependencies": { + "@protobuf-ts/plugin": "^2.1.0" } } diff --git a/sdks/node-ts/src/apache_beam/proto/README.md b/sdks/node-ts/src/apache_beam/proto/README.md new file mode 100644 index 000000000000..8bafb0145df1 --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/README.md @@ -0,0 +1,5 @@ +Generated typescript apache beam protos. + +For now, these are just generated and checked in. + +TODO: Automate this as part of the build process. diff --git a/sdks/node-ts/src/apache_beam/proto/gen_protos.sh b/sdks/node-ts/src/apache_beam/proto/gen_protos.sh new file mode 100644 index 000000000000..dfbebf979f0c --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/gen_protos.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +# Once: npm install @protobuf-ts/plugin + +MODEL_PROTOS=../../../../../model + +echo $MODEL_PROTOS +ls $MODEL_PROTOS + +npx protoc --ts_out . \ + --proto_path $MODEL_PROTOS/pipeline/src/main/proto/ \ + $MODEL_PROTOS/pipeline/src/main/proto/*.proto \ + +npx protoc --ts_out . \ + --proto_path $MODEL_PROTOS/pipeline/src/main/proto/ \ + --proto_path $MODEL_PROTOS/job-management/src/main/proto/ \ + $MODEL_PROTOS/job-management/src/main/proto/*.proto \ + +npx protoc --ts_out . \ + --proto_path $MODEL_PROTOS/pipeline/src/main/proto/ \ + --proto_path $MODEL_PROTOS/fn-execution/src/main/proto/ \ + $MODEL_PROTOS/fn-execution/src/main/proto/*.proto \ From 8d9de5f7543491e8711d3250aa5d8674feac01d7 Mon Sep 17 00:00:00 2001 From: Robert Bradshaw Date: Tue, 4 Jan 2022 14:17:36 -0800 Subject: [PATCH 011/243] Add generated proto files. 
--- .../proto/beam_artifact_api.client.ts | 257 + .../apache_beam/proto/beam_artifact_api.ts | 1389 +++ .../proto/beam_expansion_api.client.ts | 64 + .../apache_beam/proto/beam_expansion_api.ts | 236 + .../apache_beam/proto/beam_fn_api.client.ts | 306 + .../src/apache_beam/proto/beam_fn_api.ts | 5163 +++++++++++ .../apache_beam/proto/beam_job_api.client.ts | 222 + .../src/apache_beam/proto/beam_job_api.ts | 1737 ++++ .../proto/beam_provision_api.client.ts | 72 + .../apache_beam/proto/beam_provision_api.ts | 342 + .../proto/beam_runner_api.client.ts | 64 + .../src/apache_beam/proto/beam_runner_api.ts | 8149 +++++++++++++++++ .../src/apache_beam/proto/endpoints.ts | 185 + .../apache_beam/proto/external_transforms.ts | 378 + sdks/node-ts/src/apache_beam/proto/metrics.ts | 876 ++ sdks/node-ts/src/apache_beam/proto/schema.ts | 1568 ++++ .../apache_beam/proto/standard_window_fns.ts | 358 + 17 files changed, 21366 insertions(+) create mode 100644 sdks/node-ts/src/apache_beam/proto/beam_artifact_api.client.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/beam_artifact_api.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/beam_expansion_api.client.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/beam_expansion_api.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/beam_fn_api.client.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/beam_fn_api.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/beam_job_api.client.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/beam_job_api.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/beam_provision_api.client.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/beam_provision_api.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/beam_runner_api.client.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/beam_runner_api.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/endpoints.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/external_transforms.ts 
create mode 100644 sdks/node-ts/src/apache_beam/proto/metrics.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/schema.ts create mode 100644 sdks/node-ts/src/apache_beam/proto/standard_window_fns.ts diff --git a/sdks/node-ts/src/apache_beam/proto/beam_artifact_api.client.ts b/sdks/node-ts/src/apache_beam/proto/beam_artifact_api.client.ts new file mode 100644 index 000000000000..9a92998c9590 --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/beam_artifact_api.client.ts @@ -0,0 +1,257 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "beam_artifact_api.proto" (package "org.apache.beam.model.job_management.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers describing the Artifact API, for communicating with a runner +// for artifact staging and retrieval over GRPC. 
+// +import { LegacyArtifactRetrievalService } from "./beam_artifact_api"; +import type { ArtifactChunk } from "./beam_artifact_api"; +import type { LegacyGetArtifactRequest } from "./beam_artifact_api"; +import type { GetManifestResponse } from "./beam_artifact_api"; +import type { GetManifestRequest } from "./beam_artifact_api"; +import { LegacyArtifactStagingService } from "./beam_artifact_api"; +import type { CommitManifestResponse } from "./beam_artifact_api"; +import type { CommitManifestRequest } from "./beam_artifact_api"; +import type { PutArtifactResponse } from "./beam_artifact_api"; +import type { PutArtifactRequest } from "./beam_artifact_api"; +import type { ClientStreamingCall } from "@protobuf-ts/runtime-rpc"; +import { ArtifactStagingService } from "./beam_artifact_api"; +import type { ArtifactRequestWrapper } from "./beam_artifact_api"; +import type { ArtifactResponseWrapper } from "./beam_artifact_api"; +import type { DuplexStreamingCall } from "@protobuf-ts/runtime-rpc"; +import type { RpcTransport } from "@protobuf-ts/runtime-rpc"; +import type { ServiceInfo } from "@protobuf-ts/runtime-rpc"; +import { ArtifactRetrievalService } from "./beam_artifact_api"; +import type { GetArtifactResponse } from "./beam_artifact_api"; +import type { GetArtifactRequest } from "./beam_artifact_api"; +import type { ServerStreamingCall } from "@protobuf-ts/runtime-rpc"; +import { stackIntercept } from "@protobuf-ts/runtime-rpc"; +import type { ResolveArtifactsResponse } from "./beam_artifact_api"; +import type { ResolveArtifactsRequest } from "./beam_artifact_api"; +import type { UnaryCall } from "@protobuf-ts/runtime-rpc"; +import type { RpcOptions } from "@protobuf-ts/runtime-rpc"; +/** + * A service to retrieve artifacts for use in a Job. 
+ * + * @generated from protobuf service org.apache.beam.model.job_management.v1.ArtifactRetrievalService + */ +export interface IArtifactRetrievalServiceClient { + /** + * Resolves the given artifact references into one or more replacement + * artifact references (e.g. a Maven dependency into a (transitive) set + * of jars. + * + * @generated from protobuf rpc: ResolveArtifacts(org.apache.beam.model.job_management.v1.ResolveArtifactsRequest) returns (org.apache.beam.model.job_management.v1.ResolveArtifactsResponse); + */ + resolveArtifacts(input: ResolveArtifactsRequest, options?: RpcOptions): UnaryCall; + /** + * Retrieves the given artifact as a stream of bytes. + * + * @generated from protobuf rpc: GetArtifact(org.apache.beam.model.job_management.v1.GetArtifactRequest) returns (stream org.apache.beam.model.job_management.v1.GetArtifactResponse); + */ + getArtifact(input: GetArtifactRequest, options?: RpcOptions): ServerStreamingCall; +} +/** + * A service to retrieve artifacts for use in a Job. + * + * @generated from protobuf service org.apache.beam.model.job_management.v1.ArtifactRetrievalService + */ +export class ArtifactRetrievalServiceClient implements IArtifactRetrievalServiceClient, ServiceInfo { + typeName = ArtifactRetrievalService.typeName; + methods = ArtifactRetrievalService.methods; + options = ArtifactRetrievalService.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * Resolves the given artifact references into one or more replacement + * artifact references (e.g. a Maven dependency into a (transitive) set + * of jars. 
+ * + * @generated from protobuf rpc: ResolveArtifacts(org.apache.beam.model.job_management.v1.ResolveArtifactsRequest) returns (org.apache.beam.model.job_management.v1.ResolveArtifactsResponse); + */ + resolveArtifacts(input: ResolveArtifactsRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * Retrieves the given artifact as a stream of bytes. + * + * @generated from protobuf rpc: GetArtifact(org.apache.beam.model.job_management.v1.GetArtifactRequest) returns (stream org.apache.beam.model.job_management.v1.GetArtifactResponse); + */ + getArtifact(input: GetArtifactRequest, options?: RpcOptions): ServerStreamingCall { + const method = this.methods[1], opt = this._transport.mergeOptions(options); + return stackIntercept("serverStreaming", this._transport, method, opt, input); + } +} +/** + * A service that allows the client to act as an ArtifactRetrievalService, + * for a particular job with the server initiating requests and receiving + * responses. + * + * A client calls the service with an ArtifactResponseWrapper that has the + * staging token set, and thereafter responds to the server's requests. + * + * @generated from protobuf service org.apache.beam.model.job_management.v1.ArtifactStagingService + */ +export interface IArtifactStagingServiceClient { + /** + * @generated from protobuf rpc: ReverseArtifactRetrievalService(stream org.apache.beam.model.job_management.v1.ArtifactResponseWrapper) returns (stream org.apache.beam.model.job_management.v1.ArtifactRequestWrapper); + */ + reverseArtifactRetrievalService(options?: RpcOptions): DuplexStreamingCall; +} +/** + * A service that allows the client to act as an ArtifactRetrievalService, + * for a particular job with the server initiating requests and receiving + * responses. 
+ * + * A client calls the service with an ArtifactResponseWrapper that has the + * staging token set, and thereafter responds to the server's requests. + * + * @generated from protobuf service org.apache.beam.model.job_management.v1.ArtifactStagingService + */ +export class ArtifactStagingServiceClient implements IArtifactStagingServiceClient, ServiceInfo { + typeName = ArtifactStagingService.typeName; + methods = ArtifactStagingService.methods; + options = ArtifactStagingService.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * @generated from protobuf rpc: ReverseArtifactRetrievalService(stream org.apache.beam.model.job_management.v1.ArtifactResponseWrapper) returns (stream org.apache.beam.model.job_management.v1.ArtifactRequestWrapper); + */ + reverseArtifactRetrievalService(options?: RpcOptions): DuplexStreamingCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("duplex", this._transport, method, opt); + } +} +// Legacy artifact staging service for pipeline-level artifacts. + +/** + * A service to stage artifacts for use in a Job. + * + * @generated from protobuf service org.apache.beam.model.job_management.v1.LegacyArtifactStagingService + */ +export interface ILegacyArtifactStagingServiceClient { + /** + * Stage an artifact to be available during job execution. The first request must contain the + * name of the artifact. All future requests must contain sequential chunks of the content of + * the artifact. + * + * @generated from protobuf rpc: PutArtifact(stream org.apache.beam.model.job_management.v1.PutArtifactRequest) returns (org.apache.beam.model.job_management.v1.PutArtifactResponse); + */ + putArtifact(options?: RpcOptions): ClientStreamingCall; + /** + * Commit the manifest for a Job. All artifacts must have been successfully uploaded + * before this call is made. 
+ * + * Throws error INVALID_ARGUMENT if not all of the members of the manifest are present + * + * @generated from protobuf rpc: CommitManifest(org.apache.beam.model.job_management.v1.CommitManifestRequest) returns (org.apache.beam.model.job_management.v1.CommitManifestResponse); + */ + commitManifest(input: CommitManifestRequest, options?: RpcOptions): UnaryCall; +} +// Legacy artifact staging service for pipeline-level artifacts. + +/** + * A service to stage artifacts for use in a Job. + * + * @generated from protobuf service org.apache.beam.model.job_management.v1.LegacyArtifactStagingService + */ +export class LegacyArtifactStagingServiceClient implements ILegacyArtifactStagingServiceClient, ServiceInfo { + typeName = LegacyArtifactStagingService.typeName; + methods = LegacyArtifactStagingService.methods; + options = LegacyArtifactStagingService.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * Stage an artifact to be available during job execution. The first request must contain the + * name of the artifact. All future requests must contain sequential chunks of the content of + * the artifact. + * + * @generated from protobuf rpc: PutArtifact(stream org.apache.beam.model.job_management.v1.PutArtifactRequest) returns (org.apache.beam.model.job_management.v1.PutArtifactResponse); + */ + putArtifact(options?: RpcOptions): ClientStreamingCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("clientStreaming", this._transport, method, opt); + } + /** + * Commit the manifest for a Job. All artifacts must have been successfully uploaded + * before this call is made. 
+ * + * Throws error INVALID_ARGUMENT if not all of the members of the manifest are present + * + * @generated from protobuf rpc: CommitManifest(org.apache.beam.model.job_management.v1.CommitManifestRequest) returns (org.apache.beam.model.job_management.v1.CommitManifestResponse); + */ + commitManifest(input: CommitManifestRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[1], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } +} +/** + * A service to retrieve artifacts for use in a Job. + * + * @generated from protobuf service org.apache.beam.model.job_management.v1.LegacyArtifactRetrievalService + */ +export interface ILegacyArtifactRetrievalServiceClient { + /** + * Get the manifest for the job + * + * @generated from protobuf rpc: GetManifest(org.apache.beam.model.job_management.v1.GetManifestRequest) returns (org.apache.beam.model.job_management.v1.GetManifestResponse); + */ + getManifest(input: GetManifestRequest, options?: RpcOptions): UnaryCall; + /** + * Get an artifact staged for the job. The requested artifact must be within the manifest + * + * @generated from protobuf rpc: GetArtifact(org.apache.beam.model.job_management.v1.LegacyGetArtifactRequest) returns (stream org.apache.beam.model.job_management.v1.ArtifactChunk); + */ + getArtifact(input: LegacyGetArtifactRequest, options?: RpcOptions): ServerStreamingCall; +} +/** + * A service to retrieve artifacts for use in a Job. 
+ * + * @generated from protobuf service org.apache.beam.model.job_management.v1.LegacyArtifactRetrievalService + */ +export class LegacyArtifactRetrievalServiceClient implements ILegacyArtifactRetrievalServiceClient, ServiceInfo { + typeName = LegacyArtifactRetrievalService.typeName; + methods = LegacyArtifactRetrievalService.methods; + options = LegacyArtifactRetrievalService.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * Get the manifest for the job + * + * @generated from protobuf rpc: GetManifest(org.apache.beam.model.job_management.v1.GetManifestRequest) returns (org.apache.beam.model.job_management.v1.GetManifestResponse); + */ + getManifest(input: GetManifestRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * Get an artifact staged for the job. The requested artifact must be within the manifest + * + * @generated from protobuf rpc: GetArtifact(org.apache.beam.model.job_management.v1.LegacyGetArtifactRequest) returns (stream org.apache.beam.model.job_management.v1.ArtifactChunk); + */ + getArtifact(input: LegacyGetArtifactRequest, options?: RpcOptions): ServerStreamingCall { + const method = this.methods[1], opt = this._transport.mergeOptions(options); + return stackIntercept("serverStreaming", this._transport, method, opt, input); + } +} diff --git a/sdks/node-ts/src/apache_beam/proto/beam_artifact_api.ts b/sdks/node-ts/src/apache_beam/proto/beam_artifact_api.ts new file mode 100644 index 000000000000..2a5334262534 --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/beam_artifact_api.ts @@ -0,0 +1,1389 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "beam_artifact_api.proto" (package "org.apache.beam.model.job_management.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more 
contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers describing the Artifact API, for communicating with a runner +// for artifact staging and retrieval over GRPC. +// +import { ServiceType } from "@protobuf-ts/runtime-rpc"; +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { ArtifactInformation } from "./beam_runner_api"; +/** + * A request for artifact resolution. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.ResolveArtifactsRequest + */ +export interface ResolveArtifactsRequest { + /** + * An (ordered) set of artifacts to (jointly) resolve. 
+ * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.ArtifactInformation artifacts = 1; + */ + artifacts: ArtifactInformation[]; + /** + * A set of artifact type urns that are understood by the requester. + * An attempt should be made to resolve the artifacts in terms of these URNs, + * but other URNs may be used as well with the understanding that they must + * be fetch-able as bytes via GetArtifact. + * + * @generated from protobuf field: repeated string preferred_urns = 2; + */ + preferredUrns: string[]; +} +/** + * A response for artifact resolution. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.ResolveArtifactsResponse + */ +export interface ResolveArtifactsResponse { + /** + * A full (ordered) set of replacements for the set of requested artifacts, + * preferably in terms of the requested type URNs. If there is no better + * resolution, the original list is returned. + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.ArtifactInformation replacements = 1; + */ + replacements: ArtifactInformation[]; +} +/** + * A request to get an artifact. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.GetArtifactRequest + */ +export interface GetArtifactRequest { + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ArtifactInformation artifact = 1; + */ + artifact?: ArtifactInformation; +} +/** + * Part of a response to getting an artifact. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.GetArtifactResponse + */ +export interface GetArtifactResponse { + /** + * @generated from protobuf field: bytes data = 1; + */ + data: Uint8Array; +} +/** + * Wraps an ArtifactRetrievalService request for use in ReverseArtifactRetrievalService. 
+ * + * @generated from protobuf message org.apache.beam.model.job_management.v1.ArtifactRequestWrapper + */ +export interface ArtifactRequestWrapper { + /** + * @generated from protobuf oneof: request + */ + request: { + oneofKind: "resolveArtifact"; + /** + * @generated from protobuf field: org.apache.beam.model.job_management.v1.ResolveArtifactsRequest resolve_artifact = 1000; + */ + resolveArtifact: ResolveArtifactsRequest; + } | { + oneofKind: "getArtifact"; + /** + * @generated from protobuf field: org.apache.beam.model.job_management.v1.GetArtifactRequest get_artifact = 1001; + */ + getArtifact: GetArtifactRequest; + } | { + oneofKind: undefined; + }; +} +/** + * Wraps an ArtifactRetrievalService response for use in ReverseArtifactRetrievalService. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.ArtifactResponseWrapper + */ +export interface ArtifactResponseWrapper { + /** + * A token indicating which job these artifacts are being staged for. + * + * @generated from protobuf field: string staging_token = 1; + */ + stagingToken: string; + /** + * Whether this is the last response for this request (for those responses that + * would typically be terminated by the end of the response stream.) + * + * @generated from protobuf field: bool is_last = 2; + */ + isLast: boolean; + /** + * @generated from protobuf oneof: response + */ + response: { + oneofKind: "resolveArtifactResponse"; + /** + * @generated from protobuf field: org.apache.beam.model.job_management.v1.ResolveArtifactsResponse resolve_artifact_response = 1000; + */ + resolveArtifactResponse: ResolveArtifactsResponse; + } | { + oneofKind: "getArtifactResponse"; + /** + * @generated from protobuf field: org.apache.beam.model.job_management.v1.GetArtifactResponse get_artifact_response = 1001; + */ + getArtifactResponse: GetArtifactResponse; + } | { + oneofKind: undefined; + }; +} +/** + * An artifact identifier and associated metadata. 
+ * + * @generated from protobuf message org.apache.beam.model.job_management.v1.ArtifactMetadata + */ +export interface ArtifactMetadata { + /** + * (Required) The name of the artifact. + * + * @generated from protobuf field: string name = 1; + */ + name: string; + /** + * (Optional) The Unix-like permissions of the artifact + * + * @generated from protobuf field: uint32 permissions = 2; + */ + permissions: number; + /** + * (Optional) The hex-encoded sha256 checksum of the artifact. Used, among other things, by + * harness boot code to validate the integrity of the artifact. + * + * @generated from protobuf field: string sha256 = 4; + */ + sha256: string; +} +/** + * A collection of artifacts. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.Manifest + */ +export interface Manifest { + /** + * @generated from protobuf field: repeated org.apache.beam.model.job_management.v1.ArtifactMetadata artifact = 1; + */ + artifact: ArtifactMetadata[]; +} +/** + * A manifest with location information. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.ProxyManifest + */ +export interface ProxyManifest { + /** + * @generated from protobuf field: org.apache.beam.model.job_management.v1.Manifest manifest = 1; + */ + manifest?: Manifest; + /** + * @generated from protobuf field: repeated org.apache.beam.model.job_management.v1.ProxyManifest.Location location = 2; + */ + location: ProxyManifest_Location[]; +} +/** + * @generated from protobuf message org.apache.beam.model.job_management.v1.ProxyManifest.Location + */ +export interface ProxyManifest_Location { + /** + * @generated from protobuf field: string name = 1; + */ + name: string; + /** + * @generated from protobuf field: string uri = 2; + */ + uri: string; +} +/** + * A request to get the manifest of a Job. 
+ * + * @generated from protobuf message org.apache.beam.model.job_management.v1.GetManifestRequest + */ +export interface GetManifestRequest { + /** + * (Required) An opaque token representing the entirety of the staged artifacts. + * Returned in CommitManifestResponse. + * + * @generated from protobuf field: string retrieval_token = 1; + */ + retrievalToken: string; +} +/** + * A response containing a job manifest. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.GetManifestResponse + */ +export interface GetManifestResponse { + /** + * @generated from protobuf field: org.apache.beam.model.job_management.v1.Manifest manifest = 1; + */ + manifest?: Manifest; +} +/** + * A request to get an artifact. The artifact must be present in the manifest for the job. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.LegacyGetArtifactRequest + */ +export interface LegacyGetArtifactRequest { + /** + * (Required) The name of the artifact to retrieve. + * + * @generated from protobuf field: string name = 1; + */ + name: string; + /** + * (Required) An opaque token representing the entirety of the staged artifacts. + * Returned in CommitManifestResponse. + * + * @generated from protobuf field: string retrieval_token = 2; + */ + retrievalToken: string; +} +/** + * Part of an artifact. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.ArtifactChunk + */ +export interface ArtifactChunk { + /** + * @generated from protobuf field: bytes data = 1; + */ + data: Uint8Array; +} +/** + * @generated from protobuf message org.apache.beam.model.job_management.v1.PutArtifactMetadata + */ +export interface PutArtifactMetadata { + /** + * (Required) A token for artifact staging session. 
This token can be obtained + * from PrepareJob request in JobService + * + * @generated from protobuf field: string staging_session_token = 1; + */ + stagingSessionToken: string; + /** + * (Required) The Artifact metadata. + * + * @generated from protobuf field: org.apache.beam.model.job_management.v1.ArtifactMetadata metadata = 2; + */ + metadata?: ArtifactMetadata; +} +/** + * A request to stage an artifact. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.PutArtifactRequest + */ +export interface PutArtifactRequest { + /** + * @generated from protobuf oneof: content + */ + content: { + oneofKind: "metadata"; + /** + * The first message in a PutArtifact call must contain this field. + * + * @generated from protobuf field: org.apache.beam.model.job_management.v1.PutArtifactMetadata metadata = 1; + */ + metadata: PutArtifactMetadata; + } | { + oneofKind: "data"; + /** + * A chunk of the artifact. All messages after the first in a PutArtifact call must contain a + * chunk. + * + * @generated from protobuf field: org.apache.beam.model.job_management.v1.ArtifactChunk data = 2; + */ + data: ArtifactChunk; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message org.apache.beam.model.job_management.v1.PutArtifactResponse + */ +export interface PutArtifactResponse { +} +/** + * A request to commit the manifest for a Job. All artifacts must have been successfully uploaded + * before this call is made. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.CommitManifestRequest + */ +export interface CommitManifestRequest { + /** + * (Required) The manifest to commit. + * + * @generated from protobuf field: org.apache.beam.model.job_management.v1.Manifest manifest = 1; + */ + manifest?: Manifest; + /** + * (Required) A token for artifact staging session. 
This token can be obtained + * from PrepareJob request in JobService + * + * @generated from protobuf field: string staging_session_token = 2; + */ + stagingSessionToken: string; +} +/** + * The result of committing a manifest. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.CommitManifestResponse + */ +export interface CommitManifestResponse { + /** + * (Required) An opaque token representing the entirety of the staged artifacts. + * This can be used to retrieve the manifest and artifacts from an associated + * LegacyArtifactRetrievalService. + * + * @generated from protobuf field: string retrieval_token = 1; + */ + retrievalToken: string; +} +/** + * @generated from protobuf enum org.apache.beam.model.job_management.v1.CommitManifestResponse.Constants + */ +export enum CommitManifestResponse_Constants { + /** + * Token indicating that no artifacts were staged and therefore no retrieval attempt is necessary. + * + * @generated from protobuf enum value: NO_ARTIFACTS_STAGED_TOKEN = 0; + */ + NO_ARTIFACTS_STAGED_TOKEN = 0 +} +// @generated message type with reflection information, may provide speed optimized methods +class ResolveArtifactsRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.ResolveArtifactsRequest", [ + { no: 1, name: "artifacts", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ArtifactInformation }, + { no: 2, name: "preferred_urns", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ResolveArtifactsRequest { + const message = { artifacts: [], preferredUrns: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ResolveArtifactsRequest): 
ResolveArtifactsRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.pipeline.v1.ArtifactInformation artifacts */ 1: + message.artifacts.push(ArtifactInformation.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated string preferred_urns */ 2: + message.preferredUrns.push(reader.string()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ResolveArtifactsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.pipeline.v1.ArtifactInformation artifacts = 1; */ + for (let i = 0; i < message.artifacts.length; i++) + ArtifactInformation.internalBinaryWrite(message.artifacts[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* repeated string preferred_urns = 2; */ + for (let i = 0; i < message.preferredUrns.length; i++) + writer.tag(2, WireType.LengthDelimited).string(message.preferredUrns[i]); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.ResolveArtifactsRequest + */ +export const ResolveArtifactsRequest = new ResolveArtifactsRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ResolveArtifactsResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.ResolveArtifactsResponse", [ + { no: 1, name: "replacements", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ArtifactInformation } + ]); + } + create(value?: PartialMessage): ResolveArtifactsResponse { + const message = { replacements: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ResolveArtifactsResponse): ResolveArtifactsResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.pipeline.v1.ArtifactInformation replacements */ 1: + message.replacements.push(ArtifactInformation.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ResolveArtifactsResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.pipeline.v1.ArtifactInformation replacements = 1; */ + for (let i = 0; i < message.replacements.length; i++) + ArtifactInformation.internalBinaryWrite(message.replacements[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.ResolveArtifactsResponse + */ +export const ResolveArtifactsResponse = new ResolveArtifactsResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetArtifactRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.GetArtifactRequest", [ + { no: 1, name: "artifact", kind: "message", T: () => ArtifactInformation } + ]); + } + create(value?: PartialMessage): GetArtifactRequest { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetArtifactRequest): GetArtifactRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.ArtifactInformation artifact */ 1: + message.artifact = ArtifactInformation.internalBinaryRead(reader, reader.uint32(), options, message.artifact); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.ArtifactInformation artifact = 1; */ + if (message.artifact) + ArtifactInformation.internalBinaryWrite(message.artifact, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.GetArtifactRequest + */ +export const GetArtifactRequest = new GetArtifactRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetArtifactResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.GetArtifactResponse", [ + { no: 1, name: "data", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): GetArtifactResponse { + const message = { data: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetArtifactResponse): GetArtifactResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bytes data */ 1: + message.data = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bytes data = 1; */ + if (message.data.length) + writer.tag(1, WireType.LengthDelimited).bytes(message.data); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.GetArtifactResponse + */ +export const GetArtifactResponse = new GetArtifactResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ArtifactRequestWrapper$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.ArtifactRequestWrapper", [ + { no: 1000, name: "resolve_artifact", kind: "message", oneof: "request", T: () => ResolveArtifactsRequest }, + { no: 1001, name: "get_artifact", kind: "message", oneof: "request", T: () => GetArtifactRequest } + ]); + } + create(value?: PartialMessage): ArtifactRequestWrapper { + const message = { request: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ArtifactRequestWrapper): ArtifactRequestWrapper { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.job_management.v1.ResolveArtifactsRequest resolve_artifact */ 1000: + message.request = { + oneofKind: "resolveArtifact", + resolveArtifact: ResolveArtifactsRequest.internalBinaryRead(reader, reader.uint32(), options, (message.request as any).resolveArtifact) + }; + break; + case /* org.apache.beam.model.job_management.v1.GetArtifactRequest get_artifact */ 1001: + message.request = { + oneofKind: "getArtifact", + getArtifact: GetArtifactRequest.internalBinaryRead(reader, reader.uint32(), options, (message.request as any).getArtifact) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ArtifactRequestWrapper, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.job_management.v1.ResolveArtifactsRequest resolve_artifact = 1000; */ + if (message.request.oneofKind === "resolveArtifact") + ResolveArtifactsRequest.internalBinaryWrite(message.request.resolveArtifact, writer.tag(1000, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.job_management.v1.GetArtifactRequest get_artifact = 1001; */ + if (message.request.oneofKind === "getArtifact") + GetArtifactRequest.internalBinaryWrite(message.request.getArtifact, writer.tag(1001, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.ArtifactRequestWrapper + */ +export const ArtifactRequestWrapper = new ArtifactRequestWrapper$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ArtifactResponseWrapper$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.ArtifactResponseWrapper", [ + { no: 1, name: "staging_token", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "is_last", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, + { no: 1000, name: "resolve_artifact_response", kind: "message", oneof: "response", T: () => ResolveArtifactsResponse }, + { no: 1001, name: "get_artifact_response", kind: "message", oneof: "response", T: () => GetArtifactResponse } + ]); + } + create(value?: PartialMessage): ArtifactResponseWrapper { + const message = { stagingToken: "", isLast: false, response: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ArtifactResponseWrapper): ArtifactResponseWrapper { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string staging_token */ 1: + message.stagingToken = reader.string(); + break; + case /* bool is_last */ 2: + message.isLast = reader.bool(); + break; + case /* org.apache.beam.model.job_management.v1.ResolveArtifactsResponse resolve_artifact_response */ 1000: + message.response = { + oneofKind: "resolveArtifactResponse", + resolveArtifactResponse: ResolveArtifactsResponse.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).resolveArtifactResponse) + }; + break; + case /* org.apache.beam.model.job_management.v1.GetArtifactResponse get_artifact_response */ 1001: + message.response = { + oneofKind: "getArtifactResponse", + getArtifactResponse: GetArtifactResponse.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).getArtifactResponse) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ArtifactResponseWrapper, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string staging_token = 1; */ + if (message.stagingToken !== "") + writer.tag(1, WireType.LengthDelimited).string(message.stagingToken); + /* bool is_last = 2; */ + if (message.isLast !== false) + writer.tag(2, WireType.Varint).bool(message.isLast); + /* org.apache.beam.model.job_management.v1.ResolveArtifactsResponse resolve_artifact_response = 1000; */ + if (message.response.oneofKind === "resolveArtifactResponse") + ResolveArtifactsResponse.internalBinaryWrite(message.response.resolveArtifactResponse, writer.tag(1000, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.job_management.v1.GetArtifactResponse get_artifact_response = 1001; */ + if (message.response.oneofKind === "getArtifactResponse") + GetArtifactResponse.internalBinaryWrite(message.response.getArtifactResponse, writer.tag(1001, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.ArtifactResponseWrapper + */ +export const ArtifactResponseWrapper = new ArtifactResponseWrapper$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ArtifactMetadata$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.ArtifactMetadata", [ + { no: 1, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "permissions", kind: "scalar", T: 13 /*ScalarType.UINT32*/ }, + { no: 4, name: "sha256", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ArtifactMetadata { + const message = { name: "", permissions: 0, sha256: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ArtifactMetadata): ArtifactMetadata { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string name */ 1: + message.name = reader.string(); + break; + case /* uint32 permissions */ 2: + message.permissions = reader.uint32(); + break; + case /* string sha256 */ 4: + message.sha256 = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ArtifactMetadata, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string name = 1; */ + if (message.name !== "") + writer.tag(1, WireType.LengthDelimited).string(message.name); + /* uint32 permissions = 2; */ + if (message.permissions !== 0) + writer.tag(2, WireType.Varint).uint32(message.permissions); + /* string sha256 = 4; */ + if (message.sha256 !== "") + writer.tag(4, WireType.LengthDelimited).string(message.sha256); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.ArtifactMetadata + */ +export const ArtifactMetadata = new ArtifactMetadata$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Manifest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.Manifest", [ + { no: 1, name: "artifact", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ArtifactMetadata } + ]); + } + create(value?: PartialMessage): Manifest { + const message = { artifact: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Manifest): Manifest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.job_management.v1.ArtifactMetadata artifact */ 1: + message.artifact.push(ArtifactMetadata.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Manifest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.job_management.v1.ArtifactMetadata artifact = 1; */ + for (let i = 0; i < message.artifact.length; i++) + ArtifactMetadata.internalBinaryWrite(message.artifact[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.Manifest + */ +export const Manifest = new Manifest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProxyManifest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.ProxyManifest", [ + { no: 1, name: "manifest", kind: "message", T: () => Manifest }, + { no: 2, name: "location", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ProxyManifest_Location } + ]); + } + create(value?: PartialMessage): ProxyManifest { + const message = { location: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProxyManifest): ProxyManifest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.job_management.v1.Manifest manifest */ 1: + message.manifest = Manifest.internalBinaryRead(reader, reader.uint32(), options, message.manifest); + break; + case /* repeated org.apache.beam.model.job_management.v1.ProxyManifest.Location location */ 2: + message.location.push(ProxyManifest_Location.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ProxyManifest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.job_management.v1.Manifest manifest = 1; */ + if (message.manifest) + Manifest.internalBinaryWrite(message.manifest, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* repeated org.apache.beam.model.job_management.v1.ProxyManifest.Location location = 2; */ + for (let i = 0; i < message.location.length; i++) + ProxyManifest_Location.internalBinaryWrite(message.location[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.ProxyManifest + */ +export const ProxyManifest = new ProxyManifest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProxyManifest_Location$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.ProxyManifest.Location", [ + { no: 1, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "uri", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ProxyManifest_Location { + const message = { name: "", uri: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProxyManifest_Location): ProxyManifest_Location { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string name */ 1: + message.name = reader.string(); + break; + case /* string uri */ 2: + message.uri = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ProxyManifest_Location, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string name = 1; */ + if (message.name !== "") + writer.tag(1, WireType.LengthDelimited).string(message.name); + /* string uri = 2; */ + if (message.uri !== "") + writer.tag(2, WireType.LengthDelimited).string(message.uri); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.ProxyManifest.Location + */ +export const ProxyManifest_Location = new ProxyManifest_Location$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetManifestRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.GetManifestRequest", [ + { no: 1, name: "retrieval_token", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): GetManifestRequest { + const message = { retrievalToken: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetManifestRequest): GetManifestRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string retrieval_token */ 1: + message.retrievalToken = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetManifestRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string retrieval_token = 1; */ + if (message.retrievalToken !== "") + writer.tag(1, WireType.LengthDelimited).string(message.retrievalToken); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.GetManifestRequest + */ +export const GetManifestRequest = new GetManifestRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetManifestResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.GetManifestResponse", [ + { no: 1, name: "manifest", kind: "message", T: () => Manifest } + ]); + } + create(value?: PartialMessage): GetManifestResponse { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetManifestResponse): GetManifestResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.job_management.v1.Manifest manifest */ 1: + message.manifest = Manifest.internalBinaryRead(reader, reader.uint32(), options, message.manifest); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetManifestResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.job_management.v1.Manifest manifest = 1; */ + if (message.manifest) + Manifest.internalBinaryWrite(message.manifest, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.GetManifestResponse + */ +export const GetManifestResponse = new GetManifestResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class LegacyGetArtifactRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.LegacyGetArtifactRequest", [ + { no: 1, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "retrieval_token", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): LegacyGetArtifactRequest { + const message = { name: "", retrievalToken: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LegacyGetArtifactRequest): LegacyGetArtifactRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string name */ 1: + message.name = reader.string(); + break; + case /* string retrieval_token */ 2: + message.retrievalToken = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: LegacyGetArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string name = 1; */ + if (message.name !== "") + writer.tag(1, WireType.LengthDelimited).string(message.name); + /* string retrieval_token = 2; */ + if (message.retrievalToken !== "") + writer.tag(2, WireType.LengthDelimited).string(message.retrievalToken); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.LegacyGetArtifactRequest + */ +export const LegacyGetArtifactRequest = new LegacyGetArtifactRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ArtifactChunk$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.ArtifactChunk", [ + { no: 1, name: "data", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): ArtifactChunk { + const message = { data: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ArtifactChunk): ArtifactChunk { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bytes data */ 1: + message.data = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ArtifactChunk, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bytes data = 1; */ + if (message.data.length) + writer.tag(1, WireType.LengthDelimited).bytes(message.data); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.ArtifactChunk + */ +export const ArtifactChunk = new ArtifactChunk$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class PutArtifactMetadata$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.PutArtifactMetadata", [ + { no: 1, name: "staging_session_token", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "metadata", kind: "message", T: () => ArtifactMetadata } + ]); + } + create(value?: PartialMessage): PutArtifactMetadata { + const message = { stagingSessionToken: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: PutArtifactMetadata): PutArtifactMetadata { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string staging_session_token */ 1: + message.stagingSessionToken = reader.string(); + break; + case /* org.apache.beam.model.job_management.v1.ArtifactMetadata metadata */ 2: + message.metadata = ArtifactMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: PutArtifactMetadata, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string staging_session_token = 1; */ + if (message.stagingSessionToken !== "") + writer.tag(1, WireType.LengthDelimited).string(message.stagingSessionToken); + /* org.apache.beam.model.job_management.v1.ArtifactMetadata metadata = 2; */ + if (message.metadata) + ArtifactMetadata.internalBinaryWrite(message.metadata, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.PutArtifactMetadata + */ +export const PutArtifactMetadata = new PutArtifactMetadata$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class PutArtifactRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.PutArtifactRequest", [ + { no: 1, name: "metadata", kind: "message", oneof: "content", T: () => PutArtifactMetadata }, + { no: 2, name: "data", kind: "message", oneof: "content", T: () => ArtifactChunk } + ]); + } + create(value?: PartialMessage): PutArtifactRequest { + const message = { content: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: PutArtifactRequest): PutArtifactRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.job_management.v1.PutArtifactMetadata metadata */ 1: + message.content = { + oneofKind: "metadata", + metadata: PutArtifactMetadata.internalBinaryRead(reader, reader.uint32(), options, (message.content as any).metadata) + }; + break; + case /* org.apache.beam.model.job_management.v1.ArtifactChunk data */ 2: + message.content = { + oneofKind: "data", + data: ArtifactChunk.internalBinaryRead(reader, reader.uint32(), options, (message.content as any).data) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: PutArtifactRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.job_management.v1.PutArtifactMetadata metadata = 1; */ + if (message.content.oneofKind === "metadata") + PutArtifactMetadata.internalBinaryWrite(message.content.metadata, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.job_management.v1.ArtifactChunk data = 2; */ + if (message.content.oneofKind === "data") + ArtifactChunk.internalBinaryWrite(message.content.data, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.PutArtifactRequest + */ +export const PutArtifactRequest = new PutArtifactRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class PutArtifactResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.PutArtifactResponse", []); + } + create(value?: PartialMessage): PutArtifactResponse { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: PutArtifactResponse): PutArtifactResponse { + return target ?? this.create(); + } + internalBinaryWrite(message: PutArtifactResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.PutArtifactResponse + */ +export const PutArtifactResponse = new PutArtifactResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class CommitManifestRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.CommitManifestRequest", [ + { no: 1, name: "manifest", kind: "message", T: () => Manifest }, + { no: 2, name: "staging_session_token", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): CommitManifestRequest { + const message = { stagingSessionToken: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CommitManifestRequest): CommitManifestRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.job_management.v1.Manifest manifest */ 1: + message.manifest = Manifest.internalBinaryRead(reader, reader.uint32(), options, message.manifest); + break; + case /* string staging_session_token */ 2: + message.stagingSessionToken = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: CommitManifestRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.job_management.v1.Manifest manifest = 1; */ + if (message.manifest) + Manifest.internalBinaryWrite(message.manifest, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* string staging_session_token = 2; */ + if (message.stagingSessionToken !== "") + writer.tag(2, WireType.LengthDelimited).string(message.stagingSessionToken); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.CommitManifestRequest + */ +export const CommitManifestRequest = new CommitManifestRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class CommitManifestResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.CommitManifestResponse", [ + { no: 1, name: "retrieval_token", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): CommitManifestResponse { + const message = { retrievalToken: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CommitManifestResponse): CommitManifestResponse { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string retrieval_token */ 1: + message.retrievalToken = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: CommitManifestResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string retrieval_token = 1; */ + if (message.retrievalToken !== "") + writer.tag(1, WireType.LengthDelimited).string(message.retrievalToken); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.CommitManifestResponse + */ +export const CommitManifestResponse = new CommitManifestResponse$Type(); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.job_management.v1.ArtifactRetrievalService + */ +export const ArtifactRetrievalService = new ServiceType("org.apache.beam.model.job_management.v1.ArtifactRetrievalService", [ + { name: "ResolveArtifacts", options: {}, I: ResolveArtifactsRequest, O: ResolveArtifactsResponse }, + { name: "GetArtifact", serverStreaming: true, options: {}, I: GetArtifactRequest, O: GetArtifactResponse } +]); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.job_management.v1.ArtifactStagingService + */ +export const ArtifactStagingService = new ServiceType("org.apache.beam.model.job_management.v1.ArtifactStagingService", [ + { name: "ReverseArtifactRetrievalService", serverStreaming: true, clientStreaming: 
true, options: {}, I: ArtifactResponseWrapper, O: ArtifactRequestWrapper } +]); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.job_management.v1.LegacyArtifactStagingService + */ +export const LegacyArtifactStagingService = new ServiceType("org.apache.beam.model.job_management.v1.LegacyArtifactStagingService", [ + { name: "PutArtifact", clientStreaming: true, options: {}, I: PutArtifactRequest, O: PutArtifactResponse }, + { name: "CommitManifest", options: {}, I: CommitManifestRequest, O: CommitManifestResponse } +]); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.job_management.v1.LegacyArtifactRetrievalService + */ +export const LegacyArtifactRetrievalService = new ServiceType("org.apache.beam.model.job_management.v1.LegacyArtifactRetrievalService", [ + { name: "GetManifest", options: {}, I: GetManifestRequest, O: GetManifestResponse }, + { name: "GetArtifact", serverStreaming: true, options: {}, I: LegacyGetArtifactRequest, O: ArtifactChunk } +]); diff --git a/sdks/node-ts/src/apache_beam/proto/beam_expansion_api.client.ts b/sdks/node-ts/src/apache_beam/proto/beam_expansion_api.client.ts new file mode 100644 index 000000000000..3d20a59a2ec6 --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/beam_expansion_api.client.ts @@ -0,0 +1,64 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "beam_expansion_api.proto" (package "org.apache.beam.model.expansion.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers describing the Expansion API, an api for expanding +// transforms in a remote SDK. +// +import type { RpcTransport } from "@protobuf-ts/runtime-rpc"; +import type { ServiceInfo } from "@protobuf-ts/runtime-rpc"; +import { ExpansionService } from "./beam_expansion_api"; +import { stackIntercept } from "@protobuf-ts/runtime-rpc"; +import type { ExpansionResponse } from "./beam_expansion_api"; +import type { ExpansionRequest } from "./beam_expansion_api"; +import type { UnaryCall } from "@protobuf-ts/runtime-rpc"; +import type { RpcOptions } from "@protobuf-ts/runtime-rpc"; +/** + * Job Service for constructing pipelines + * + * @generated from protobuf service org.apache.beam.model.expansion.v1.ExpansionService + */ +export interface IExpansionServiceClient { + /** + * @generated from protobuf rpc: Expand(org.apache.beam.model.expansion.v1.ExpansionRequest) returns (org.apache.beam.model.expansion.v1.ExpansionResponse); + */ + expand(input: ExpansionRequest, options?: RpcOptions): UnaryCall; +} +/** + * Job Service for constructing pipelines + * + * @generated from protobuf service org.apache.beam.model.expansion.v1.ExpansionService + */ +export class ExpansionServiceClient implements IExpansionServiceClient, ServiceInfo { + typeName = ExpansionService.typeName; + methods = ExpansionService.methods; + options = ExpansionService.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * @generated from protobuf rpc: Expand(org.apache.beam.model.expansion.v1.ExpansionRequest) returns 
(org.apache.beam.model.expansion.v1.ExpansionResponse); + */ + expand(input: ExpansionRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } +} diff --git a/sdks/node-ts/src/apache_beam/proto/beam_expansion_api.ts b/sdks/node-ts/src/apache_beam/proto/beam_expansion_api.ts new file mode 100644 index 000000000000..13d7ba1f9b9f --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/beam_expansion_api.ts @@ -0,0 +1,236 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "beam_expansion_api.proto" (package "org.apache.beam.model.expansion.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers describing the Expansion API, an api for expanding +// transforms in a remote SDK. 
+// +import { ServiceType } from "@protobuf-ts/runtime-rpc"; +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { PTransform } from "./beam_runner_api"; +import { Components } from "./beam_runner_api"; +/** + * @generated from protobuf message org.apache.beam.model.expansion.v1.ExpansionRequest + */ +export interface ExpansionRequest { + /** + * Set of components needed to interpret the transform, or which + * may be useful for its expansion. This includes the input + * PCollections (if any) to the to-be-expanded transform, along + * with their coders and windowing strategies. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Components components = 1; + */ + components?: Components; + /** + * The actual PTransform to be expaneded according to its spec. + * Its input should be set, but its subtransforms and outputs + * should not be. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.PTransform transform = 2; + */ + transform?: PTransform; + /** + * A namespace (prefix) to use for the id of any newly created + * components. + * + * @generated from protobuf field: string namespace = 3; + */ + namespace: string; +} +/** + * @generated from protobuf message org.apache.beam.model.expansion.v1.ExpansionResponse + */ +export interface ExpansionResponse { + /** + * Set of components needed to execute the expanded transform, + * including the (original) inputs, outputs, and subtransforms. 
+ * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Components components = 1; + */ + components?: Components; + /** + * The expanded transform itself, with references to its outputs + * and subtransforms. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.PTransform transform = 2; + */ + transform?: PTransform; + /** + * A set of requirements that must be appended to this pipeline's + * requirements. + * + * @generated from protobuf field: repeated string requirements = 3; + */ + requirements: string[]; + /** + * (Optional) An string representation of any error encountered while + * attempting to expand this transform. + * + * @generated from protobuf field: string error = 10; + */ + error: string; +} +// @generated message type with reflection information, may provide speed optimized methods +class ExpansionRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.expansion.v1.ExpansionRequest", [ + { no: 1, name: "components", kind: "message", T: () => Components }, + { no: 2, name: "transform", kind: "message", T: () => PTransform }, + { no: 3, name: "namespace", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ExpansionRequest { + const message = { namespace: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ExpansionRequest): ExpansionRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.Components components */ 1: + message.components = Components.internalBinaryRead(reader, reader.uint32(), options, message.components); + break; + case /* org.apache.beam.model.pipeline.v1.PTransform transform */ 2: + message.transform = PTransform.internalBinaryRead(reader, reader.uint32(), options, message.transform); + break; + case /* string namespace */ 3: + message.namespace = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ExpansionRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.Components components = 1; */ + if (message.components) + Components.internalBinaryWrite(message.components, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.PTransform transform = 2; */ + if (message.transform) + PTransform.internalBinaryWrite(message.transform, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* string namespace = 3; */ + if (message.namespace !== "") + writer.tag(3, WireType.LengthDelimited).string(message.namespace); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.expansion.v1.ExpansionRequest + */ +export const ExpansionRequest = new ExpansionRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ExpansionResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.expansion.v1.ExpansionResponse", [ + { no: 1, name: "components", kind: "message", T: () => Components }, + { no: 2, name: "transform", kind: "message", T: () => PTransform }, + { no: 3, name: "requirements", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, + { no: 10, name: "error", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ExpansionResponse { + const message = { requirements: [], error: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ExpansionResponse): ExpansionResponse { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.Components components */ 1: + message.components = Components.internalBinaryRead(reader, reader.uint32(), options, message.components); + break; + case /* org.apache.beam.model.pipeline.v1.PTransform transform */ 2: + message.transform = PTransform.internalBinaryRead(reader, reader.uint32(), options, message.transform); + break; + case /* repeated string requirements */ 3: + message.requirements.push(reader.string()); + break; + case /* string error */ 10: + message.error = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ExpansionResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.Components components = 1; */ + if (message.components) + Components.internalBinaryWrite(message.components, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.PTransform transform = 2; */ + if (message.transform) + PTransform.internalBinaryWrite(message.transform, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* repeated string requirements = 3; */ + for (let i = 0; i < message.requirements.length; i++) + writer.tag(3, WireType.LengthDelimited).string(message.requirements[i]); + /* string error = 10; */ + if (message.error !== "") + writer.tag(10, WireType.LengthDelimited).string(message.error); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.expansion.v1.ExpansionResponse + */ +export const ExpansionResponse = new ExpansionResponse$Type(); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.expansion.v1.ExpansionService + */ +export const ExpansionService = new ServiceType("org.apache.beam.model.expansion.v1.ExpansionService", [ + { name: "Expand", options: {}, I: ExpansionRequest, O: ExpansionResponse } +]); diff --git a/sdks/node-ts/src/apache_beam/proto/beam_fn_api.client.ts b/sdks/node-ts/src/apache_beam/proto/beam_fn_api.client.ts new file mode 100644 index 000000000000..7f235f5330b8 --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/beam_fn_api.client.ts @@ -0,0 +1,306 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "beam_fn_api.proto" (package "org.apache.beam.model.fn_execution.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers describing the Fn API and boostrapping. +// +// TODO: Usage of plural names in lists looks awkward in Java +// e.g. 
getOutputsMap, addCodersBuilder +// +// TODO: gRPC / proto field names conflict with generated code +// e.g. "class" in java, "output" in python +// +// +// TODO: Consider consolidating common components in another package +// and language namespaces for re-use with Runner Api. +// +import { BeamFnWorkerStatus } from "./beam_fn_api"; +import type { WorkerStatusRequest } from "./beam_fn_api"; +import type { WorkerStatusResponse } from "./beam_fn_api"; +import { BeamFnExternalWorkerPool } from "./beam_fn_api"; +import type { StopWorkerResponse } from "./beam_fn_api"; +import type { StopWorkerRequest } from "./beam_fn_api"; +import type { StartWorkerResponse } from "./beam_fn_api"; +import type { StartWorkerRequest } from "./beam_fn_api"; +import { BeamFnLogging } from "./beam_fn_api"; +import type { LogControl } from "./beam_fn_api"; +import type { LogEntry_List } from "./beam_fn_api"; +import { BeamFnState } from "./beam_fn_api"; +import type { StateResponse } from "./beam_fn_api"; +import type { StateRequest } from "./beam_fn_api"; +import { BeamFnData } from "./beam_fn_api"; +import type { Elements } from "./beam_fn_api"; +import type { RpcTransport } from "@protobuf-ts/runtime-rpc"; +import type { ServiceInfo } from "@protobuf-ts/runtime-rpc"; +import { BeamFnControl } from "./beam_fn_api"; +import type { ProcessBundleDescriptor } from "./beam_fn_api"; +import type { GetProcessBundleDescriptorRequest } from "./beam_fn_api"; +import type { UnaryCall } from "@protobuf-ts/runtime-rpc"; +import { stackIntercept } from "@protobuf-ts/runtime-rpc"; +import type { InstructionRequest } from "./beam_fn_api"; +import type { InstructionResponse } from "./beam_fn_api"; +import type { DuplexStreamingCall } from "@protobuf-ts/runtime-rpc"; +import type { RpcOptions } from "@protobuf-ts/runtime-rpc"; +// +// Control Plane API +// +// Progress reporting and splitting still need further vetting. 
Also, this may +// change with the addition of new types of instructions/responses related to +// metrics. + +/** + * An API that describes the work that a SDK harness is meant to do. + * Stable + * + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.BeamFnControl + */ +export interface IBeamFnControlClient { + /** + * Instructions sent by the runner to the SDK requesting different types + * of work. + * + * @generated from protobuf rpc: Control(stream org.apache.beam.model.fn_execution.v1.InstructionResponse) returns (stream org.apache.beam.model.fn_execution.v1.InstructionRequest); + */ + control(options?: RpcOptions): DuplexStreamingCall; + /** + * Used to get the full process bundle descriptors for bundles one + * is asked to process. + * + * @generated from protobuf rpc: GetProcessBundleDescriptor(org.apache.beam.model.fn_execution.v1.GetProcessBundleDescriptorRequest) returns (org.apache.beam.model.fn_execution.v1.ProcessBundleDescriptor); + */ + getProcessBundleDescriptor(input: GetProcessBundleDescriptorRequest, options?: RpcOptions): UnaryCall; +} +// +// Control Plane API +// +// Progress reporting and splitting still need further vetting. Also, this may +// change with the addition of new types of instructions/responses related to +// metrics. + +/** + * An API that describes the work that a SDK harness is meant to do. + * Stable + * + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.BeamFnControl + */ +export class BeamFnControlClient implements IBeamFnControlClient, ServiceInfo { + typeName = BeamFnControl.typeName; + methods = BeamFnControl.methods; + options = BeamFnControl.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * Instructions sent by the runner to the SDK requesting different types + * of work. 
+ * + * @generated from protobuf rpc: Control(stream org.apache.beam.model.fn_execution.v1.InstructionResponse) returns (stream org.apache.beam.model.fn_execution.v1.InstructionRequest); + */ + control(options?: RpcOptions): DuplexStreamingCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("duplex", this._transport, method, opt); + } + /** + * Used to get the full process bundle descriptors for bundles one + * is asked to process. + * + * @generated from protobuf rpc: GetProcessBundleDescriptor(org.apache.beam.model.fn_execution.v1.GetProcessBundleDescriptorRequest) returns (org.apache.beam.model.fn_execution.v1.ProcessBundleDescriptor); + */ + getProcessBundleDescriptor(input: GetProcessBundleDescriptorRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[1], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } +} +/** + * Stable + * + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.BeamFnData + */ +export interface IBeamFnDataClient { + /** + * Used to send data between harnesses. + * + * @generated from protobuf rpc: Data(stream org.apache.beam.model.fn_execution.v1.Elements) returns (stream org.apache.beam.model.fn_execution.v1.Elements); + */ + data(options?: RpcOptions): DuplexStreamingCall; +} +/** + * Stable + * + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.BeamFnData + */ +export class BeamFnDataClient implements IBeamFnDataClient, ServiceInfo { + typeName = BeamFnData.typeName; + methods = BeamFnData.methods; + options = BeamFnData.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * Used to send data between harnesses. 
+ * + * @generated from protobuf rpc: Data(stream org.apache.beam.model.fn_execution.v1.Elements) returns (stream org.apache.beam.model.fn_execution.v1.Elements); + */ + data(options?: RpcOptions): DuplexStreamingCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("duplex", this._transport, method, opt); + } +} +/** + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.BeamFnState + */ +export interface IBeamFnStateClient { + /** + * Used to get/append/clear state stored by the runner on behalf of the SDK. + * + * @generated from protobuf rpc: State(stream org.apache.beam.model.fn_execution.v1.StateRequest) returns (stream org.apache.beam.model.fn_execution.v1.StateResponse); + */ + state(options?: RpcOptions): DuplexStreamingCall; +} +/** + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.BeamFnState + */ +export class BeamFnStateClient implements IBeamFnStateClient, ServiceInfo { + typeName = BeamFnState.typeName; + methods = BeamFnState.methods; + options = BeamFnState.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * Used to get/append/clear state stored by the runner on behalf of the SDK. + * + * @generated from protobuf rpc: State(stream org.apache.beam.model.fn_execution.v1.StateRequest) returns (stream org.apache.beam.model.fn_execution.v1.StateResponse); + */ + state(options?: RpcOptions): DuplexStreamingCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("duplex", this._transport, method, opt); + } +} +/** + * Stable + * + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.BeamFnLogging + */ +export interface IBeamFnLoggingClient { + /** + * Allows for the SDK to emit log entries which the runner can + * associate with the active job. 
+ * + * @generated from protobuf rpc: Logging(stream org.apache.beam.model.fn_execution.v1.LogEntry.List) returns (stream org.apache.beam.model.fn_execution.v1.LogControl); + */ + logging(options?: RpcOptions): DuplexStreamingCall; +} +/** + * Stable + * + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.BeamFnLogging + */ +export class BeamFnLoggingClient implements IBeamFnLoggingClient, ServiceInfo { + typeName = BeamFnLogging.typeName; + methods = BeamFnLogging.methods; + options = BeamFnLogging.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * Allows for the SDK to emit log entries which the runner can + * associate with the active job. + * + * @generated from protobuf rpc: Logging(stream org.apache.beam.model.fn_execution.v1.LogEntry.List) returns (stream org.apache.beam.model.fn_execution.v1.LogControl); + */ + logging(options?: RpcOptions): DuplexStreamingCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("duplex", this._transport, method, opt); + } +} +/** + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.BeamFnExternalWorkerPool + */ +export interface IBeamFnExternalWorkerPoolClient { + /** + * Start the SDK worker with the given ID. + * + * @generated from protobuf rpc: StartWorker(org.apache.beam.model.fn_execution.v1.StartWorkerRequest) returns (org.apache.beam.model.fn_execution.v1.StartWorkerResponse); + */ + startWorker(input: StartWorkerRequest, options?: RpcOptions): UnaryCall; + /** + * Stop the SDK worker. 
+ * + * @generated from protobuf rpc: StopWorker(org.apache.beam.model.fn_execution.v1.StopWorkerRequest) returns (org.apache.beam.model.fn_execution.v1.StopWorkerResponse); + */ + stopWorker(input: StopWorkerRequest, options?: RpcOptions): UnaryCall; +} +/** + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.BeamFnExternalWorkerPool + */ +export class BeamFnExternalWorkerPoolClient implements IBeamFnExternalWorkerPoolClient, ServiceInfo { + typeName = BeamFnExternalWorkerPool.typeName; + methods = BeamFnExternalWorkerPool.methods; + options = BeamFnExternalWorkerPool.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * Start the SDK worker with the given ID. + * + * @generated from protobuf rpc: StartWorker(org.apache.beam.model.fn_execution.v1.StartWorkerRequest) returns (org.apache.beam.model.fn_execution.v1.StartWorkerResponse); + */ + startWorker(input: StartWorkerRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * Stop the SDK worker. + * + * @generated from protobuf rpc: StopWorker(org.apache.beam.model.fn_execution.v1.StopWorkerRequest) returns (org.apache.beam.model.fn_execution.v1.StopWorkerResponse); + */ + stopWorker(input: StopWorkerRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[1], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } +} +/** + * API for SDKs to report debug-related statuses to runner during pipeline execution. 
+ * + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.BeamFnWorkerStatus + */ +export interface IBeamFnWorkerStatusClient { + /** + * @generated from protobuf rpc: WorkerStatus(stream org.apache.beam.model.fn_execution.v1.WorkerStatusResponse) returns (stream org.apache.beam.model.fn_execution.v1.WorkerStatusRequest); + */ + workerStatus(options?: RpcOptions): DuplexStreamingCall; +} +/** + * API for SDKs to report debug-related statuses to runner during pipeline execution. + * + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.BeamFnWorkerStatus + */ +export class BeamFnWorkerStatusClient implements IBeamFnWorkerStatusClient, ServiceInfo { + typeName = BeamFnWorkerStatus.typeName; + methods = BeamFnWorkerStatus.methods; + options = BeamFnWorkerStatus.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * @generated from protobuf rpc: WorkerStatus(stream org.apache.beam.model.fn_execution.v1.WorkerStatusResponse) returns (stream org.apache.beam.model.fn_execution.v1.WorkerStatusRequest); + */ + workerStatus(options?: RpcOptions): DuplexStreamingCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("duplex", this._transport, method, opt); + } +} diff --git a/sdks/node-ts/src/apache_beam/proto/beam_fn_api.ts b/sdks/node-ts/src/apache_beam/proto/beam_fn_api.ts new file mode 100644 index 000000000000..cccd9e1ed89a --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/beam_fn_api.ts @@ -0,0 +1,5163 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "beam_fn_api.proto" (package "org.apache.beam.model.fn_execution.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers describing the Fn API and bootstrapping. +// +// TODO: Usage of plural names in lists looks awkward in Java +// e.g. getOutputsMap, addCodersBuilder +// +// TODO: gRPC / proto field names conflict with generated code +// e.g. "class" in java, "output" in python +// +// +// TODO: Consider consolidating common components in another package +// and language namespaces for re-use with Runner Api. 
+// +import { ServiceType } from "@protobuf-ts/runtime-rpc"; +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { MonitoringInfo } from "./metrics"; +import { Duration } from "./google/protobuf/duration"; +import { IsBounded_Enum } from "./beam_runner_api"; +import { Timestamp } from "./google/protobuf/timestamp"; +import { Environment } from "./beam_runner_api"; +import { Coder } from "./beam_runner_api"; +import { WindowingStrategy } from "./beam_runner_api"; +import { PCollection } from "./beam_runner_api"; +import { PTransform } from "./beam_runner_api"; +import { ApiServiceDescriptor } from "./endpoints"; +/** + * A descriptor for connecting to a remote port using the Beam Fn Data API. + * Allows for communication between two environments (for example between the + * runner and the SDK). + * Stable + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.RemoteGrpcPort + */ +export interface RemoteGrpcPort { + /** + * (Required) An API descriptor which describes where to + * connect to including any authentication that is required. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor api_service_descriptor = 1; + */ + apiServiceDescriptor?: ApiServiceDescriptor; + /** + * (Required) The ID of the Coder that will be used to encode and decode data + * sent over this port. 
+ * + * @generated from protobuf field: string coder_id = 2; + */ + coderId: string; +} +/** + * Requests the ProcessBundleDescriptor with the given id. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.GetProcessBundleDescriptorRequest + */ +export interface GetProcessBundleDescriptorRequest { + /** + * @generated from protobuf field: string process_bundle_descriptor_id = 1; + */ + processBundleDescriptorId: string; +} +/** + * A request sent by a runner which the SDK is asked to fulfill. + * For any unsupported request type, an error should be returned with a + * matching instruction id. + * Stable + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.InstructionRequest + */ +export interface InstructionRequest { + /** + * (Required) A unique identifier provided by the runner which represents + * this requests execution. The InstructionResponse MUST have the matching id. + * + * @generated from protobuf field: string instruction_id = 1; + */ + instructionId: string; + /** + * @generated from protobuf oneof: request + */ + request: { + oneofKind: "processBundle"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.ProcessBundleRequest process_bundle = 1001; + */ + processBundle: ProcessBundleRequest; + } | { + oneofKind: "processBundleProgress"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.ProcessBundleProgressRequest process_bundle_progress = 1002; + */ + processBundleProgress: ProcessBundleProgressRequest; + } | { + oneofKind: "processBundleSplit"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.ProcessBundleSplitRequest process_bundle_split = 1003; + */ + processBundleSplit: ProcessBundleSplitRequest; + } | { + oneofKind: "finalizeBundle"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.FinalizeBundleRequest finalize_bundle = 1004; + */ + finalizeBundle: FinalizeBundleRequest; + } | 
{ + oneofKind: "monitoringInfos"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.MonitoringInfosMetadataRequest monitoring_infos = 1005; + */ + monitoringInfos: MonitoringInfosMetadataRequest; + } | { + oneofKind: "harnessMonitoringInfos"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.HarnessMonitoringInfosRequest harness_monitoring_infos = 1006; + */ + harnessMonitoringInfos: HarnessMonitoringInfosRequest; + } | { + oneofKind: "register"; + /** + * DEPRECATED + * + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.RegisterRequest register = 1000; + */ + register: RegisterRequest; + } | { + oneofKind: undefined; + }; +} +/** + * The response for an associated request the SDK had been asked to fulfill. + * Stable + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.InstructionResponse + */ +export interface InstructionResponse { + /** + * (Required) A reference provided by the runner which represents a requests + * execution. The InstructionResponse MUST have the matching id when + * responding to the runner. + * + * @generated from protobuf field: string instruction_id = 1; + */ + instructionId: string; + /** + * If this is specified, then this instruction has failed. + * A human readable string representing the reason as to why processing has + * failed. 
+ * + * @generated from protobuf field: string error = 2; + */ + error: string; + /** + * @generated from protobuf oneof: response + */ + response: { + oneofKind: "processBundle"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.ProcessBundleResponse process_bundle = 1001; + */ + processBundle: ProcessBundleResponse; + } | { + oneofKind: "processBundleProgress"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.ProcessBundleProgressResponse process_bundle_progress = 1002; + */ + processBundleProgress: ProcessBundleProgressResponse; + } | { + oneofKind: "processBundleSplit"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.ProcessBundleSplitResponse process_bundle_split = 1003; + */ + processBundleSplit: ProcessBundleSplitResponse; + } | { + oneofKind: "finalizeBundle"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.FinalizeBundleResponse finalize_bundle = 1004; + */ + finalizeBundle: FinalizeBundleResponse; + } | { + oneofKind: "monitoringInfos"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.MonitoringInfosMetadataResponse monitoring_infos = 1005; + */ + monitoringInfos: MonitoringInfosMetadataResponse; + } | { + oneofKind: "harnessMonitoringInfos"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.HarnessMonitoringInfosResponse harness_monitoring_infos = 1006; + */ + harnessMonitoringInfos: HarnessMonitoringInfosResponse; + } | { + oneofKind: "register"; + /** + * DEPRECATED + * + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.RegisterResponse register = 1000; + */ + register: RegisterResponse; + } | { + oneofKind: undefined; + }; +} +/** + * A request to provide full MonitoringInfo associated with the entire SDK + * harness process, not specific to a bundle. 
+ * + * An SDK can report metrics using an identifier that only contains the + * associated payload. A runner who wants to receive the full metrics + * information can request all the monitoring metadata via a + * MonitoringInfosMetadataRequest providing a list of ids as necessary. + * + * The SDK is allowed to reuse the identifiers + * for the lifetime of the associated control connection as long + * as the MonitoringInfo could be reconstructed fully by overwriting its + * payload field with the bytes specified here. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.HarnessMonitoringInfosRequest + */ +export interface HarnessMonitoringInfosRequest { +} +/** + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.HarnessMonitoringInfosResponse + */ +export interface HarnessMonitoringInfosResponse { + /** + * An identifier to MonitoringInfo.payload mapping containing + * Metrics associated with the SDK harness, not a specific bundle. + * + * An SDK can report metrics using an identifier that only contains the + * associated payload. A runner who wants to receive the full metrics + * information can request all the monitoring metadata via a + * MonitoringInfosMetadataRequest providing a list of ids as necessary. + * + * The SDK is allowed to reuse the identifiers + * for the lifetime of the associated control connection as long + * as the MonitoringInfo could be reconstructed fully by overwriting its + * payload field with the bytes specified here. + * + * @generated from protobuf field: map monitoring_data = 1; + */ + monitoringData: { + [key: string]: Uint8Array; + }; +} +/** + * A list of objects which can be referred to by the runner in + * future requests. + * Stable + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.RegisterRequest + */ +export interface RegisterRequest { + /** + * (Optional) The set of descriptors used to process bundles. 
+ * + * @generated from protobuf field: repeated org.apache.beam.model.fn_execution.v1.ProcessBundleDescriptor process_bundle_descriptor = 1; + */ + processBundleDescriptor: ProcessBundleDescriptor[]; +} +/** + * Stable + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.RegisterResponse + */ +export interface RegisterResponse { +} +/** + * Definitions that should be used to construct the bundle processing graph. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleDescriptor + */ +export interface ProcessBundleDescriptor { + /** + * (Required) A pipeline level unique id which can be used as a reference to + * refer to this. + * + * @generated from protobuf field: string id = 1; + */ + id: string; + /** + * (Required) A map from pipeline-scoped id to PTransform. + * + * @generated from protobuf field: map transforms = 2; + */ + transforms: { + [key: string]: PTransform; + }; + /** + * (Required) A map from pipeline-scoped id to PCollection. + * + * @generated from protobuf field: map pcollections = 3; + */ + pcollections: { + [key: string]: PCollection; + }; + /** + * (Required) A map from pipeline-scoped id to WindowingStrategy. + * + * @generated from protobuf field: map windowing_strategies = 4; + */ + windowingStrategies: { + [key: string]: WindowingStrategy; + }; + /** + * (Required) A map from pipeline-scoped id to Coder. + * + * @generated from protobuf field: map coders = 5; + */ + coders: { + [key: string]: Coder; + }; + /** + * (Required) A map from pipeline-scoped id to Environment. + * + * @generated from protobuf field: map environments = 6; + */ + environments: { + [key: string]: Environment; + }; + /** + * A descriptor describing the end point to use for State API + * calls. Required if the Runner intends to send remote references over the + * data plane or if any of the transforms rely on user state or side inputs. 
+ * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor state_api_service_descriptor = 7; + */ + stateApiServiceDescriptor?: ApiServiceDescriptor; + /** + * A descriptor describing the end point to use for Data API for user timers. + * Required if the ProcessBundleDescriptor contains any transforms that have user timers. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor timer_api_service_descriptor = 8; + */ + timerApiServiceDescriptor?: ApiServiceDescriptor; +} +/** + * One of the applications specifying the scope of work for a bundle. + * See + * https://docs.google.com/document/d/1tUDb45sStdR8u7-jBkGdw3OGFK7aa2-V7eo86zYSE_4/edit#heading=h.9g3g5weg2u9 + * for further details. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.BundleApplication + */ +export interface BundleApplication { + /** + * (Required) The transform to which to pass the element + * + * @generated from protobuf field: string transform_id = 1; + */ + transformId: string; + /** + * (Required) Name of the transform's input to which to pass the element. + * + * @generated from protobuf field: string input_id = 2; + */ + inputId: string; + /** + * (Required) The encoded element to pass to the transform. + * + * @generated from protobuf field: bytes element = 3; + */ + element: Uint8Array; + /** + * The map is keyed by the local output name of the PTransform. Each + * value represents a lower bound on the timestamps of elements that + * are produced by this PTransform into each of its output PCollections + * when invoked with this application. + * + * If there is no watermark reported from RestrictionTracker, the runner will + * use MIN_TIMESTAMP by default. + * + * @generated from protobuf field: map output_watermarks = 4; + */ + outputWatermarks: { + [key: string]: Timestamp; + }; + /** + * Whether this application potentially produces an unbounded + * amount of data. 
Note that this should only be set to BOUNDED if and + * only if the application is known to produce a finite amount of output. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.IsBounded.Enum is_bounded = 5; + */ + isBounded: IsBounded_Enum; +} +/** + * An Application should be scheduled for execution after a delay. + * Either an absolute timestamp or a relative timestamp can represent a + * scheduled execution time. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.DelayedBundleApplication + */ +export interface DelayedBundleApplication { + /** + * (Required) The application that should be scheduled. + * + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.BundleApplication application = 1; + */ + application?: BundleApplication; + /** + * Recommended time delay at which the application should be scheduled to + * execute by the runner. Time delay that equals 0 may be scheduled to execute + * immediately. The unit of time delay should be microsecond. + * + * @generated from protobuf field: google.protobuf.Duration requested_time_delay = 2; + */ + requestedTimeDelay?: Duration; +} +/** + * A request to process a given bundle. + * Stable + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleRequest + */ +export interface ProcessBundleRequest { + /** + * (Required) A reference to the process bundle descriptor that must be + * instantiated and executed by the SDK harness. + * + * @generated from protobuf field: string process_bundle_descriptor_id = 1; + */ + processBundleDescriptorId: string; + /** + * (Optional) A list of cache tokens that can be used by an SDK to reuse + * cached data returned by the State API across multiple bundles. 
+ * + * @generated from protobuf field: repeated org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken cache_tokens = 2; + */ + cacheTokens: ProcessBundleRequest_CacheToken[]; + /** + * (Optional) Elements to be processed with the bundle. Either all or + * none of the bundle elements should be included in the ProcessBundleRequest. + * This embedding is to achieve better efficiency for bundles that contain + * only small amounts of data and are cheap to be processed on the SDK harness + * side. This field can be set only if the SDK declares that it supports the + * beam:protocol:control_request_elements_embedding:v1 capability. See more + * at https://s.apache.org/beam-fn-api-control-data-embedding. + * + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.Elements elements = 3; + */ + elements?: Elements; +} +/** + * A cache token which can be used by an SDK to check for the validity + * of cached elements which have a cache token associated. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken + */ +export interface ProcessBundleRequest_CacheToken { + /** + * @generated from protobuf oneof: type + */ + type: { + oneofKind: "userState"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken.UserState user_state = 1; + */ + userState: ProcessBundleRequest_CacheToken_UserState; + } | { + oneofKind: "sideInput"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken.SideInput side_input = 2; + */ + sideInput: ProcessBundleRequest_CacheToken_SideInput; + } | { + oneofKind: undefined; + }; + /** + * The cache token identifier which should be globally unique. + * + * @generated from protobuf field: bytes token = 10; + */ + token: Uint8Array; +} +/** + * A flag to indicate a cache token is valid for all user state. 
+ * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken.UserState + */ +export interface ProcessBundleRequest_CacheToken_UserState { +} +/** + * A flag to indicate a cache token is valid for a side input. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken.SideInput + */ +export interface ProcessBundleRequest_CacheToken_SideInput { + /** + * (Required) The id of the PTransform containing a side input. + * + * @generated from protobuf field: string transform_id = 1; + */ + transformId: string; + /** + * (Required) The id of the side input. + * + * @generated from protobuf field: string side_input_id = 2; + */ + sideInputId: string; +} +/** + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleResponse + */ +export interface ProcessBundleResponse { + /** + * (Optional) Specifies that the bundle has not been completed and the + * following applications need to be scheduled and executed in the future. + * A runner that does not yet support residual roots MUST still check that + * this is empty for correctness. + * + * @generated from protobuf field: repeated org.apache.beam.model.fn_execution.v1.DelayedBundleApplication residual_roots = 2; + */ + residualRoots: DelayedBundleApplication[]; + /** + * DEPRECATED (Required) The list of metrics or other MonitoredState + * collected while processing this bundle. + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.MonitoringInfo monitoring_infos = 3; + */ + monitoringInfos: MonitoringInfo[]; + /** + * (Optional) Specifies that the runner must callback to this worker + * once the output of the bundle is committed. The Runner must send a + * FinalizeBundleRequest with the instruction id of the ProcessBundleRequest + * that is related to this ProcessBundleResponse. 
 + * + * @generated from protobuf field: bool requires_finalization = 4; + */ + requiresFinalization: boolean; + /** + * An identifier to MonitoringInfo.payload mapping. + * + * An SDK can report metrics using an identifier that only contains the + * associated payload. A runner who wants to receive the full metrics + * information can request all the monitoring metadata via a + * MonitoringInfosMetadataRequest providing a list of ids as necessary. + * + * The SDK is allowed to reuse the identifiers across multiple bundles as long + * as the MonitoringInfo could be reconstructed fully by overwriting its + * payload field with the bytes specified here. + * + * @generated from protobuf field: map monitoring_data = 5; + */ + monitoringData: { + [key: string]: Uint8Array; + }; + /** + * (Optional) Output elements of the processed bundle. Either all or + * none of the bundle elements should be included in the ProcessBundleResponse. + * This embedding is to achieve better efficiency for bundles that only + * contain small amounts of data. This field can be set only if the runner + * declares that it supports the + * beam:protocol:control_request_elements_embedding:v1 capability. See more at + * https://s.apache.org/beam-fn-api-control-data-embedding. + * + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.Elements elements = 6; + */ + elements?: Elements; +} +/** + * A request to report progress information for a given bundle. + * This is an optional request to be handled and is used to support advanced + * SDK features such as SplittableDoFn, user level metrics etc. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleProgressRequest + */ +export interface ProcessBundleProgressRequest { + /** + * (Required) A reference to an active process bundle request with the given + * instruction id. 
+ * + * @generated from protobuf field: string instruction_id = 1; + */ + instructionId: string; +} +/** + * A request to provide full MonitoringInfo for a set of provided ids. + * + * An SDK can report metrics using an identifier that only contains the + * associated payload. A runner who wants to receive the full metrics + * information can request all the monitoring metadata via a + * MonitoringInfosMetadataRequest providing a list of ids as necessary. + * + * The SDK is allowed to reuse the identifiers for the lifetime of the + * associated control connection as long as the MonitoringInfo could be + * reconstructed fully by overwriting its payload field with the bytes specified + * here. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.MonitoringInfosMetadataRequest + */ +export interface MonitoringInfosMetadataRequest { + /** + * A list of ids for which the full MonitoringInfo is requested for. + * + * @generated from protobuf field: repeated string monitoring_info_id = 1; + */ + monitoringInfoId: string[]; +} +/** + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleProgressResponse + */ +export interface ProcessBundleProgressResponse { + /** + * DEPRECATED (Required) The list of metrics or other MonitoredState + * collected while processing this bundle. + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.MonitoringInfo monitoring_infos = 3; + */ + monitoringInfos: MonitoringInfo[]; + /** + * An identifier to MonitoringInfo.payload mapping. + * + * An SDK can report metrics using an identifier that only contains the + * associated payload. A runner who wants to receive the full metrics + * information can request all the monitoring metadata via a + * MonitoringInfosMetadataRequest providing a list of ids as necessary. 
+ * + * The SDK is allowed to reuse the identifiers + * for the lifetime of the associated control connection as long + * as the MonitoringInfo could be reconstructed fully by overwriting its + * payload field with the bytes specified here. + * + * @generated from protobuf field: map monitoring_data = 5; + */ + monitoringData: { + [key: string]: Uint8Array; + }; +} +/** + * A response that contains the full mapping information associated with + * a specified set of identifiers. + * + * An SDK can report metrics using an identifier that only contains the + * associated payload. A runner who wants to receive the full metrics + * information can request all the monitoring metadata via a + * MonitoringInfosMetadataRequest providing a list of ids as necessary. + * + * The SDK is allowed to reuse the identifiers + * for the lifetime of the associated control connection as long + * as the MonitoringInfo could be reconstructed fully by overwriting its + * payload field with the bytes specified here. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.MonitoringInfosMetadataResponse + */ +export interface MonitoringInfosMetadataResponse { + /** + * A mapping from an identifier to the full metrics information. + * + * @generated from protobuf field: map monitoring_info = 1; + */ + monitoringInfo: { + [key: string]: MonitoringInfo; + }; +} +/** + * Represents a request to the SDK to split a currently active bundle. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleSplitRequest + */ +export interface ProcessBundleSplitRequest { + /** + * (Required) A reference to an active process bundle request with the given + * instruction id. + * + * @generated from protobuf field: string instruction_id = 1; + */ + instructionId: string; + /** + * (Required) Specifies the desired split for each transform. + * + * Currently only splits at GRPC read operations are supported. 
+ * This may, of course, limit the amount of work downstream operations + * receive. + * + * @generated from protobuf field: map desired_splits = 3; + */ + desiredSplits: { + [key: string]: ProcessBundleSplitRequest_DesiredSplit; + }; +} +/** + * A message specifying the desired split for a single transform. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleSplitRequest.DesiredSplit + */ +export interface ProcessBundleSplitRequest_DesiredSplit { + /** + * (Required) The fraction of known work remaining in this bundle + * for this transform that should be kept by the SDK after this split. + * + * Set to 0 to "checkpoint" as soon as possible (keeping as little work as + * possible and returning the remainder). + * + * @generated from protobuf field: double fraction_of_remainder = 1; + */ + fractionOfRemainder: number; + /** + * A set of allowed element indices where the SDK may split. When this is + * empty, there are no constraints on where to split. + * + * @generated from protobuf field: repeated int64 allowed_split_points = 3; + */ + allowedSplitPoints: bigint[]; + /** + * (Required for GrpcRead operations) Number of total elements expected + * to be sent to this GrpcRead operation, required to correctly account + * for unreceived data when determining where to split. + * + * @generated from protobuf field: int64 estimated_input_elements = 2; + */ + estimatedInputElements: bigint; +} +/** + * Represents a partition of the bundle: a "primary" and + * a "residual", with the following properties: + * - The work in primary and residual doesn't overlap, and combined, adds up + * to the work in the current bundle if the split hadn't happened. + * - The current bundle, if it keeps executing, will have done none of the + * work under residual_roots. + * - The current bundle, if no further splits happen, will have done exactly + * the work under primary_roots. 
+ * For more rigorous definitions see https://s.apache.org/beam-breaking-fusion + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleSplitResponse + */ +export interface ProcessBundleSplitResponse { + /** + * Root applications that should replace the current bundle. + * + * @generated from protobuf field: repeated org.apache.beam.model.fn_execution.v1.BundleApplication primary_roots = 1; + */ + primaryRoots: BundleApplication[]; + /** + * Root applications that have been removed from the current bundle and + * have to be executed in a separate bundle (e.g. in parallel on a different + * worker, or after the current bundle completes, etc.) + * + * @generated from protobuf field: repeated org.apache.beam.model.fn_execution.v1.DelayedBundleApplication residual_roots = 2; + */ + residualRoots: DelayedBundleApplication[]; + /** + * Partitions of input data channels into primary and residual elements, + * if any. Should not include any elements represented in the bundle + * applications roots above. + * + * @generated from protobuf field: repeated org.apache.beam.model.fn_execution.v1.ProcessBundleSplitResponse.ChannelSplit channel_splits = 3; + */ + channelSplits: ProcessBundleSplitResponse_ChannelSplit[]; +} +/** + * Represents contiguous portions of the data channel that are either + * entirely processed or entirely unprocessed and belong to the primary + * or residual respectively. + * + * This affords both a more efficient representation over the FnAPI + * (if the bundle is large) and often a more efficient representation + * on the runner side (e.g. if the set of elements can be represented + * as some range in an underlying dataset). + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleSplitResponse.ChannelSplit + */ +export interface ProcessBundleSplitResponse_ChannelSplit { + /** + * (Required) The grpc read transform reading this channel. 
+ * + * @generated from protobuf field: string transform_id = 1; + */ + transformId: string; + /** + * The last element of the input channel that should be entirely considered + * part of the primary, identified by its absolute index in the (ordered) + * channel. + * + * @generated from protobuf field: int64 last_primary_element = 2; + */ + lastPrimaryElement: bigint; + /** + * The first element of the input channel that should be entirely considered + * part of the residual, identified by its absolute index in the (ordered) + * channel. + * + * @generated from protobuf field: int64 first_residual_element = 3; + */ + firstResidualElement: bigint; +} +/** + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.FinalizeBundleRequest + */ +export interface FinalizeBundleRequest { + /** + * (Required) A reference to a completed process bundle request with the given + * instruction id. + * + * @generated from protobuf field: string instruction_id = 1; + */ + instructionId: string; +} +/** + * Empty + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.FinalizeBundleResponse + */ +export interface FinalizeBundleResponse { +} +// +// Data Plane API + +/** + * Messages used to represent logical byte streams. + * Stable + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.Elements + */ +export interface Elements { + /** + * (Optional) A list containing parts of logical byte streams. + * + * @generated from protobuf field: repeated org.apache.beam.model.fn_execution.v1.Elements.Data data = 1; + */ + data: Elements_Data[]; + /** + * (Optional) A list of timer byte streams. + * + * @generated from protobuf field: repeated org.apache.beam.model.fn_execution.v1.Elements.Timers timers = 2; + */ + timers: Elements_Timers[]; +} +/** + * Represents multiple encoded elements in nested context for a given named + * instruction and transform. 
+ * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.Elements.Data + */ +export interface Elements_Data { + /** + * (Required) A reference to an active instruction request with the given + * instruction id. + * + * @generated from protobuf field: string instruction_id = 1; + */ + instructionId: string; + /** + * (Required) A definition representing a consumer or producer of this data. + * If received by a harness, this represents the consumer within that + * harness that should consume these bytes. If sent by a harness, this + * represents the producer of these bytes. + * + * Note that a single element may span multiple Data messages. + * + * Note that a sending/receiving pair should share the same identifier. + * + * @generated from protobuf field: string transform_id = 2; + */ + transformId: string; + /** + * (Optional) Represents a part of a logical byte stream. Elements within + * the logical byte stream are encoded in the nested context and + * concatenated together. + * + * @generated from protobuf field: bytes data = 3; + */ + data: Uint8Array; + /** + * (Optional) Set this bit to indicate the this is the last data block + * for the given instruction and transform, ending the stream. + * + * @generated from protobuf field: bool is_last = 4; + */ + isLast: boolean; +} +/** + * Represent the encoded user timer for a given instruction, transform and + * timer id. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.Elements.Timers + */ +export interface Elements_Timers { + /** + * (Required) A reference to an active instruction request with the given + * instruction id. + * + * @generated from protobuf field: string instruction_id = 1; + */ + instructionId: string; + /** + * (Required) A definition representing a consumer or producer of this data. + * If received by a harness, this represents the consumer within that + * harness that should consume these timers. 
If sent by a harness, this + * represents the producer of these timers. + * + * @generated from protobuf field: string transform_id = 2; + */ + transformId: string; + /** + * (Required) The local timer family name used to identify the associated + * timer family specification + * + * @generated from protobuf field: string timer_family_id = 3; + */ + timerFamilyId: string; + /** + * (Optional) Represents a logical byte stream of timers. Encoded according + * to the coder in the timer spec. + * + * @generated from protobuf field: bytes timers = 4; + */ + timers: Uint8Array; + /** + * (Optional) Set this bit to indicate the this is the last data block + * for the given instruction and transform, ending the stream. + * + * @generated from protobuf field: bool is_last = 5; + */ + isLast: boolean; +} +// +// State API + +/** + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateRequest + */ +export interface StateRequest { + /** + * (Required) A unique identifier provided by the SDK which represents this + * requests execution. The StateResponse corresponding with this request + * will have the matching id. + * + * @generated from protobuf field: string id = 1; + */ + id: string; + /** + * (Required) The associated instruction id of the work that is currently + * being processed. This allows for the runner to associate any modifications + * to state to be committed with the appropriate work execution. + * + * @generated from protobuf field: string instruction_id = 2; + */ + instructionId: string; + /** + * (Required) The state key this request is for. + * + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateKey state_key = 3; + */ + stateKey?: StateKey; + /** + * @generated from protobuf oneof: request + */ + request: { + oneofKind: "get"; + /** + * A request to get state. 
+ * + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateGetRequest get = 1000; + */ + get: StateGetRequest; + } | { + oneofKind: "append"; + /** + * A request to append to state. + * + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateAppendRequest append = 1001; + */ + append: StateAppendRequest; + } | { + oneofKind: "clear"; + /** + * A request to clear state. + * + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateClearRequest clear = 1002; + */ + clear: StateClearRequest; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateResponse + */ +export interface StateResponse { + /** + * (Required) A reference provided by the SDK which represents a requests + * execution. The StateResponse must have the matching id when responding + * to the SDK. + * + * @generated from protobuf field: string id = 1; + */ + id: string; + /** + * (Optional) If this is specified, then the state request has failed. + * A human readable string representing the reason as to why the request + * failed. + * + * @generated from protobuf field: string error = 2; + */ + error: string; + /** + * @generated from protobuf oneof: response + */ + response: { + oneofKind: "get"; + /** + * A response to getting state. + * + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateGetResponse get = 1000; + */ + get: StateGetResponse; + } | { + oneofKind: "append"; + /** + * A response to appending to state. + * + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateAppendResponse append = 1001; + */ + append: StateAppendResponse; + } | { + oneofKind: "clear"; + /** + * A response to clearing state. 
+ * + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateClearResponse clear = 1002; + */ + clear: StateClearResponse; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateKey + */ +export interface StateKey { + /** + * @generated from protobuf oneof: type + */ + type: { + oneofKind: "runner"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateKey.Runner runner = 1; + */ + runner: StateKey_Runner; + } | { + oneofKind: "multimapSideInput"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateKey.MultimapSideInput multimap_side_input = 2; + */ + multimapSideInput: StateKey_MultimapSideInput; + } | { + oneofKind: "bagUserState"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateKey.BagUserState bag_user_state = 3; + */ + bagUserState: StateKey_BagUserState; + } | { + oneofKind: "iterableSideInput"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateKey.IterableSideInput iterable_side_input = 4; + */ + iterableSideInput: StateKey_IterableSideInput; + } | { + oneofKind: "multimapKeysSideInput"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateKey.MultimapKeysSideInput multimap_keys_side_input = 5; + */ + multimapKeysSideInput: StateKey_MultimapKeysSideInput; + } | { + oneofKind: "multimapKeysUserState"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateKey.MultimapKeysUserState multimap_keys_user_state = 6; + */ + multimapKeysUserState: StateKey_MultimapKeysUserState; + } | { + oneofKind: "multimapUserState"; + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.StateKey.MultimapUserState multimap_user_state = 7; + */ + multimapUserState: StateKey_MultimapUserState; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message 
org.apache.beam.model.fn_execution.v1.StateKey.Runner + */ +export interface StateKey_Runner { + /** + * (Required) Opaque information supplied by the runner. Used to support + * remote references. + * https://s.apache.org/beam-fn-api-send-and-receive-data + * + * Used by state backed iterable. And in this use case, request type can + * only be of type get. Details see: + * https://s.apache.org/beam-fn-api-state-backed-iterables + * + * @generated from protobuf field: bytes key = 1; + */ + key: Uint8Array; +} +/** + * Represents a request for the values associated with a specified window + * in a PCollection. See + * https://s.apache.org/beam-fn-state-api-and-bundle-processing for further + * details. + * + * Can only be used to perform StateGetRequests on side inputs of the URN + * beam:side_input:iterable:v1. + * + * For a PCollection, the response data stream will be a concatenation + * of all V's. See https://s.apache.org/beam-fn-api-send-and-receive-data + * for further details. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateKey.IterableSideInput + */ +export interface StateKey_IterableSideInput { + /** + * (Required) The id of the PTransform containing a side input. + * + * @generated from protobuf field: string transform_id = 1; + */ + transformId: string; + /** + * (Required) The id of the side input. + * + * @generated from protobuf field: string side_input_id = 2; + */ + sideInputId: string; + /** + * (Required) The window (after mapping the currently executing elements + * window into the side input windows domain) encoded in a nested context. + * + * @generated from protobuf field: bytes window = 3; + */ + window: Uint8Array; +} +/** + * Represents a request for the values associated with a specified user key + * and window in a PCollection. See + * https://s.apache.org/beam-fn-state-api-and-bundle-processing for further + * details. 
+ * + * Can only be used to perform StateGetRequests on side inputs of the URN + * beam:side_input:multimap:v1. + * + * For a PCollection>, the response data stream will be a + * concatenation of all V's associated with the specified key K. See + * https://s.apache.org/beam-fn-api-send-and-receive-data for further + * details. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateKey.MultimapSideInput + */ +export interface StateKey_MultimapSideInput { + /** + * (Required) The id of the PTransform containing a side input. + * + * @generated from protobuf field: string transform_id = 1; + */ + transformId: string; + /** + * (Required) The id of the side input. + * + * @generated from protobuf field: string side_input_id = 2; + */ + sideInputId: string; + /** + * (Required) The window (after mapping the currently executing elements + * window into the side input windows domain) encoded in a nested context. + * + * @generated from protobuf field: bytes window = 3; + */ + window: Uint8Array; + /** + * (Required) The key encoded in a nested context. + * + * @generated from protobuf field: bytes key = 4; + */ + key: Uint8Array; +} +/** + * Represents a request for the keys associated with a specified window in a PCollection. See + * https://s.apache.org/beam-fn-state-api-and-bundle-processing for further + * details. + * + * Can only be used to perform StateGetRequests on side inputs of the URN + * beam:side_input:multimap:v1. + * + * For a PCollection>, the response data stream will be a + * concatenation of all K's associated with the specified window. See + * https://s.apache.org/beam-fn-api-send-and-receive-data for further + * details. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateKey.MultimapKeysSideInput + */ +export interface StateKey_MultimapKeysSideInput { + /** + * (Required) The id of the PTransform containing a side input. 
+ * + * @generated from protobuf field: string transform_id = 1; + */ + transformId: string; + /** + * (Required) The id of the side input. + * + * @generated from protobuf field: string side_input_id = 2; + */ + sideInputId: string; + /** + * (Required) The window (after mapping the currently executing elements + * window into the side input windows domain) encoded in a nested context. + * + * @generated from protobuf field: bytes window = 3; + */ + window: Uint8Array; +} +/** + * Represents a request for an unordered set of values associated with a + * specified user key and window for a PTransform. See + * https://s.apache.org/beam-fn-state-api-and-bundle-processing for further + * details. + * + * The response data stream will be a concatenation of all V's associated + * with the specified user key and window. + * See https://s.apache.org/beam-fn-api-send-and-receive-data for further + * details. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateKey.BagUserState + */ +export interface StateKey_BagUserState { + /** + * (Required) The id of the PTransform containing user state. + * + * @generated from protobuf field: string transform_id = 1; + */ + transformId: string; + /** + * (Required) The id of the user state. + * + * @generated from protobuf field: string user_state_id = 2; + */ + userStateId: string; + /** + * (Required) The window encoded in a nested context. + * + * @generated from protobuf field: bytes window = 3; + */ + window: Uint8Array; + /** + * (Required) The key of the currently executing element encoded in a + * nested context. + * + * @generated from protobuf field: bytes key = 4; + */ + key: Uint8Array; +} +/** + * Represents a request for the keys of a multimap associated with a specified + * user key and window for a PTransform. See + * https://s.apache.org/beam-fn-state-api-and-bundle-processing for further + * details. 
+ * + * Can only be used to perform StateGetRequests and StateClearRequests on the + * user state. + * + * The response data stream will be a concatenation of all K's associated + * with the specified user key and window. + * See https://s.apache.org/beam-fn-api-send-and-receive-data for further + * details. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateKey.MultimapKeysUserState + */ +export interface StateKey_MultimapKeysUserState { + /** + * (Required) The id of the PTransform containing user state. + * + * @generated from protobuf field: string transform_id = 1; + */ + transformId: string; + /** + * (Required) The id of the user state. + * + * @generated from protobuf field: string user_state_id = 2; + */ + userStateId: string; + /** + * (Required) The window encoded in a nested context. + * + * @generated from protobuf field: bytes window = 3; + */ + window: Uint8Array; + /** + * (Required) The key of the currently executing element encoded in a + * nested context. + * + * @generated from protobuf field: bytes key = 4; + */ + key: Uint8Array; +} +/** + * Represents a request for the values of the map key associated with a + * specified user key and window for a PTransform. See + * https://s.apache.org/beam-fn-state-api-and-bundle-processing for further + * details. + * + * The response data stream will be a concatenation of all V's associated + * with the specified map key, user key, and window. + * See https://s.apache.org/beam-fn-api-send-and-receive-data for further + * details. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateKey.MultimapUserState + */ +export interface StateKey_MultimapUserState { + /** + * (Required) The id of the PTransform containing user state. + * + * @generated from protobuf field: string transform_id = 1; + */ + transformId: string; + /** + * (Required) The id of the user state. 
+ * + * @generated from protobuf field: string user_state_id = 2; + */ + userStateId: string; + /** + * (Required) The window encoded in a nested context. + * + * @generated from protobuf field: bytes window = 3; + */ + window: Uint8Array; + /** + * (Required) The key of the currently executing element encoded in a + * nested context. + * + * @generated from protobuf field: bytes key = 4; + */ + key: Uint8Array; + /** + * (Required) The map key encoded in a nested context. + * + * @generated from protobuf field: bytes map_key = 5; + */ + mapKey: Uint8Array; +} +/** + * A request to get state. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateGetRequest + */ +export interface StateGetRequest { + /** + * (Optional) If specified, signals to the runner that the response + * should resume from the following continuation token. + * + * If unspecified, signals to the runner that the response should start + * from the beginning of the logical continuable stream. + * + * @generated from protobuf field: bytes continuation_token = 1; + */ + continuationToken: Uint8Array; +} +/** + * A response to get state representing a logical byte stream which can be + * continued using the state API. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateGetResponse + */ +export interface StateGetResponse { + /** + * (Optional) If specified, represents a token which can be used with the + * state API to get the next chunk of this logical byte stream. The end of + * the logical byte stream is signalled by this field being unset. + * + * @generated from protobuf field: bytes continuation_token = 1; + */ + continuationToken: Uint8Array; + /** + * Represents a part of a logical byte stream. Elements within + * the logical byte stream are encoded in the nested context and + * concatenated together. + * + * @generated from protobuf field: bytes data = 2; + */ + data: Uint8Array; +} +/** + * A request to append state. 
+ * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateAppendRequest + */ +export interface StateAppendRequest { + /** + * Represents a part of a logical byte stream. Elements within + * the logical byte stream are encoded in the nested context and + * multiple append requests are concatenated together. + * + * @generated from protobuf field: bytes data = 1; + */ + data: Uint8Array; +} +/** + * A response to append state. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateAppendResponse + */ +export interface StateAppendResponse { +} +/** + * A request to clear state. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateClearRequest + */ +export interface StateClearRequest { +} +/** + * A response to clear state. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StateClearResponse + */ +export interface StateClearResponse { +} +// +// Logging API +// +// This is very stable. There can be some changes to how we define a LogEntry, +// to increase/decrease the severity types, the way we format an exception/stack +// trace, or the log site. + +/** + * A log entry + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.LogEntry + */ +export interface LogEntry { + /** + * (Required) The severity of the log statement. + * + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.LogEntry.Severity.Enum severity = 1; + */ + severity: LogEntry_Severity_Enum; + /** + * (Required) The time at which this log statement occurred. + * + * @generated from protobuf field: google.protobuf.Timestamp timestamp = 2; + */ + timestamp?: Timestamp; + /** + * (Required) A human readable message. + * + * @generated from protobuf field: string message = 3; + */ + message: string; + /** + * (Optional) An optional trace of the functions involved. 
For example, in + * Java this can include multiple causes and multiple suppressed exceptions. + * + * @generated from protobuf field: string trace = 4; + */ + trace: string; + /** + * (Optional) A reference to the instruction this log statement is associated + * with. + * + * @generated from protobuf field: string instruction_id = 5; + */ + instructionId: string; + /** + * (Optional) A reference to the transform this log statement is + * associated with. + * + * @generated from protobuf field: string transform_id = 6; + */ + transformId: string; + /** + * (Optional) Human-readable name of the function or method being invoked, + * with optional context such as the class or package name. The format can + * vary by language. For example: + * qual.if.ied.Class.method (Java) + * dir/package.func (Go) + * module.function (Python) + * file.cc:382 (C++) + * + * @generated from protobuf field: string log_location = 7; + */ + logLocation: string; + /** + * (Optional) The name of the thread this log statement is associated with. + * + * @generated from protobuf field: string thread = 8; + */ + thread: string; +} +/** + * A list of log entries, enables buffering and batching of multiple + * log messages using the logging API. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.LogEntry.List + */ +export interface LogEntry_List { + /** + * (Required) One or or more log messages. + * + * @generated from protobuf field: repeated org.apache.beam.model.fn_execution.v1.LogEntry log_entries = 1; + */ + logEntries: LogEntry[]; +} +/** + * The severity of the event described in a log entry, expressed as one of the + * severity levels listed below. For your reference, the levels are + * assigned the listed numeric values. The effect of using numeric values + * other than those listed is undefined. + * + * If you are writing log entries, you should map other severity encodings to + * one of these standard levels. 
For example, you might map all of + * Java's FINE, FINER, and FINEST levels to `Severity.DEBUG`. + * + * This list is intentionally not comprehensive; the intent is to provide a + * common set of "good enough" severity levels so that logging front ends + * can provide filtering and searching across log types. Users of the API are + * free not to use all severity levels in their log messages. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.LogEntry.Severity + */ +export interface LogEntry_Severity { +} +/** + * @generated from protobuf enum org.apache.beam.model.fn_execution.v1.LogEntry.Severity.Enum + */ +export enum LogEntry_Severity_Enum { + /** + * Unspecified level information. Will be logged at the TRACE level. + * + * @generated from protobuf enum value: UNSPECIFIED = 0; + */ + UNSPECIFIED = 0, + /** + * @generated from protobuf enum value: TRACE = 1; + */ + TRACE = 1, + /** + * Debugging information. + * + * @generated from protobuf enum value: DEBUG = 2; + */ + DEBUG = 2, + /** + * Normal events. + * + * @generated from protobuf enum value: INFO = 3; + */ + INFO = 3, + /** + * Normal but significant events, such as start up, shut down, or + * configuration. + * + * @generated from protobuf enum value: NOTICE = 4; + */ + NOTICE = 4, + /** + * Warning events might cause problems. + * + * @generated from protobuf enum value: WARN = 5; + */ + WARN = 5, + /** + * Error events are likely to cause problems. + * + * @generated from protobuf enum value: ERROR = 6; + */ + ERROR = 6, + /** + * Critical events cause severe problems or brief outages and may + * indicate that a person must take action. 
+ * + * @generated from protobuf enum value: CRITICAL = 7; + */ + CRITICAL = 7 +} +/** + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.LogControl + */ +export interface LogControl { +} +/** + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StartWorkerRequest + */ +export interface StartWorkerRequest { + /** + * @generated from protobuf field: string worker_id = 1; + */ + workerId: string; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor control_endpoint = 2; + */ + controlEndpoint?: ApiServiceDescriptor; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor logging_endpoint = 3; + */ + loggingEndpoint?: ApiServiceDescriptor; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor artifact_endpoint = 4; + */ + artifactEndpoint?: ApiServiceDescriptor; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor provision_endpoint = 5; + */ + provisionEndpoint?: ApiServiceDescriptor; + /** + * @generated from protobuf field: map params = 10; + */ + params: { + [key: string]: string; + }; +} +/** + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StartWorkerResponse + */ +export interface StartWorkerResponse { + /** + * @generated from protobuf field: string error = 1; + */ + error: string; +} +/** + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StopWorkerRequest + */ +export interface StopWorkerRequest { + /** + * @generated from protobuf field: string worker_id = 1; + */ + workerId: string; +} +/** + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.StopWorkerResponse + */ +export interface StopWorkerResponse { + /** + * @generated from protobuf field: string error = 1; + */ + error: string; +} +/** + * Request from runner to SDK Harness asking for its status. 
For more details see + * https://s.apache.org/beam-fn-api-harness-status + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.WorkerStatusRequest + */ +export interface WorkerStatusRequest { + /** + * (Required) Unique ID identifying this request. + * + * @generated from protobuf field: string id = 1; + */ + id: string; +} +/** + * Response from SDK Harness to runner containing the debug related status info. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.WorkerStatusResponse + */ +export interface WorkerStatusResponse { + /** + * (Required) Unique ID from the original request. + * + * @generated from protobuf field: string id = 1; + */ + id: string; + /** + * (Optional) Error message if exception encountered generating the status response. + * + * @generated from protobuf field: string error = 2; + */ + error: string; + /** + * (Optional) Status debugging info reported by SDK harness worker. Content and + * format is not strongly enforced but should be print-friendly and + * appropriate as an HTTP response body for end user. 
For details of the preferred + * info to include in the message see + * https://s.apache.org/beam-fn-api-harness-status + * + * @generated from protobuf field: string status_info = 3; + */ + statusInfo: string; +} +// @generated message type with reflection information, may provide speed optimized methods +class RemoteGrpcPort$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.RemoteGrpcPort", [ + { no: 1, name: "api_service_descriptor", kind: "message", T: () => ApiServiceDescriptor }, + { no: 2, name: "coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): RemoteGrpcPort { + const message = { coderId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: RemoteGrpcPort): RemoteGrpcPort { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor api_service_descriptor */ 1: + message.apiServiceDescriptor = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.apiServiceDescriptor); + break; + case /* string coder_id */ 2: + message.coderId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: RemoteGrpcPort, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor api_service_descriptor = 1; */ + if (message.apiServiceDescriptor) + ApiServiceDescriptor.internalBinaryWrite(message.apiServiceDescriptor, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* string coder_id = 2; */ + if (message.coderId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.coderId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.RemoteGrpcPort + */ +export const RemoteGrpcPort = new RemoteGrpcPort$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetProcessBundleDescriptorRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.GetProcessBundleDescriptorRequest", [ + { no: 1, name: "process_bundle_descriptor_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): GetProcessBundleDescriptorRequest { + const message = { processBundleDescriptorId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetProcessBundleDescriptorRequest): GetProcessBundleDescriptorRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string process_bundle_descriptor_id */ 1: + message.processBundleDescriptorId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetProcessBundleDescriptorRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string process_bundle_descriptor_id = 1; */ + if (message.processBundleDescriptorId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.processBundleDescriptorId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.GetProcessBundleDescriptorRequest + */ +export const GetProcessBundleDescriptorRequest = new GetProcessBundleDescriptorRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class InstructionRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.InstructionRequest", [ + { no: 1, name: "instruction_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 1001, name: "process_bundle", kind: "message", oneof: "request", T: () => ProcessBundleRequest }, + { no: 1002, name: "process_bundle_progress", kind: "message", oneof: "request", T: () => ProcessBundleProgressRequest }, + { no: 1003, name: "process_bundle_split", kind: "message", oneof: "request", T: () => ProcessBundleSplitRequest }, + { no: 1004, name: 
"finalize_bundle", kind: "message", oneof: "request", T: () => FinalizeBundleRequest }, + { no: 1005, name: "monitoring_infos", kind: "message", oneof: "request", T: () => MonitoringInfosMetadataRequest }, + { no: 1006, name: "harness_monitoring_infos", kind: "message", oneof: "request", T: () => HarnessMonitoringInfosRequest }, + { no: 1000, name: "register", kind: "message", oneof: "request", T: () => RegisterRequest } + ]); + } + create(value?: PartialMessage): InstructionRequest { + const message = { instructionId: "", request: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: InstructionRequest): InstructionRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string instruction_id */ 1: + message.instructionId = reader.string(); + break; + case /* org.apache.beam.model.fn_execution.v1.ProcessBundleRequest process_bundle */ 1001: + message.request = { + oneofKind: "processBundle", + processBundle: ProcessBundleRequest.internalBinaryRead(reader, reader.uint32(), options, (message.request as any).processBundle) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.ProcessBundleProgressRequest process_bundle_progress */ 1002: + message.request = { + oneofKind: "processBundleProgress", + processBundleProgress: ProcessBundleProgressRequest.internalBinaryRead(reader, reader.uint32(), options, (message.request as any).processBundleProgress) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.ProcessBundleSplitRequest process_bundle_split */ 1003: + message.request = { + oneofKind: "processBundleSplit", + processBundleSplit: 
ProcessBundleSplitRequest.internalBinaryRead(reader, reader.uint32(), options, (message.request as any).processBundleSplit) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.FinalizeBundleRequest finalize_bundle */ 1004: + message.request = { + oneofKind: "finalizeBundle", + finalizeBundle: FinalizeBundleRequest.internalBinaryRead(reader, reader.uint32(), options, (message.request as any).finalizeBundle) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.MonitoringInfosMetadataRequest monitoring_infos */ 1005: + message.request = { + oneofKind: "monitoringInfos", + monitoringInfos: MonitoringInfosMetadataRequest.internalBinaryRead(reader, reader.uint32(), options, (message.request as any).monitoringInfos) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.HarnessMonitoringInfosRequest harness_monitoring_infos */ 1006: + message.request = { + oneofKind: "harnessMonitoringInfos", + harnessMonitoringInfos: HarnessMonitoringInfosRequest.internalBinaryRead(reader, reader.uint32(), options, (message.request as any).harnessMonitoringInfos) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.RegisterRequest register */ 1000: + message.request = { + oneofKind: "register", + register: RegisterRequest.internalBinaryRead(reader, reader.uint32(), options, (message.request as any).register) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: InstructionRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string instruction_id = 1; */ + if (message.instructionId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.instructionId); + /* org.apache.beam.model.fn_execution.v1.ProcessBundleRequest process_bundle = 1001; */ + if (message.request.oneofKind === "processBundle") + ProcessBundleRequest.internalBinaryWrite(message.request.processBundle, writer.tag(1001, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.ProcessBundleProgressRequest process_bundle_progress = 1002; */ + if (message.request.oneofKind === "processBundleProgress") + ProcessBundleProgressRequest.internalBinaryWrite(message.request.processBundleProgress, writer.tag(1002, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.ProcessBundleSplitRequest process_bundle_split = 1003; */ + if (message.request.oneofKind === "processBundleSplit") + ProcessBundleSplitRequest.internalBinaryWrite(message.request.processBundleSplit, writer.tag(1003, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.FinalizeBundleRequest finalize_bundle = 1004; */ + if (message.request.oneofKind === "finalizeBundle") + FinalizeBundleRequest.internalBinaryWrite(message.request.finalizeBundle, writer.tag(1004, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.MonitoringInfosMetadataRequest monitoring_infos = 1005; */ + if (message.request.oneofKind === "monitoringInfos") + MonitoringInfosMetadataRequest.internalBinaryWrite(message.request.monitoringInfos, writer.tag(1005, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.HarnessMonitoringInfosRequest harness_monitoring_infos = 
1006; */ + if (message.request.oneofKind === "harnessMonitoringInfos") + HarnessMonitoringInfosRequest.internalBinaryWrite(message.request.harnessMonitoringInfos, writer.tag(1006, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.RegisterRequest register = 1000; */ + if (message.request.oneofKind === "register") + RegisterRequest.internalBinaryWrite(message.request.register, writer.tag(1000, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.InstructionRequest + */ +export const InstructionRequest = new InstructionRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class InstructionResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.InstructionResponse", [ + { no: 1, name: "instruction_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "error", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 1001, name: "process_bundle", kind: "message", oneof: "response", T: () => ProcessBundleResponse }, + { no: 1002, name: "process_bundle_progress", kind: "message", oneof: "response", T: () => ProcessBundleProgressResponse }, + { no: 1003, name: "process_bundle_split", kind: "message", oneof: "response", T: () => ProcessBundleSplitResponse }, + { no: 1004, name: "finalize_bundle", kind: "message", oneof: "response", T: () => FinalizeBundleResponse }, + { no: 1005, name: "monitoring_infos", kind: "message", oneof: "response", T: () => MonitoringInfosMetadataResponse }, + { no: 1006, name: "harness_monitoring_infos", kind: "message", oneof: "response", T: () => HarnessMonitoringInfosResponse }, + { no: 1000, name: "register", kind: "message", oneof: "response", 
T: () => RegisterResponse } + ]); + } + create(value?: PartialMessage): InstructionResponse { + const message = { instructionId: "", error: "", response: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: InstructionResponse): InstructionResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string instruction_id */ 1: + message.instructionId = reader.string(); + break; + case /* string error */ 2: + message.error = reader.string(); + break; + case /* org.apache.beam.model.fn_execution.v1.ProcessBundleResponse process_bundle */ 1001: + message.response = { + oneofKind: "processBundle", + processBundle: ProcessBundleResponse.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).processBundle) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.ProcessBundleProgressResponse process_bundle_progress */ 1002: + message.response = { + oneofKind: "processBundleProgress", + processBundleProgress: ProcessBundleProgressResponse.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).processBundleProgress) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.ProcessBundleSplitResponse process_bundle_split */ 1003: + message.response = { + oneofKind: "processBundleSplit", + processBundleSplit: ProcessBundleSplitResponse.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).processBundleSplit) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.FinalizeBundleResponse finalize_bundle */ 1004: + message.response = { + oneofKind: "finalizeBundle", + finalizeBundle: 
FinalizeBundleResponse.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).finalizeBundle) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.MonitoringInfosMetadataResponse monitoring_infos */ 1005: + message.response = { + oneofKind: "monitoringInfos", + monitoringInfos: MonitoringInfosMetadataResponse.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).monitoringInfos) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.HarnessMonitoringInfosResponse harness_monitoring_infos */ 1006: + message.response = { + oneofKind: "harnessMonitoringInfos", + harnessMonitoringInfos: HarnessMonitoringInfosResponse.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).harnessMonitoringInfos) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.RegisterResponse register */ 1000: + message.response = { + oneofKind: "register", + register: RegisterResponse.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).register) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: InstructionResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string instruction_id = 1; */ + if (message.instructionId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.instructionId); + /* string error = 2; */ + if (message.error !== "") + writer.tag(2, WireType.LengthDelimited).string(message.error); + /* org.apache.beam.model.fn_execution.v1.ProcessBundleResponse process_bundle = 1001; */ + if (message.response.oneofKind === "processBundle") + ProcessBundleResponse.internalBinaryWrite(message.response.processBundle, writer.tag(1001, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.ProcessBundleProgressResponse process_bundle_progress = 1002; */ + if (message.response.oneofKind === "processBundleProgress") + ProcessBundleProgressResponse.internalBinaryWrite(message.response.processBundleProgress, writer.tag(1002, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.ProcessBundleSplitResponse process_bundle_split = 1003; */ + if (message.response.oneofKind === "processBundleSplit") + ProcessBundleSplitResponse.internalBinaryWrite(message.response.processBundleSplit, writer.tag(1003, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.FinalizeBundleResponse finalize_bundle = 1004; */ + if (message.response.oneofKind === "finalizeBundle") + FinalizeBundleResponse.internalBinaryWrite(message.response.finalizeBundle, writer.tag(1004, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.MonitoringInfosMetadataResponse monitoring_infos = 1005; */ + if (message.response.oneofKind === "monitoringInfos") + MonitoringInfosMetadataResponse.internalBinaryWrite(message.response.monitoringInfos, writer.tag(1005, 
WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.HarnessMonitoringInfosResponse harness_monitoring_infos = 1006; */ + if (message.response.oneofKind === "harnessMonitoringInfos") + HarnessMonitoringInfosResponse.internalBinaryWrite(message.response.harnessMonitoringInfos, writer.tag(1006, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.RegisterResponse register = 1000; */ + if (message.response.oneofKind === "register") + RegisterResponse.internalBinaryWrite(message.response.register, writer.tag(1000, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.InstructionResponse + */ +export const InstructionResponse = new InstructionResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class HarnessMonitoringInfosRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.HarnessMonitoringInfosRequest", []); + } + create(value?: PartialMessage): HarnessMonitoringInfosRequest { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: HarnessMonitoringInfosRequest): HarnessMonitoringInfosRequest { + return target ?? this.create(); + } + internalBinaryWrite(message: HarnessMonitoringInfosRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.HarnessMonitoringInfosRequest + */ +export const HarnessMonitoringInfosRequest = new HarnessMonitoringInfosRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class HarnessMonitoringInfosResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.HarnessMonitoringInfosResponse", [ + { no: 1, name: "monitoring_data", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "scalar", T: 12 /*ScalarType.BYTES*/ } } + ]); + } + create(value?: PartialMessage): HarnessMonitoringInfosResponse { + const message = { monitoringData: {} }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: HarnessMonitoringInfosResponse): HarnessMonitoringInfosResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* map monitoring_data */ 1: + this.binaryReadMap1(message.monitoringData, reader, options); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap1(map: HarnessMonitoringInfosResponse["monitoringData"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof HarnessMonitoringInfosResponse["monitoringData"] | undefined, val: HarnessMonitoringInfosResponse["monitoringData"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = reader.bytes(); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.fn_execution.v1.HarnessMonitoringInfosResponse.monitoring_data"); + } + } + map[key ?? ""] = val ?? new Uint8Array(0); + } + internalBinaryWrite(message: HarnessMonitoringInfosResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* map monitoring_data = 1; */ + for (let k of Object.keys(message.monitoringData)) + writer.tag(1, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k).tag(2, WireType.LengthDelimited).bytes(message.monitoringData[k]).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.HarnessMonitoringInfosResponse + */ +export const HarnessMonitoringInfosResponse = new HarnessMonitoringInfosResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class RegisterRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.RegisterRequest", [ + { no: 1, name: "process_bundle_descriptor", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ProcessBundleDescriptor } + ]); + } + create(value?: PartialMessage): RegisterRequest { + const message = { processBundleDescriptor: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: RegisterRequest): RegisterRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.fn_execution.v1.ProcessBundleDescriptor process_bundle_descriptor */ 1: + message.processBundleDescriptor.push(ProcessBundleDescriptor.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: RegisterRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.fn_execution.v1.ProcessBundleDescriptor process_bundle_descriptor = 1; */ + for (let i = 0; i < message.processBundleDescriptor.length; i++) + ProcessBundleDescriptor.internalBinaryWrite(message.processBundleDescriptor[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.RegisterRequest + */ +export const RegisterRequest = new RegisterRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class RegisterResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.RegisterResponse", []); + } + create(value?: PartialMessage): RegisterResponse { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: RegisterResponse): RegisterResponse { + return target ?? this.create(); + } + internalBinaryWrite(message: RegisterResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.RegisterResponse + */ +export const RegisterResponse = new RegisterResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProcessBundleDescriptor$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.ProcessBundleDescriptor", [ + { no: 1, name: "id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "transforms", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => PTransform } }, + { no: 3, name: "pcollections", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => PCollection } }, + { no: 4, name: "windowing_strategies", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => WindowingStrategy } }, + { no: 5, name: "coders", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => Coder } }, + { no: 6, name: "environments", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => Environment } }, + { no: 7, name: "state_api_service_descriptor", kind: "message", T: () => ApiServiceDescriptor }, + { no: 8, name: "timer_api_service_descriptor", kind: "message", T: () => ApiServiceDescriptor } + ]); + } + create(value?: PartialMessage): ProcessBundleDescriptor { + const message = { id: "", transforms: {}, pcollections: {}, windowingStrategies: {}, coders: {}, environments: {} }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProcessBundleDescriptor): ProcessBundleDescriptor { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string id */ 1: + message.id = reader.string(); + break; + case /* map transforms */ 2: + this.binaryReadMap2(message.transforms, reader, options); + break; + case /* map pcollections */ 3: + this.binaryReadMap3(message.pcollections, reader, options); + break; + case /* map windowing_strategies */ 4: + this.binaryReadMap4(message.windowingStrategies, reader, options); + break; + case /* map coders */ 5: + this.binaryReadMap5(message.coders, reader, options); + break; + case /* map environments */ 6: + this.binaryReadMap6(message.environments, reader, options); + break; + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor state_api_service_descriptor */ 7: + message.stateApiServiceDescriptor = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.stateApiServiceDescriptor); + break; + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor timer_api_service_descriptor */ 8: + message.timerApiServiceDescriptor = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.timerApiServiceDescriptor); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap2(map: ProcessBundleDescriptor["transforms"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ProcessBundleDescriptor["transforms"] | undefined, val: ProcessBundleDescriptor["transforms"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = PTransform.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.fn_execution.v1.ProcessBundleDescriptor.transforms"); + } + } + map[key ?? ""] = val ?? PTransform.create(); + } + private binaryReadMap3(map: ProcessBundleDescriptor["pcollections"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ProcessBundleDescriptor["pcollections"] | undefined, val: ProcessBundleDescriptor["pcollections"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = PCollection.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.fn_execution.v1.ProcessBundleDescriptor.pcollections"); + } + } + map[key ?? ""] = val ?? 
PCollection.create(); + } + private binaryReadMap4(map: ProcessBundleDescriptor["windowingStrategies"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ProcessBundleDescriptor["windowingStrategies"] | undefined, val: ProcessBundleDescriptor["windowingStrategies"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = WindowingStrategy.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.fn_execution.v1.ProcessBundleDescriptor.windowing_strategies"); + } + } + map[key ?? ""] = val ?? WindowingStrategy.create(); + } + private binaryReadMap5(map: ProcessBundleDescriptor["coders"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ProcessBundleDescriptor["coders"] | undefined, val: ProcessBundleDescriptor["coders"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = Coder.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.fn_execution.v1.ProcessBundleDescriptor.coders"); + } + } + map[key ?? ""] = val ?? 
Coder.create(); + } + private binaryReadMap6(map: ProcessBundleDescriptor["environments"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ProcessBundleDescriptor["environments"] | undefined, val: ProcessBundleDescriptor["environments"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = Environment.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.fn_execution.v1.ProcessBundleDescriptor.environments"); + } + } + map[key ?? ""] = val ?? Environment.create(); + } + internalBinaryWrite(message: ProcessBundleDescriptor, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string id = 1; */ + if (message.id !== "") + writer.tag(1, WireType.LengthDelimited).string(message.id); + /* map transforms = 2; */ + for (let k of Object.keys(message.transforms)) { + writer.tag(2, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + PTransform.internalBinaryWrite(message.transforms[k], writer, options); + writer.join().join(); + } + /* map pcollections = 3; */ + for (let k of Object.keys(message.pcollections)) { + writer.tag(3, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + PCollection.internalBinaryWrite(message.pcollections[k], writer, options); + writer.join().join(); + } + /* map windowing_strategies = 4; */ + for (let k of Object.keys(message.windowingStrategies)) { + writer.tag(4, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + WindowingStrategy.internalBinaryWrite(message.windowingStrategies[k], writer, options); + 
writer.join().join(); + } + /* map coders = 5; */ + for (let k of Object.keys(message.coders)) { + writer.tag(5, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + Coder.internalBinaryWrite(message.coders[k], writer, options); + writer.join().join(); + } + /* map environments = 6; */ + for (let k of Object.keys(message.environments)) { + writer.tag(6, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + Environment.internalBinaryWrite(message.environments[k], writer, options); + writer.join().join(); + } + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor state_api_service_descriptor = 7; */ + if (message.stateApiServiceDescriptor) + ApiServiceDescriptor.internalBinaryWrite(message.stateApiServiceDescriptor, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor timer_api_service_descriptor = 8; */ + if (message.timerApiServiceDescriptor) + ApiServiceDescriptor.internalBinaryWrite(message.timerApiServiceDescriptor, writer.tag(8, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleDescriptor + */ +export const ProcessBundleDescriptor = new ProcessBundleDescriptor$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class BundleApplication$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.BundleApplication", [ + { no: 1, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "input_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "element", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 4, name: "output_watermarks", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => Timestamp } }, + { no: 5, name: "is_bounded", kind: "enum", T: () => ["org.apache.beam.model.pipeline.v1.IsBounded.Enum", IsBounded_Enum] } + ]); + } + create(value?: PartialMessage): BundleApplication { + const message = { transformId: "", inputId: "", element: new Uint8Array(0), outputWatermarks: {}, isBounded: 0 }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BundleApplication): BundleApplication { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string transform_id */ 1: + message.transformId = reader.string(); + break; + case /* string input_id */ 2: + message.inputId = reader.string(); + break; + case /* bytes element */ 3: + message.element = reader.bytes(); + break; + case /* map output_watermarks */ 4: + this.binaryReadMap4(message.outputWatermarks, reader, options); + break; + case /* org.apache.beam.model.pipeline.v1.IsBounded.Enum is_bounded */ 5: + message.isBounded = reader.int32(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap4(map: BundleApplication["outputWatermarks"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof BundleApplication["outputWatermarks"] | undefined, val: BundleApplication["outputWatermarks"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = Timestamp.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.fn_execution.v1.BundleApplication.output_watermarks"); + } + } + map[key ?? ""] = val ?? 
Timestamp.create(); + } + internalBinaryWrite(message: BundleApplication, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string transform_id = 1; */ + if (message.transformId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.transformId); + /* string input_id = 2; */ + if (message.inputId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.inputId); + /* bytes element = 3; */ + if (message.element.length) + writer.tag(3, WireType.LengthDelimited).bytes(message.element); + /* map output_watermarks = 4; */ + for (let k of Object.keys(message.outputWatermarks)) { + writer.tag(4, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + Timestamp.internalBinaryWrite(message.outputWatermarks[k], writer, options); + writer.join().join(); + } + /* org.apache.beam.model.pipeline.v1.IsBounded.Enum is_bounded = 5; */ + if (message.isBounded !== 0) + writer.tag(5, WireType.Varint).int32(message.isBounded); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.BundleApplication + */ +export const BundleApplication = new BundleApplication$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class DelayedBundleApplication$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.DelayedBundleApplication", [ + { no: 1, name: "application", kind: "message", T: () => BundleApplication }, + { no: 2, name: "requested_time_delay", kind: "message", T: () => Duration } + ]); + } + create(value?: PartialMessage): DelayedBundleApplication { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DelayedBundleApplication): DelayedBundleApplication { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.fn_execution.v1.BundleApplication application */ 1: + message.application = BundleApplication.internalBinaryRead(reader, reader.uint32(), options, message.application); + break; + case /* google.protobuf.Duration requested_time_delay */ 2: + message.requestedTimeDelay = Duration.internalBinaryRead(reader, reader.uint32(), options, message.requestedTimeDelay); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: DelayedBundleApplication, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.fn_execution.v1.BundleApplication application = 1; */ + if (message.application) + BundleApplication.internalBinaryWrite(message.application, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* google.protobuf.Duration requested_time_delay = 2; */ + if (message.requestedTimeDelay) + Duration.internalBinaryWrite(message.requestedTimeDelay, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.DelayedBundleApplication + */ +export const DelayedBundleApplication = new DelayedBundleApplication$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProcessBundleRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.ProcessBundleRequest", [ + { no: 1, name: "process_bundle_descriptor_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "cache_tokens", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ProcessBundleRequest_CacheToken }, + { no: 3, name: "elements", kind: "message", T: () => Elements } + ]); + } + create(value?: PartialMessage): ProcessBundleRequest { + const message = { processBundleDescriptorId: "", cacheTokens: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: 
ProcessBundleRequest): ProcessBundleRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string process_bundle_descriptor_id */ 1: + message.processBundleDescriptorId = reader.string(); + break; + case /* repeated org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken cache_tokens */ 2: + message.cacheTokens.push(ProcessBundleRequest_CacheToken.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* org.apache.beam.model.fn_execution.v1.Elements elements */ 3: + message.elements = Elements.internalBinaryRead(reader, reader.uint32(), options, message.elements); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ProcessBundleRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string process_bundle_descriptor_id = 1; */ + if (message.processBundleDescriptorId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.processBundleDescriptorId); + /* repeated org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken cache_tokens = 2; */ + for (let i = 0; i < message.cacheTokens.length; i++) + ProcessBundleRequest_CacheToken.internalBinaryWrite(message.cacheTokens[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.Elements elements = 3; */ + if (message.elements) + Elements.internalBinaryWrite(message.elements, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleRequest + */ +export const ProcessBundleRequest = new ProcessBundleRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProcessBundleRequest_CacheToken$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken", [ + { no: 1, name: "user_state", kind: "message", oneof: "type", T: () => ProcessBundleRequest_CacheToken_UserState }, + { no: 2, name: "side_input", kind: "message", oneof: "type", T: () => ProcessBundleRequest_CacheToken_SideInput }, + { no: 10, name: "token", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): ProcessBundleRequest_CacheToken { + const message = { type: { oneofKind: undefined }, token: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProcessBundleRequest_CacheToken): ProcessBundleRequest_CacheToken { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken.UserState user_state */ 1: + message.type = { + oneofKind: "userState", + userState: ProcessBundleRequest_CacheToken_UserState.internalBinaryRead(reader, reader.uint32(), options, (message.type as any).userState) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken.SideInput side_input */ 2: + message.type = { + oneofKind: "sideInput", + sideInput: ProcessBundleRequest_CacheToken_SideInput.internalBinaryRead(reader, reader.uint32(), options, (message.type as any).sideInput) + }; + break; + case /* bytes token */ 10: + message.token = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ProcessBundleRequest_CacheToken, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken.UserState user_state = 1; */ + if (message.type.oneofKind === "userState") + ProcessBundleRequest_CacheToken_UserState.internalBinaryWrite(message.type.userState, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken.SideInput side_input = 2; */ + if (message.type.oneofKind === "sideInput") + ProcessBundleRequest_CacheToken_SideInput.internalBinaryWrite(message.type.sideInput, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* bytes token = 10; */ + if (message.token.length) + writer.tag(10, WireType.LengthDelimited).bytes(message.token); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken + */ +export const ProcessBundleRequest_CacheToken = new ProcessBundleRequest_CacheToken$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProcessBundleRequest_CacheToken_UserState$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken.UserState", []); + } + create(value?: PartialMessage): ProcessBundleRequest_CacheToken_UserState { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProcessBundleRequest_CacheToken_UserState): ProcessBundleRequest_CacheToken_UserState { + return target ?? this.create(); + } + internalBinaryWrite(message: ProcessBundleRequest_CacheToken_UserState, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken.UserState + */ +export const ProcessBundleRequest_CacheToken_UserState = new ProcessBundleRequest_CacheToken_UserState$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProcessBundleRequest_CacheToken_SideInput$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken.SideInput", [ + { no: 1, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "side_input_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ProcessBundleRequest_CacheToken_SideInput { + const message = { transformId: "", sideInputId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProcessBundleRequest_CacheToken_SideInput): ProcessBundleRequest_CacheToken_SideInput { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string transform_id */ 1: + message.transformId = reader.string(); + break; + case /* string side_input_id */ 2: + message.sideInputId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ProcessBundleRequest_CacheToken_SideInput, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string transform_id = 1; */ + if (message.transformId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.transformId); + /* string side_input_id = 2; */ + if (message.sideInputId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.sideInputId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleRequest.CacheToken.SideInput + */ +export const ProcessBundleRequest_CacheToken_SideInput = new ProcessBundleRequest_CacheToken_SideInput$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProcessBundleResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.ProcessBundleResponse", [ + { no: 2, name: "residual_roots", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => DelayedBundleApplication }, + { no: 3, name: "monitoring_infos", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => MonitoringInfo }, + { no: 4, name: "requires_finalization", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, + { no: 5, name: "monitoring_data", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "scalar", T: 12 /*ScalarType.BYTES*/ } }, + { no: 6, name: "elements", kind: "message", T: () => Elements } + ]); + } + create(value?: PartialMessage): ProcessBundleResponse { + const message = { residualRoots: [], monitoringInfos: [], requiresFinalization: false, monitoringData: {} }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) 
+ reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProcessBundleResponse): ProcessBundleResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.fn_execution.v1.DelayedBundleApplication residual_roots */ 2: + message.residualRoots.push(DelayedBundleApplication.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated org.apache.beam.model.pipeline.v1.MonitoringInfo monitoring_infos */ 3: + message.monitoringInfos.push(MonitoringInfo.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* bool requires_finalization */ 4: + message.requiresFinalization = reader.bool(); + break; + case /* map monitoring_data */ 5: + this.binaryReadMap5(message.monitoringData, reader, options); + break; + case /* org.apache.beam.model.fn_execution.v1.Elements elements */ 6: + message.elements = Elements.internalBinaryRead(reader, reader.uint32(), options, message.elements); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap5(map: ProcessBundleResponse["monitoringData"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ProcessBundleResponse["monitoringData"] | undefined, val: ProcessBundleResponse["monitoringData"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = reader.bytes(); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.fn_execution.v1.ProcessBundleResponse.monitoring_data"); + } + } + map[key ?? ""] = val ?? new Uint8Array(0); + } + internalBinaryWrite(message: ProcessBundleResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.fn_execution.v1.DelayedBundleApplication residual_roots = 2; */ + for (let i = 0; i < message.residualRoots.length; i++) + DelayedBundleApplication.internalBinaryWrite(message.residualRoots[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* repeated org.apache.beam.model.pipeline.v1.MonitoringInfo monitoring_infos = 3; */ + for (let i = 0; i < message.monitoringInfos.length; i++) + MonitoringInfo.internalBinaryWrite(message.monitoringInfos[i], writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* bool requires_finalization = 4; */ + if (message.requiresFinalization !== false) + writer.tag(4, WireType.Varint).bool(message.requiresFinalization); + /* map monitoring_data = 5; */ + for (let k of Object.keys(message.monitoringData)) + writer.tag(5, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k).tag(2, WireType.LengthDelimited).bytes(message.monitoringData[k]).join(); + /* org.apache.beam.model.fn_execution.v1.Elements elements = 6; */ + if 
(message.elements) + Elements.internalBinaryWrite(message.elements, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleResponse + */ +export const ProcessBundleResponse = new ProcessBundleResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProcessBundleProgressRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.ProcessBundleProgressRequest", [ + { no: 1, name: "instruction_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ProcessBundleProgressRequest { + const message = { instructionId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProcessBundleProgressRequest): ProcessBundleProgressRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string instruction_id */ 1: + message.instructionId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ProcessBundleProgressRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string instruction_id = 1; */ + if (message.instructionId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.instructionId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleProgressRequest + */ +export const ProcessBundleProgressRequest = new ProcessBundleProgressRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class MonitoringInfosMetadataRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.MonitoringInfosMetadataRequest", [ + { no: 1, name: "monitoring_info_id", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): MonitoringInfosMetadataRequest { + const message = { monitoringInfoId: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MonitoringInfosMetadataRequest): MonitoringInfosMetadataRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated string monitoring_info_id */ 1: + message.monitoringInfoId.push(reader.string()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: MonitoringInfosMetadataRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated string monitoring_info_id = 1; */ + for (let i = 0; i < message.monitoringInfoId.length; i++) + writer.tag(1, WireType.LengthDelimited).string(message.monitoringInfoId[i]); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.MonitoringInfosMetadataRequest + */ +export const MonitoringInfosMetadataRequest = new MonitoringInfosMetadataRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProcessBundleProgressResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.ProcessBundleProgressResponse", [ + { no: 3, name: "monitoring_infos", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => MonitoringInfo }, + { no: 5, name: "monitoring_data", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "scalar", T: 12 /*ScalarType.BYTES*/ } } + ]); + } + create(value?: PartialMessage): ProcessBundleProgressResponse { + const message = { monitoringInfos: [], monitoringData: {} }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: 
false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProcessBundleProgressResponse): ProcessBundleProgressResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.pipeline.v1.MonitoringInfo monitoring_infos */ 3: + message.monitoringInfos.push(MonitoringInfo.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* map monitoring_data */ 5: + this.binaryReadMap5(message.monitoringData, reader, options); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap5(map: ProcessBundleProgressResponse["monitoringData"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ProcessBundleProgressResponse["monitoringData"] | undefined, val: ProcessBundleProgressResponse["monitoringData"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = reader.bytes(); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.fn_execution.v1.ProcessBundleProgressResponse.monitoring_data"); + } + } + map[key ?? ""] = val ?? 
new Uint8Array(0); + } + internalBinaryWrite(message: ProcessBundleProgressResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.pipeline.v1.MonitoringInfo monitoring_infos = 3; */ + for (let i = 0; i < message.monitoringInfos.length; i++) + MonitoringInfo.internalBinaryWrite(message.monitoringInfos[i], writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* map monitoring_data = 5; */ + for (let k of Object.keys(message.monitoringData)) + writer.tag(5, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k).tag(2, WireType.LengthDelimited).bytes(message.monitoringData[k]).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleProgressResponse + */ +export const ProcessBundleProgressResponse = new ProcessBundleProgressResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class MonitoringInfosMetadataResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.MonitoringInfosMetadataResponse", [ + { no: 1, name: "monitoring_info", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => MonitoringInfo } } + ]); + } + create(value?: PartialMessage): MonitoringInfosMetadataResponse { + const message = { monitoringInfo: {} }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MonitoringInfosMetadataResponse): MonitoringInfosMetadataResponse { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* map monitoring_info */ 1: + this.binaryReadMap1(message.monitoringInfo, reader, options); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap1(map: MonitoringInfosMetadataResponse["monitoringInfo"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof MonitoringInfosMetadataResponse["monitoringInfo"] | undefined, val: MonitoringInfosMetadataResponse["monitoringInfo"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = MonitoringInfo.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.fn_execution.v1.MonitoringInfosMetadataResponse.monitoring_info"); + } + } + map[key ?? ""] = val ?? MonitoringInfo.create(); + } + internalBinaryWrite(message: MonitoringInfosMetadataResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* map monitoring_info = 1; */ + for (let k of Object.keys(message.monitoringInfo)) { + writer.tag(1, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + MonitoringInfo.internalBinaryWrite(message.monitoringInfo[k], writer, options); + writer.join().join(); + } + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.MonitoringInfosMetadataResponse + */ +export const MonitoringInfosMetadataResponse = new MonitoringInfosMetadataResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProcessBundleSplitRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.ProcessBundleSplitRequest", [ + { no: 1, name: "instruction_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "desired_splits", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => ProcessBundleSplitRequest_DesiredSplit } } + ]); + } + create(value?: PartialMessage): ProcessBundleSplitRequest { + const message = { instructionId: "", desiredSplits: {} }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProcessBundleSplitRequest): ProcessBundleSplitRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string instruction_id */ 1: + message.instructionId = reader.string(); + break; + case /* map desired_splits */ 3: + this.binaryReadMap3(message.desiredSplits, reader, options); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap3(map: ProcessBundleSplitRequest["desiredSplits"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ProcessBundleSplitRequest["desiredSplits"] | undefined, val: ProcessBundleSplitRequest["desiredSplits"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = ProcessBundleSplitRequest_DesiredSplit.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.fn_execution.v1.ProcessBundleSplitRequest.desired_splits"); + } + } + map[key ?? ""] = val ?? ProcessBundleSplitRequest_DesiredSplit.create(); + } + internalBinaryWrite(message: ProcessBundleSplitRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string instruction_id = 1; */ + if (message.instructionId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.instructionId); + /* map desired_splits = 3; */ + for (let k of Object.keys(message.desiredSplits)) { + writer.tag(3, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + ProcessBundleSplitRequest_DesiredSplit.internalBinaryWrite(message.desiredSplits[k], writer, options); + writer.join().join(); + } + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleSplitRequest + */ +export const ProcessBundleSplitRequest = new ProcessBundleSplitRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProcessBundleSplitRequest_DesiredSplit$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.ProcessBundleSplitRequest.DesiredSplit", [ + { no: 1, name: "fraction_of_remainder", kind: "scalar", T: 1 /*ScalarType.DOUBLE*/ }, + { no: 3, name: "allowed_split_points", kind: "scalar", repeat: 1 /*RepeatType.PACKED*/, T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "estimated_input_elements", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): ProcessBundleSplitRequest_DesiredSplit { + const message = { fractionOfRemainder: 0, allowedSplitPoints: [], estimatedInputElements: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProcessBundleSplitRequest_DesiredSplit): ProcessBundleSplitRequest_DesiredSplit { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* double fraction_of_remainder */ 1: + message.fractionOfRemainder = reader.double(); + break; + case /* repeated int64 allowed_split_points */ 3: + if (wireType === WireType.LengthDelimited) + for (let e = reader.int32() + reader.pos; reader.pos < e;) + message.allowedSplitPoints.push(reader.int64().toBigInt()); + else + message.allowedSplitPoints.push(reader.int64().toBigInt()); + break; + case /* int64 estimated_input_elements */ 2: + message.estimatedInputElements = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ProcessBundleSplitRequest_DesiredSplit, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* double fraction_of_remainder = 1; */ + if (message.fractionOfRemainder !== 0) + writer.tag(1, WireType.Bit64).double(message.fractionOfRemainder); + /* repeated int64 allowed_split_points = 3; */ + if (message.allowedSplitPoints.length) { + writer.tag(3, WireType.LengthDelimited).fork(); + for (let i = 0; i < message.allowedSplitPoints.length; i++) + writer.int64(message.allowedSplitPoints[i]); + writer.join(); + } + /* int64 estimated_input_elements = 2; */ + if (message.estimatedInputElements !== 0n) + writer.tag(2, WireType.Varint).int64(message.estimatedInputElements); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleSplitRequest.DesiredSplit + */ +export const ProcessBundleSplitRequest_DesiredSplit = new ProcessBundleSplitRequest_DesiredSplit$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProcessBundleSplitResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.ProcessBundleSplitResponse", [ + { no: 1, name: "primary_roots", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => BundleApplication }, + { no: 2, name: "residual_roots", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => DelayedBundleApplication }, + { no: 3, name: "channel_splits", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ProcessBundleSplitResponse_ChannelSplit } + ]); + } + create(value?: PartialMessage): ProcessBundleSplitResponse { + const message = { primaryRoots: [], residualRoots: [], channelSplits: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProcessBundleSplitResponse): ProcessBundleSplitResponse { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.fn_execution.v1.BundleApplication primary_roots */ 1: + message.primaryRoots.push(BundleApplication.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated org.apache.beam.model.fn_execution.v1.DelayedBundleApplication residual_roots */ 2: + message.residualRoots.push(DelayedBundleApplication.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated org.apache.beam.model.fn_execution.v1.ProcessBundleSplitResponse.ChannelSplit channel_splits */ 3: + message.channelSplits.push(ProcessBundleSplitResponse_ChannelSplit.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ProcessBundleSplitResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.fn_execution.v1.BundleApplication primary_roots = 1; */ + for (let i = 0; i < message.primaryRoots.length; i++) + BundleApplication.internalBinaryWrite(message.primaryRoots[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* repeated org.apache.beam.model.fn_execution.v1.DelayedBundleApplication residual_roots = 2; */ + for (let i = 0; i < message.residualRoots.length; i++) + DelayedBundleApplication.internalBinaryWrite(message.residualRoots[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* repeated org.apache.beam.model.fn_execution.v1.ProcessBundleSplitResponse.ChannelSplit channel_splits = 3; */ + for (let i = 0; i < message.channelSplits.length; i++) + ProcessBundleSplitResponse_ChannelSplit.internalBinaryWrite(message.channelSplits[i], writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleSplitResponse + */ +export const ProcessBundleSplitResponse = new ProcessBundleSplitResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProcessBundleSplitResponse_ChannelSplit$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.ProcessBundleSplitResponse.ChannelSplit", [ + { no: 1, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "last_primary_element", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 3, name: "first_residual_element", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): ProcessBundleSplitResponse_ChannelSplit { + const message = { transformId: "", lastPrimaryElement: 0n, firstResidualElement: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProcessBundleSplitResponse_ChannelSplit): ProcessBundleSplitResponse_ChannelSplit { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string transform_id */ 1: + message.transformId = reader.string(); + break; + case /* int64 last_primary_element */ 2: + message.lastPrimaryElement = reader.int64().toBigInt(); + break; + case /* int64 first_residual_element */ 3: + message.firstResidualElement = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ProcessBundleSplitResponse_ChannelSplit, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string transform_id = 1; */ + if (message.transformId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.transformId); + /* int64 last_primary_element = 2; */ + if (message.lastPrimaryElement !== 0n) + writer.tag(2, WireType.Varint).int64(message.lastPrimaryElement); + /* int64 first_residual_element = 3; */ + if (message.firstResidualElement !== 0n) + writer.tag(3, WireType.Varint).int64(message.firstResidualElement); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.ProcessBundleSplitResponse.ChannelSplit + */ +export const ProcessBundleSplitResponse_ChannelSplit = new ProcessBundleSplitResponse_ChannelSplit$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FinalizeBundleRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.FinalizeBundleRequest", [ + { no: 1, name: "instruction_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): FinalizeBundleRequest { + const message = { instructionId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeBundleRequest): FinalizeBundleRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string instruction_id */ 1: + message.instructionId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FinalizeBundleRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string instruction_id = 1; */ + if (message.instructionId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.instructionId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.FinalizeBundleRequest + */ +export const FinalizeBundleRequest = new FinalizeBundleRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FinalizeBundleResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.FinalizeBundleResponse", []); + } + create(value?: PartialMessage): FinalizeBundleResponse { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FinalizeBundleResponse): FinalizeBundleResponse { + return target ?? this.create(); + } + internalBinaryWrite(message: FinalizeBundleResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.FinalizeBundleResponse + */ +export const FinalizeBundleResponse = new FinalizeBundleResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Elements$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.Elements", [ + { no: 1, name: "data", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Elements_Data }, + { no: 2, name: "timers", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Elements_Timers } + ]); + } + create(value?: PartialMessage): Elements { + const message = { data: [], timers: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Elements): Elements { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.fn_execution.v1.Elements.Data data */ 1: + message.data.push(Elements_Data.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated org.apache.beam.model.fn_execution.v1.Elements.Timers timers */ 2: + message.timers.push(Elements_Timers.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Elements, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.fn_execution.v1.Elements.Data data = 1; */ + for (let i = 0; i < message.data.length; i++) + Elements_Data.internalBinaryWrite(message.data[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* repeated org.apache.beam.model.fn_execution.v1.Elements.Timers timers = 2; */ + for (let i = 0; i < message.timers.length; i++) + Elements_Timers.internalBinaryWrite(message.timers[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.Elements + */ +export const Elements = new Elements$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Elements_Data$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.Elements.Data", [ + { no: 1, name: "instruction_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "data", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 4, name: "is_last", kind: "scalar", T: 8 /*ScalarType.BOOL*/ } + ]); + } + create(value?: PartialMessage): Elements_Data { + const message = { instructionId: "", transformId: "", data: new Uint8Array(0), isLast: false }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, 
target?: Elements_Data): Elements_Data { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string instruction_id */ 1: + message.instructionId = reader.string(); + break; + case /* string transform_id */ 2: + message.transformId = reader.string(); + break; + case /* bytes data */ 3: + message.data = reader.bytes(); + break; + case /* bool is_last */ 4: + message.isLast = reader.bool(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Elements_Data, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string instruction_id = 1; */ + if (message.instructionId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.instructionId); + /* string transform_id = 2; */ + if (message.transformId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.transformId); + /* bytes data = 3; */ + if (message.data.length) + writer.tag(3, WireType.LengthDelimited).bytes(message.data); + /* bool is_last = 4; */ + if (message.isLast !== false) + writer.tag(4, WireType.Varint).bool(message.isLast); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.Elements.Data + */ +export const Elements_Data = new Elements_Data$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Elements_Timers$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.Elements.Timers", [ + { no: 1, name: "instruction_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "timer_family_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "timers", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 5, name: "is_last", kind: "scalar", T: 8 /*ScalarType.BOOL*/ } + ]); + } + create(value?: PartialMessage): Elements_Timers { + const message = { instructionId: "", transformId: "", timerFamilyId: "", timers: new Uint8Array(0), isLast: false }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Elements_Timers): Elements_Timers { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string instruction_id */ 1: + message.instructionId = reader.string(); + break; + case /* string transform_id */ 2: + message.transformId = reader.string(); + break; + case /* string timer_family_id */ 3: + message.timerFamilyId = reader.string(); + break; + case /* bytes timers */ 4: + message.timers = reader.bytes(); + break; + case /* bool is_last */ 5: + message.isLast = reader.bool(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Elements_Timers, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string instruction_id = 1; */ + if (message.instructionId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.instructionId); + /* string transform_id = 2; */ + if (message.transformId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.transformId); + /* string timer_family_id = 3; */ + if (message.timerFamilyId !== "") + writer.tag(3, WireType.LengthDelimited).string(message.timerFamilyId); + /* bytes timers = 4; */ + if (message.timers.length) + writer.tag(4, WireType.LengthDelimited).bytes(message.timers); + /* bool is_last = 5; */ + if (message.isLast !== false) + writer.tag(5, WireType.Varint).bool(message.isLast); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.Elements.Timers + */ +export const Elements_Timers = new Elements_Timers$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateRequest", [ + { no: 1, name: "id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "instruction_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "state_key", kind: "message", T: () => StateKey }, + { no: 1000, name: "get", kind: "message", oneof: "request", T: () => StateGetRequest }, + { no: 1001, name: "append", kind: "message", oneof: "request", T: () => StateAppendRequest }, + { no: 1002, name: "clear", kind: "message", oneof: "request", T: () => StateClearRequest } + ]); + } + create(value?: PartialMessage): StateRequest { + const message = { id: "", instructionId: "", request: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateRequest): StateRequest { + let message = target ?? 
// NOTE(review): generated StateRequest (de)serialization. Fields 1000-1002 form the 'request'
// oneof: each case assigns a fresh { oneofKind, ... } object, merging into any existing value.
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string id */ 1: + message.id = reader.string(); + break; + case /* string instruction_id */ 2: + message.instructionId = reader.string(); + break; + case /* org.apache.beam.model.fn_execution.v1.StateKey state_key */ 3: + message.stateKey = StateKey.internalBinaryRead(reader, reader.uint32(), options, message.stateKey); + break; + case /* org.apache.beam.model.fn_execution.v1.StateGetRequest get */ 1000: + message.request = { + oneofKind: "get", + get: StateGetRequest.internalBinaryRead(reader, reader.uint32(), options, (message.request as any).get) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.StateAppendRequest append */ 1001: + message.request = { + oneofKind: "append", + append: StateAppendRequest.internalBinaryRead(reader, reader.uint32(), options, (message.request as any).append) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.StateClearRequest clear */ 1002: + message.request = { + oneofKind: "clear", + clear: StateClearRequest.internalBinaryRead(reader, reader.uint32(), options, (message.request as any).clear) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string id = 1; */ + if (message.id !== "") + writer.tag(1, WireType.LengthDelimited).string(message.id); + /* string instruction_id = 2; */ + if (message.instructionId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.instructionId); + /* org.apache.beam.model.fn_execution.v1.StateKey state_key = 3; */ + if (message.stateKey) + StateKey.internalBinaryWrite(message.stateKey, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.StateGetRequest get = 1000; */ + if (message.request.oneofKind === "get") + StateGetRequest.internalBinaryWrite(message.request.get, writer.tag(1000, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.StateAppendRequest append = 1001; */ + if (message.request.oneofKind === "append") + StateAppendRequest.internalBinaryWrite(message.request.append, writer.tag(1001, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.StateClearRequest clear = 1002; */ + if (message.request.oneofKind === "clear") + StateClearRequest.internalBinaryWrite(message.request.clear, writer.tag(1002, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
// NOTE(review): generated StateResponse type mirroring StateRequest; the 'response' oneof
// carries get/append/clear results. Generated code — regenerate rather than hand-edit.
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateRequest + */ +export const StateRequest = new StateRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateResponse", [ + { no: 1, name: "id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "error", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 1000, name: "get", kind: "message", oneof: "response", T: () => StateGetResponse }, + { no: 1001, name: "append", kind: "message", oneof: "response", T: () => StateAppendResponse }, + { no: 1002, name: "clear", kind: "message", oneof: "response", T: () => StateClearResponse } + ]); + } + create(value?: PartialMessage): StateResponse { + const message = { id: "", error: "", response: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateResponse): StateResponse { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string id */ 1: + message.id = reader.string(); + break; + case /* string error */ 2: + message.error = reader.string(); + break; + case /* org.apache.beam.model.fn_execution.v1.StateGetResponse get */ 1000: + message.response = { + oneofKind: "get", + get: StateGetResponse.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).get) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.StateAppendResponse append */ 1001: + message.response = { + oneofKind: "append", + append: StateAppendResponse.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).append) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.StateClearResponse clear */ 1002: + message.response = { + oneofKind: "clear", + clear: StateClearResponse.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).clear) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string id = 1; */ + if (message.id !== "") + writer.tag(1, WireType.LengthDelimited).string(message.id); + /* string error = 2; */ + if (message.error !== "") + writer.tag(2, WireType.LengthDelimited).string(message.error); + /* org.apache.beam.model.fn_execution.v1.StateGetResponse get = 1000; */ + if (message.response.oneofKind === "get") + StateGetResponse.internalBinaryWrite(message.response.get, writer.tag(1000, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.StateAppendResponse append = 1001; */ + if (message.response.oneofKind === "append") + StateAppendResponse.internalBinaryWrite(message.response.append, writer.tag(1001, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.StateClearResponse clear = 1002; */ + if (message.response.oneofKind === "clear") + StateClearResponse.internalBinaryWrite(message.response.clear, writer.tag(1002, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
// NOTE(review): generated StateKey type. All seven fields form a single 'type' oneof
// (runner / side-input / user-state variants); read assigns { oneofKind, ... }, write
// emits only the active variant. Generated code — regenerate rather than hand-edit.
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateResponse + */ +export const StateResponse = new StateResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateKey$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateKey", [ + { no: 1, name: "runner", kind: "message", oneof: "type", T: () => StateKey_Runner }, + { no: 2, name: "multimap_side_input", kind: "message", oneof: "type", T: () => StateKey_MultimapSideInput }, + { no: 3, name: "bag_user_state", kind: "message", oneof: "type", T: () => StateKey_BagUserState }, + { no: 4, name: "iterable_side_input", kind: "message", oneof: "type", T: () => StateKey_IterableSideInput }, + { no: 5, name: "multimap_keys_side_input", kind: "message", oneof: "type", T: () => StateKey_MultimapKeysSideInput }, + { no: 6, name: "multimap_keys_user_state", kind: "message", oneof: "type", T: () => StateKey_MultimapKeysUserState }, + { no: 7, name: "multimap_user_state", kind: "message", oneof: "type", T: () => StateKey_MultimapUserState } + ]); + } + create(value?: PartialMessage): StateKey { + const message = { type: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateKey): StateKey { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.fn_execution.v1.StateKey.Runner runner */ 1: + message.type = { + oneofKind: "runner", + runner: StateKey_Runner.internalBinaryRead(reader, reader.uint32(), options, (message.type as any).runner) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.StateKey.MultimapSideInput multimap_side_input */ 2: + message.type = { + oneofKind: "multimapSideInput", + multimapSideInput: StateKey_MultimapSideInput.internalBinaryRead(reader, reader.uint32(), options, (message.type as any).multimapSideInput) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.StateKey.BagUserState bag_user_state */ 3: + message.type = { + oneofKind: "bagUserState", + bagUserState: StateKey_BagUserState.internalBinaryRead(reader, reader.uint32(), options, (message.type as any).bagUserState) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.StateKey.IterableSideInput iterable_side_input */ 4: + message.type = { + oneofKind: "iterableSideInput", + iterableSideInput: StateKey_IterableSideInput.internalBinaryRead(reader, reader.uint32(), options, (message.type as any).iterableSideInput) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.StateKey.MultimapKeysSideInput multimap_keys_side_input */ 5: + message.type = { + oneofKind: "multimapKeysSideInput", + multimapKeysSideInput: StateKey_MultimapKeysSideInput.internalBinaryRead(reader, reader.uint32(), options, (message.type as any).multimapKeysSideInput) + }; + break; + case /* org.apache.beam.model.fn_execution.v1.StateKey.MultimapKeysUserState multimap_keys_user_state */ 6: + message.type = { + oneofKind: "multimapKeysUserState", + multimapKeysUserState: StateKey_MultimapKeysUserState.internalBinaryRead(reader, reader.uint32(), options, (message.type as any).multimapKeysUserState) + }; + break; + case /* 
org.apache.beam.model.fn_execution.v1.StateKey.MultimapUserState multimap_user_state */ 7: + message.type = { + oneofKind: "multimapUserState", + multimapUserState: StateKey_MultimapUserState.internalBinaryRead(reader, reader.uint32(), options, (message.type as any).multimapUserState) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateKey, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.fn_execution.v1.StateKey.Runner runner = 1; */ + if (message.type.oneofKind === "runner") + StateKey_Runner.internalBinaryWrite(message.type.runner, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.StateKey.MultimapSideInput multimap_side_input = 2; */ + if (message.type.oneofKind === "multimapSideInput") + StateKey_MultimapSideInput.internalBinaryWrite(message.type.multimapSideInput, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.StateKey.BagUserState bag_user_state = 3; */ + if (message.type.oneofKind === "bagUserState") + StateKey_BagUserState.internalBinaryWrite(message.type.bagUserState, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.StateKey.IterableSideInput iterable_side_input = 4; */ + if (message.type.oneofKind === "iterableSideInput") + StateKey_IterableSideInput.internalBinaryWrite(message.type.iterableSideInput, writer.tag(4, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.StateKey.MultimapKeysSideInput multimap_keys_side_input = 5; */ + if 
// NOTE(review): tail of StateKey's write dispatch, then generated StateKey.Runner
// (single opaque 'key' bytes field). Generated code — regenerate rather than hand-edit.
(message.type.oneofKind === "multimapKeysSideInput") + StateKey_MultimapKeysSideInput.internalBinaryWrite(message.type.multimapKeysSideInput, writer.tag(5, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.StateKey.MultimapKeysUserState multimap_keys_user_state = 6; */ + if (message.type.oneofKind === "multimapKeysUserState") + StateKey_MultimapKeysUserState.internalBinaryWrite(message.type.multimapKeysUserState, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.fn_execution.v1.StateKey.MultimapUserState multimap_user_state = 7; */ + if (message.type.oneofKind === "multimapUserState") + StateKey_MultimapUserState.internalBinaryWrite(message.type.multimapUserState, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateKey + */ +export const StateKey = new StateKey$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateKey_Runner$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateKey.Runner", [ + { no: 1, name: "key", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): StateKey_Runner { + const message = { key: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateKey_Runner): StateKey_Runner { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bytes key */ 1: + message.key = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateKey_Runner, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bytes key = 1; */ + if (message.key.length) + writer.tag(1, WireType.LengthDelimited).bytes(message.key); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateKey.Runner + */ +export const StateKey_Runner = new StateKey_Runner$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateKey_IterableSideInput$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateKey.IterableSideInput", [ + { no: 1, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "side_input_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "window", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): StateKey_IterableSideInput { + const message = { transformId: "", sideInputId: "", window: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, 
// NOTE(review): generated StateKey.IterableSideInput (transform_id, side_input_id,
// encoded window bytes). Generated code — regenerate rather than hand-edit.
options: BinaryReadOptions, target?: StateKey_IterableSideInput): StateKey_IterableSideInput { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string transform_id */ 1: + message.transformId = reader.string(); + break; + case /* string side_input_id */ 2: + message.sideInputId = reader.string(); + break; + case /* bytes window */ 3: + message.window = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateKey_IterableSideInput, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string transform_id = 1; */ + if (message.transformId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.transformId); + /* string side_input_id = 2; */ + if (message.sideInputId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.sideInputId); + /* bytes window = 3; */ + if (message.window.length) + writer.tag(3, WireType.LengthDelimited).bytes(message.window); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateKey.IterableSideInput + */ +export const StateKey_IterableSideInput = new StateKey_IterableSideInput$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateKey_MultimapSideInput$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateKey.MultimapSideInput", [ + { no: 1, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "side_input_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "window", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 4, name: "key", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): StateKey_MultimapSideInput { + const message = { transformId: "", sideInputId: "", window: new Uint8Array(0), key: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateKey_MultimapSideInput): StateKey_MultimapSideInput { + let message = target ?? 
// NOTE(review): generated StateKey.MultimapSideInput (adds an encoded map 'key' to the
// side-input coordinates) and start of MultimapKeysSideInput. Generated code — do not hand-edit.
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string transform_id */ 1: + message.transformId = reader.string(); + break; + case /* string side_input_id */ 2: + message.sideInputId = reader.string(); + break; + case /* bytes window */ 3: + message.window = reader.bytes(); + break; + case /* bytes key */ 4: + message.key = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateKey_MultimapSideInput, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string transform_id = 1; */ + if (message.transformId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.transformId); + /* string side_input_id = 2; */ + if (message.sideInputId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.sideInputId); + /* bytes window = 3; */ + if (message.window.length) + writer.tag(3, WireType.LengthDelimited).bytes(message.window); + /* bytes key = 4; */ + if (message.key.length) + writer.tag(4, WireType.LengthDelimited).bytes(message.key); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateKey.MultimapSideInput + */ +export const StateKey_MultimapSideInput = new StateKey_MultimapSideInput$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateKey_MultimapKeysSideInput$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateKey.MultimapKeysSideInput", [ + { no: 1, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "side_input_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "window", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): StateKey_MultimapKeysSideInput { + const message = { transformId: "", sideInputId: "", window: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateKey_MultimapKeysSideInput): StateKey_MultimapKeysSideInput { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string transform_id */ 1: + message.transformId = reader.string(); + break; + case /* string side_input_id */ 2: + message.sideInputId = reader.string(); + break; + case /* bytes window */ 3: + message.window = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
// NOTE(review): close of MultimapKeysSideInput, then generated StateKey.BagUserState
// (transform_id, user_state_id, encoded window and key). Generated code — do not hand-edit.
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateKey_MultimapKeysSideInput, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string transform_id = 1; */ + if (message.transformId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.transformId); + /* string side_input_id = 2; */ + if (message.sideInputId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.sideInputId); + /* bytes window = 3; */ + if (message.window.length) + writer.tag(3, WireType.LengthDelimited).bytes(message.window); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateKey.MultimapKeysSideInput + */ +export const StateKey_MultimapKeysSideInput = new StateKey_MultimapKeysSideInput$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateKey_BagUserState$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateKey.BagUserState", [ + { no: 1, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "user_state_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "window", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 4, name: "key", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): StateKey_BagUserState { + const message = { transformId: "", userStateId: "", window: new Uint8Array(0), key: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: 
BinaryReadOptions, target?: StateKey_BagUserState): StateKey_BagUserState { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string transform_id */ 1: + message.transformId = reader.string(); + break; + case /* string user_state_id */ 2: + message.userStateId = reader.string(); + break; + case /* bytes window */ 3: + message.window = reader.bytes(); + break; + case /* bytes key */ 4: + message.key = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateKey_BagUserState, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string transform_id = 1; */ + if (message.transformId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.transformId); + /* string user_state_id = 2; */ + if (message.userStateId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.userStateId); + /* bytes window = 3; */ + if (message.window.length) + writer.tag(3, WireType.LengthDelimited).bytes(message.window); + /* bytes key = 4; */ + if (message.key.length) + writer.tag(4, WireType.LengthDelimited).bytes(message.key); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
// NOTE(review): generated StateKey.MultimapKeysUserState — same field layout as
// BagUserState (transform_id, user_state_id, window, key). Generated code — do not hand-edit.
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateKey.BagUserState + */ +export const StateKey_BagUserState = new StateKey_BagUserState$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateKey_MultimapKeysUserState$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateKey.MultimapKeysUserState", [ + { no: 1, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "user_state_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "window", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 4, name: "key", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): StateKey_MultimapKeysUserState { + const message = { transformId: "", userStateId: "", window: new Uint8Array(0), key: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateKey_MultimapKeysUserState): StateKey_MultimapKeysUserState { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string transform_id */ 1: + message.transformId = reader.string(); + break; + case /* string user_state_id */ 2: + message.userStateId = reader.string(); + break; + case /* bytes window */ 3: + message.window = reader.bytes(); + break; + case /* bytes key */ 4: + message.key = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateKey_MultimapKeysUserState, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string transform_id = 1; */ + if (message.transformId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.transformId); + /* string user_state_id = 2; */ + if (message.userStateId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.userStateId); + /* bytes window = 3; */ + if (message.window.length) + writer.tag(3, WireType.LengthDelimited).bytes(message.window); + /* bytes key = 4; */ + if (message.key.length) + writer.tag(4, WireType.LengthDelimited).bytes(message.key); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
// NOTE(review): generated StateKey.MultimapUserState — extends the user-state key with a
// fifth 'map_key' bytes field. Generated code — regenerate rather than hand-edit.
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateKey.MultimapKeysUserState + */ +export const StateKey_MultimapKeysUserState = new StateKey_MultimapKeysUserState$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateKey_MultimapUserState$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateKey.MultimapUserState", [ + { no: 1, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "user_state_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "window", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 4, name: "key", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 5, name: "map_key", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): StateKey_MultimapUserState { + const message = { transformId: "", userStateId: "", window: new Uint8Array(0), key: new Uint8Array(0), mapKey: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateKey_MultimapUserState): StateKey_MultimapUserState { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string transform_id */ 1: + message.transformId = reader.string(); + break; + case /* string user_state_id */ 2: + message.userStateId = reader.string(); + break; + case /* bytes window */ 3: + message.window = reader.bytes(); + break; + case /* bytes key */ 4: + message.key = reader.bytes(); + break; + case /* bytes map_key */ 5: + message.mapKey = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateKey_MultimapUserState, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string transform_id = 1; */ + if (message.transformId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.transformId); + /* string user_state_id = 2; */ + if (message.userStateId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.userStateId); + /* bytes window = 3; */ + if (message.window.length) + writer.tag(3, WireType.LengthDelimited).bytes(message.window); + /* bytes key = 4; */ + if (message.key.length) + writer.tag(4, WireType.LengthDelimited).bytes(message.key); + /* bytes map_key = 5; */ + if (message.mapKey.length) + writer.tag(5, WireType.LengthDelimited).bytes(message.mapKey); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateKey.MultimapUserState + */ +export const StateKey_MultimapUserState = new StateKey_MultimapUserState$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateGetRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateGetRequest", [ + { no: 1, name: "continuation_token", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): StateGetRequest { + const message = { continuationToken: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateGetRequest): StateGetRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bytes continuation_token */ 1: + message.continuationToken = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateGetRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bytes continuation_token = 1; */ + if (message.continuationToken.length) + writer.tag(1, WireType.LengthDelimited).bytes(message.continuationToken); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateGetRequest + */ +export const StateGetRequest = new StateGetRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateGetResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateGetResponse", [ + { no: 1, name: "continuation_token", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 2, name: "data", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): StateGetResponse { + const message = { continuationToken: new Uint8Array(0), data: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateGetResponse): StateGetResponse { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bytes continuation_token */ 1: + message.continuationToken = reader.bytes(); + break; + case /* bytes data */ 2: + message.data = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateGetResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bytes continuation_token = 1; */ + if (message.continuationToken.length) + writer.tag(1, WireType.LengthDelimited).bytes(message.continuationToken); + /* bytes data = 2; */ + if (message.data.length) + writer.tag(2, WireType.LengthDelimited).bytes(message.data); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateGetResponse + */ +export const StateGetResponse = new StateGetResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateAppendRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateAppendRequest", [ + { no: 1, name: "data", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): StateAppendRequest { + const message = { data: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateAppendRequest): StateAppendRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bytes data */ 1: + message.data = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateAppendRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bytes data = 1; */ + if (message.data.length) + writer.tag(1, WireType.LengthDelimited).bytes(message.data); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateAppendRequest + */ +export const StateAppendRequest = new StateAppendRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateAppendResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateAppendResponse", []); + } + create(value?: PartialMessage): StateAppendResponse { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateAppendResponse): StateAppendResponse { + return target ?? this.create(); + } + internalBinaryWrite(message: StateAppendResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateAppendResponse + */ +export const StateAppendResponse = new StateAppendResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateClearRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateClearRequest", []); + } + create(value?: PartialMessage): StateClearRequest { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateClearRequest): StateClearRequest { + return target ?? this.create(); + } + internalBinaryWrite(message: StateClearRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateClearRequest + */ +export const StateClearRequest = new StateClearRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateClearResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StateClearResponse", []); + } + create(value?: PartialMessage): StateClearResponse { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateClearResponse): StateClearResponse { + return target ?? this.create(); + } + internalBinaryWrite(message: StateClearResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StateClearResponse + */ +export const StateClearResponse = new StateClearResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class LogEntry$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.LogEntry", [ + { no: 1, name: "severity", kind: "enum", T: () => ["org.apache.beam.model.fn_execution.v1.LogEntry.Severity.Enum", LogEntry_Severity_Enum] }, + { no: 2, name: "timestamp", kind: "message", T: () => Timestamp }, + { no: 3, name: "message", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "trace", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 5, name: "instruction_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 6, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 7, name: "log_location", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 8, name: "thread", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): LogEntry { + const message = { severity: 0, message: "", trace: "", instructionId: "", transformId: "", logLocation: "", thread: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LogEntry): LogEntry { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.fn_execution.v1.LogEntry.Severity.Enum severity */ 1: + message.severity = reader.int32(); + break; + case /* google.protobuf.Timestamp timestamp */ 2: + message.timestamp = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.timestamp); + break; + case /* string message */ 3: + message.message = reader.string(); + break; + case /* string trace */ 4: + message.trace = reader.string(); + break; + case /* string instruction_id */ 5: + message.instructionId = reader.string(); + break; + case /* string transform_id */ 6: + message.transformId = reader.string(); + break; + case /* string log_location */ 7: + message.logLocation = reader.string(); + break; + case /* string thread */ 8: + message.thread = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: LogEntry, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.fn_execution.v1.LogEntry.Severity.Enum severity = 1; */ + if (message.severity !== 0) + writer.tag(1, WireType.Varint).int32(message.severity); + /* google.protobuf.Timestamp timestamp = 2; */ + if (message.timestamp) + Timestamp.internalBinaryWrite(message.timestamp, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* string message = 3; */ + if (message.message !== "") + writer.tag(3, WireType.LengthDelimited).string(message.message); + /* string trace = 4; */ + if (message.trace !== "") + writer.tag(4, WireType.LengthDelimited).string(message.trace); + /* string instruction_id = 5; */ + if (message.instructionId !== "") + writer.tag(5, WireType.LengthDelimited).string(message.instructionId); + /* string transform_id = 6; */ + if (message.transformId !== "") + writer.tag(6, WireType.LengthDelimited).string(message.transformId); + /* string log_location = 7; */ + if (message.logLocation !== "") + writer.tag(7, WireType.LengthDelimited).string(message.logLocation); + /* string thread = 8; */ + if (message.thread !== "") + writer.tag(8, WireType.LengthDelimited).string(message.thread); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.LogEntry + */ +export const LogEntry = new LogEntry$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class LogEntry_List$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.LogEntry.List", [ + { no: 1, name: "log_entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => LogEntry } + ]); + } + create(value?: PartialMessage): LogEntry_List { + const message = { logEntries: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LogEntry_List): LogEntry_List { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.fn_execution.v1.LogEntry log_entries */ 1: + message.logEntries.push(LogEntry.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: LogEntry_List, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.fn_execution.v1.LogEntry log_entries = 1; */ + for (let i = 0; i < message.logEntries.length; i++) + LogEntry.internalBinaryWrite(message.logEntries[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.LogEntry.List + */ +export const LogEntry_List = new LogEntry_List$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class LogEntry_Severity$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.LogEntry.Severity", []); + } + create(value?: PartialMessage): LogEntry_Severity { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LogEntry_Severity): LogEntry_Severity { + return target ?? this.create(); + } + internalBinaryWrite(message: LogEntry_Severity, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.LogEntry.Severity + */ +export const LogEntry_Severity = new LogEntry_Severity$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class LogControl$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.LogControl", []); + } + create(value?: PartialMessage): LogControl { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LogControl): LogControl { + return target ?? this.create(); + } + internalBinaryWrite(message: LogControl, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.LogControl + */ +export const LogControl = new LogControl$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StartWorkerRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StartWorkerRequest", [ + { no: 1, name: "worker_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "control_endpoint", kind: "message", T: () => ApiServiceDescriptor }, + { no: 3, name: "logging_endpoint", kind: "message", T: () => ApiServiceDescriptor }, + { no: 4, name: "artifact_endpoint", kind: "message", T: () => ApiServiceDescriptor }, + { no: 5, name: "provision_endpoint", kind: "message", T: () => ApiServiceDescriptor }, + { no: 10, name: "params", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "scalar", T: 9 /*ScalarType.STRING*/ } } + ]); + } + create(value?: PartialMessage): StartWorkerRequest { + const message = { workerId: "", params: {} }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StartWorkerRequest): StartWorkerRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string worker_id */ 1: + message.workerId = reader.string(); + break; + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor control_endpoint */ 2: + message.controlEndpoint = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.controlEndpoint); + break; + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor logging_endpoint */ 3: + message.loggingEndpoint = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.loggingEndpoint); + break; + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor artifact_endpoint */ 4: + message.artifactEndpoint = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.artifactEndpoint); + break; + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor provision_endpoint */ 5: + message.provisionEndpoint = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.provisionEndpoint); + break; + case /* map params */ 10: + this.binaryReadMap10(message.params, reader, options); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap10(map: StartWorkerRequest["params"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof StartWorkerRequest["params"] | undefined, val: StartWorkerRequest["params"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = reader.string(); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.fn_execution.v1.StartWorkerRequest.params"); + } + } + map[key ?? ""] = val ?? ""; + } + internalBinaryWrite(message: StartWorkerRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string worker_id = 1; */ + if (message.workerId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.workerId); + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor control_endpoint = 2; */ + if (message.controlEndpoint) + ApiServiceDescriptor.internalBinaryWrite(message.controlEndpoint, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor logging_endpoint = 3; */ + if (message.loggingEndpoint) + ApiServiceDescriptor.internalBinaryWrite(message.loggingEndpoint, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor artifact_endpoint = 4; */ + if (message.artifactEndpoint) + ApiServiceDescriptor.internalBinaryWrite(message.artifactEndpoint, writer.tag(4, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor provision_endpoint = 5; */ + if (message.provisionEndpoint) + ApiServiceDescriptor.internalBinaryWrite(message.provisionEndpoint, writer.tag(5, WireType.LengthDelimited).fork(), 
options).join(); + /* map params = 10; */ + for (let k of Object.keys(message.params)) + writer.tag(10, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k).tag(2, WireType.LengthDelimited).string(message.params[k]).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StartWorkerRequest + */ +export const StartWorkerRequest = new StartWorkerRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StartWorkerResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StartWorkerResponse", [ + { no: 1, name: "error", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): StartWorkerResponse { + const message = { error: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StartWorkerResponse): StartWorkerResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string error */ 1: + message.error = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StartWorkerResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string error = 1; */ + if (message.error !== "") + writer.tag(1, WireType.LengthDelimited).string(message.error); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StartWorkerResponse + */ +export const StartWorkerResponse = new StartWorkerResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StopWorkerRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StopWorkerRequest", [ + { no: 1, name: "worker_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): StopWorkerRequest { + const message = { workerId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StopWorkerRequest): StopWorkerRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string worker_id */ 1: + message.workerId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StopWorkerRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string worker_id = 1; */ + if (message.workerId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.workerId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StopWorkerRequest + */ +export const StopWorkerRequest = new StopWorkerRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StopWorkerResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.StopWorkerResponse", [ + { no: 1, name: "error", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): StopWorkerResponse { + const message = { error: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StopWorkerResponse): StopWorkerResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string error */ 1: + message.error = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StopWorkerResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string error = 1; */ + if (message.error !== "") + writer.tag(1, WireType.LengthDelimited).string(message.error); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.StopWorkerResponse + */ +export const StopWorkerResponse = new StopWorkerResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class WorkerStatusRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.WorkerStatusRequest", [ + { no: 1, name: "id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): WorkerStatusRequest { + const message = { id: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: WorkerStatusRequest): WorkerStatusRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string id */ 1: + message.id = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: WorkerStatusRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string id = 1; */ + if (message.id !== "") + writer.tag(1, WireType.LengthDelimited).string(message.id); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.WorkerStatusRequest + */ +export const WorkerStatusRequest = new WorkerStatusRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class WorkerStatusResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.WorkerStatusResponse", [ + { no: 1, name: "id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "error", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "status_info", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): WorkerStatusResponse { + const message = { id: "", error: "", statusInfo: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: WorkerStatusResponse): WorkerStatusResponse { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string id */ 1: + message.id = reader.string(); + break; + case /* string error */ 2: + message.error = reader.string(); + break; + case /* string status_info */ 3: + message.statusInfo = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: WorkerStatusResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string id = 1; */ + if (message.id !== "") + writer.tag(1, WireType.LengthDelimited).string(message.id); + /* string error = 2; */ + if (message.error !== "") + writer.tag(2, WireType.LengthDelimited).string(message.error); + /* string status_info = 3; */ + if (message.statusInfo !== "") + writer.tag(3, WireType.LengthDelimited).string(message.statusInfo); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.WorkerStatusResponse + */ +export const WorkerStatusResponse = new WorkerStatusResponse$Type(); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.fn_execution.v1.BeamFnControl + */ +export const BeamFnControl = new ServiceType("org.apache.beam.model.fn_execution.v1.BeamFnControl", [ + { name: "Control", serverStreaming: true, clientStreaming: true, options: {}, I: InstructionResponse, O: InstructionRequest }, + { name: "GetProcessBundleDescriptor", options: {}, I: GetProcessBundleDescriptorRequest, O: ProcessBundleDescriptor } +]); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.fn_execution.v1.BeamFnData + */ +export const BeamFnData = new ServiceType("org.apache.beam.model.fn_execution.v1.BeamFnData", [ + { name: "Data", serverStreaming: true, clientStreaming: true, options: {}, I: Elements, O: Elements } +]); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.fn_execution.v1.BeamFnState + */ +export const BeamFnState = new ServiceType("org.apache.beam.model.fn_execution.v1.BeamFnState", [ + { name: "State", serverStreaming: true, clientStreaming: true, options: {}, I: StateRequest, O: StateResponse } +]); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.fn_execution.v1.BeamFnLogging + */ +export const BeamFnLogging = new ServiceType("org.apache.beam.model.fn_execution.v1.BeamFnLogging", [ + { name: "Logging", serverStreaming: true, clientStreaming: true, options: {}, I: LogEntry_List, O: LogControl } +]); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.fn_execution.v1.BeamFnExternalWorkerPool + */ +export const BeamFnExternalWorkerPool = new ServiceType("org.apache.beam.model.fn_execution.v1.BeamFnExternalWorkerPool", [ + { name: "StartWorker", options: {}, 
I: StartWorkerRequest, O: StartWorkerResponse }, + { name: "StopWorker", options: {}, I: StopWorkerRequest, O: StopWorkerResponse } +]); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.fn_execution.v1.BeamFnWorkerStatus + */ +export const BeamFnWorkerStatus = new ServiceType("org.apache.beam.model.fn_execution.v1.BeamFnWorkerStatus", [ + { name: "WorkerStatus", serverStreaming: true, clientStreaming: true, options: {}, I: WorkerStatusResponse, O: WorkerStatusRequest } +]); diff --git a/sdks/node-ts/src/apache_beam/proto/beam_job_api.client.ts b/sdks/node-ts/src/apache_beam/proto/beam_job_api.client.ts new file mode 100644 index 000000000000..b39f0bfda121 --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/beam_job_api.client.ts @@ -0,0 +1,222 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "beam_job_api.proto" (package "org.apache.beam.model.job_management.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers describing the Job API, api for communicating with a runner +// for job submission over GRPC. 
+// +import type { RpcTransport } from "@protobuf-ts/runtime-rpc"; +import type { ServiceInfo } from "@protobuf-ts/runtime-rpc"; +import { JobService } from "./beam_job_api"; +import type { DescribePipelineOptionsResponse } from "./beam_job_api"; +import type { DescribePipelineOptionsRequest } from "./beam_job_api"; +import type { GetJobMetricsResponse } from "./beam_job_api"; +import type { GetJobMetricsRequest } from "./beam_job_api"; +import type { JobMessagesResponse } from "./beam_job_api"; +import type { JobMessagesRequest } from "./beam_job_api"; +import type { ServerStreamingCall } from "@protobuf-ts/runtime-rpc"; +import type { CancelJobResponse } from "./beam_job_api"; +import type { CancelJobRequest } from "./beam_job_api"; +import type { GetJobPipelineResponse } from "./beam_job_api"; +import type { GetJobPipelineRequest } from "./beam_job_api"; +import type { JobStateEvent } from "./beam_job_api"; +import type { GetJobStateRequest } from "./beam_job_api"; +import type { GetJobsResponse } from "./beam_job_api"; +import type { GetJobsRequest } from "./beam_job_api"; +import type { RunJobResponse } from "./beam_job_api"; +import type { RunJobRequest } from "./beam_job_api"; +import { stackIntercept } from "@protobuf-ts/runtime-rpc"; +import type { PrepareJobResponse } from "./beam_job_api"; +import type { PrepareJobRequest } from "./beam_job_api"; +import type { UnaryCall } from "@protobuf-ts/runtime-rpc"; +import type { RpcOptions } from "@protobuf-ts/runtime-rpc"; +/** + * Job Service for running RunnerAPI pipelines + * + * @generated from protobuf service org.apache.beam.model.job_management.v1.JobService + */ +export interface IJobServiceClient { + /** + * Prepare a job for execution. The job will not be executed until a call is made to run with the + * returned preparationId. 
+ * + * @generated from protobuf rpc: Prepare(org.apache.beam.model.job_management.v1.PrepareJobRequest) returns (org.apache.beam.model.job_management.v1.PrepareJobResponse); + */ + prepare(input: PrepareJobRequest, options?: RpcOptions): UnaryCall; + /** + * Submit the job for execution + * + * @generated from protobuf rpc: Run(org.apache.beam.model.job_management.v1.RunJobRequest) returns (org.apache.beam.model.job_management.v1.RunJobResponse); + */ + run(input: RunJobRequest, options?: RpcOptions): UnaryCall; + /** + * Get a list of all invoked jobs + * + * @generated from protobuf rpc: GetJobs(org.apache.beam.model.job_management.v1.GetJobsRequest) returns (org.apache.beam.model.job_management.v1.GetJobsResponse); + */ + getJobs(input: GetJobsRequest, options?: RpcOptions): UnaryCall; + /** + * Get the current state of the job + * + * @generated from protobuf rpc: GetState(org.apache.beam.model.job_management.v1.GetJobStateRequest) returns (org.apache.beam.model.job_management.v1.JobStateEvent); + */ + getState(input: GetJobStateRequest, options?: RpcOptions): UnaryCall; + /** + * Get the job's pipeline + * + * @generated from protobuf rpc: GetPipeline(org.apache.beam.model.job_management.v1.GetJobPipelineRequest) returns (org.apache.beam.model.job_management.v1.GetJobPipelineResponse); + */ + getPipeline(input: GetJobPipelineRequest, options?: RpcOptions): UnaryCall; + /** + * Cancel the job + * + * @generated from protobuf rpc: Cancel(org.apache.beam.model.job_management.v1.CancelJobRequest) returns (org.apache.beam.model.job_management.v1.CancelJobResponse); + */ + cancel(input: CancelJobRequest, options?: RpcOptions): UnaryCall; + /** + * Subscribe to a stream of state changes of the job, will immediately return the current state of the job as the first response. 
+ * + * @generated from protobuf rpc: GetStateStream(org.apache.beam.model.job_management.v1.GetJobStateRequest) returns (stream org.apache.beam.model.job_management.v1.JobStateEvent); + */ + getStateStream(input: GetJobStateRequest, options?: RpcOptions): ServerStreamingCall; + /** + * Subscribe to a stream of state changes and messages from the job + * + * @generated from protobuf rpc: GetMessageStream(org.apache.beam.model.job_management.v1.JobMessagesRequest) returns (stream org.apache.beam.model.job_management.v1.JobMessagesResponse); + */ + getMessageStream(input: JobMessagesRequest, options?: RpcOptions): ServerStreamingCall; + /** + * Fetch metrics for a given job + * + * @generated from protobuf rpc: GetJobMetrics(org.apache.beam.model.job_management.v1.GetJobMetricsRequest) returns (org.apache.beam.model.job_management.v1.GetJobMetricsResponse); + */ + getJobMetrics(input: GetJobMetricsRequest, options?: RpcOptions): UnaryCall; + /** + * Get the supported pipeline options of the runner + * + * @generated from protobuf rpc: DescribePipelineOptions(org.apache.beam.model.job_management.v1.DescribePipelineOptionsRequest) returns (org.apache.beam.model.job_management.v1.DescribePipelineOptionsResponse); + */ + describePipelineOptions(input: DescribePipelineOptionsRequest, options?: RpcOptions): UnaryCall; +} +/** + * Job Service for running RunnerAPI pipelines + * + * @generated from protobuf service org.apache.beam.model.job_management.v1.JobService + */ +export class JobServiceClient implements IJobServiceClient, ServiceInfo { + typeName = JobService.typeName; + methods = JobService.methods; + options = JobService.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * Prepare a job for execution. The job will not be executed until a call is made to run with the + * returned preparationId. 
+ * + * @generated from protobuf rpc: Prepare(org.apache.beam.model.job_management.v1.PrepareJobRequest) returns (org.apache.beam.model.job_management.v1.PrepareJobResponse); + */ + prepare(input: PrepareJobRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * Submit the job for execution + * + * @generated from protobuf rpc: Run(org.apache.beam.model.job_management.v1.RunJobRequest) returns (org.apache.beam.model.job_management.v1.RunJobResponse); + */ + run(input: RunJobRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[1], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * Get a list of all invoked jobs + * + * @generated from protobuf rpc: GetJobs(org.apache.beam.model.job_management.v1.GetJobsRequest) returns (org.apache.beam.model.job_management.v1.GetJobsResponse); + */ + getJobs(input: GetJobsRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[2], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * Get the current state of the job + * + * @generated from protobuf rpc: GetState(org.apache.beam.model.job_management.v1.GetJobStateRequest) returns (org.apache.beam.model.job_management.v1.JobStateEvent); + */ + getState(input: GetJobStateRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[3], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * Get the job's pipeline + * + * @generated from protobuf rpc: GetPipeline(org.apache.beam.model.job_management.v1.GetJobPipelineRequest) returns (org.apache.beam.model.job_management.v1.GetJobPipelineResponse); + */ + getPipeline(input: GetJobPipelineRequest, options?: 
RpcOptions): UnaryCall { + const method = this.methods[4], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * Cancel the job + * + * @generated from protobuf rpc: Cancel(org.apache.beam.model.job_management.v1.CancelJobRequest) returns (org.apache.beam.model.job_management.v1.CancelJobResponse); + */ + cancel(input: CancelJobRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[5], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * Subscribe to a stream of state changes of the job, will immediately return the current state of the job as the first response. + * + * @generated from protobuf rpc: GetStateStream(org.apache.beam.model.job_management.v1.GetJobStateRequest) returns (stream org.apache.beam.model.job_management.v1.JobStateEvent); + */ + getStateStream(input: GetJobStateRequest, options?: RpcOptions): ServerStreamingCall { + const method = this.methods[6], opt = this._transport.mergeOptions(options); + return stackIntercept("serverStreaming", this._transport, method, opt, input); + } + /** + * Subscribe to a stream of state changes and messages from the job + * + * @generated from protobuf rpc: GetMessageStream(org.apache.beam.model.job_management.v1.JobMessagesRequest) returns (stream org.apache.beam.model.job_management.v1.JobMessagesResponse); + */ + getMessageStream(input: JobMessagesRequest, options?: RpcOptions): ServerStreamingCall { + const method = this.methods[7], opt = this._transport.mergeOptions(options); + return stackIntercept("serverStreaming", this._transport, method, opt, input); + } + /** + * Fetch metrics for a given job + * + * @generated from protobuf rpc: GetJobMetrics(org.apache.beam.model.job_management.v1.GetJobMetricsRequest) returns (org.apache.beam.model.job_management.v1.GetJobMetricsResponse); + */ + getJobMetrics(input: GetJobMetricsRequest, options?: 
RpcOptions): UnaryCall { + const method = this.methods[8], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } + /** + * Get the supported pipeline options of the runner + * + * @generated from protobuf rpc: DescribePipelineOptions(org.apache.beam.model.job_management.v1.DescribePipelineOptionsRequest) returns (org.apache.beam.model.job_management.v1.DescribePipelineOptionsResponse); + */ + describePipelineOptions(input: DescribePipelineOptionsRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[9], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } +} diff --git a/sdks/node-ts/src/apache_beam/proto/beam_job_api.ts b/sdks/node-ts/src/apache_beam/proto/beam_job_api.ts new file mode 100644 index 000000000000..04682b7cd705 --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/beam_job_api.ts @@ -0,0 +1,1737 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "beam_job_api.proto" (package "org.apache.beam.model.job_management.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// +// +// +// Protocol Buffers describing the Job API, api for communicating with a runner +// for job submission over GRPC. +// +import { ServiceType } from "@protobuf-ts/runtime-rpc"; +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { MonitoringInfo } from "./metrics"; +import { Timestamp } from "./google/protobuf/timestamp"; +import { ApiServiceDescriptor } from "./endpoints"; +import { Struct } from "./google/protobuf/struct"; +import { Pipeline } from "./beam_runner_api"; +/** + * Prepare is a synchronous request that returns a preparationId back + * Throws error GRPC_STATUS_UNAVAILABLE if server is down + * Throws error ALREADY_EXISTS if the jobName is reused. Runners are permitted to deduplicate based on the name of the job. 
+ * Throws error UNKNOWN for all other issues + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.PrepareJobRequest + */ +export interface PrepareJobRequest { + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Pipeline pipeline = 1; + */ + pipeline?: Pipeline; // (required) + /** + * @generated from protobuf field: google.protobuf.Struct pipeline_options = 2; + */ + pipelineOptions?: Struct; // (required) + /** + * @generated from protobuf field: string job_name = 3; + */ + jobName: string; // (required) +} +/** + * @generated from protobuf message org.apache.beam.model.job_management.v1.PrepareJobResponse + */ +export interface PrepareJobResponse { + /** + * (required) The ID used to associate calls made while preparing the job. preparationId is used + * to run the job. + * + * @generated from protobuf field: string preparation_id = 1; + */ + preparationId: string; + /** + * An endpoint which exposes the Beam Artifact Staging API. Artifacts used by the job should be + * staged to this endpoint, and will be available during job execution. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor artifact_staging_endpoint = 2; + */ + artifactStagingEndpoint?: ApiServiceDescriptor; + /** + * (required) Token for the artifact staging. This token also represent an artifact + * staging session with the artifact staging service. + * + * @generated from protobuf field: string staging_session_token = 3; + */ + stagingSessionToken: string; +} +/** + * Run is a synchronous request that returns a jobId back. + * Throws error GRPC_STATUS_UNAVAILABLE if server is down + * Throws error NOT_FOUND if the preparation ID does not exist + * Throws error UNKNOWN for all other issues + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.RunJobRequest + */ +export interface RunJobRequest { + /** + * (required) The ID provided by an earlier call to prepare. Runs the job. 
All prerequisite tasks + * must have been completed. + * + * @generated from protobuf field: string preparation_id = 1; + */ + preparationId: string; + /** + * (optional) If any artifacts have been staged for this job, contains the retrieval_token returned + * from the CommitManifestResponse. + * + * @generated from protobuf field: string retrieval_token = 2; + */ + retrievalToken: string; +} +/** + * @generated from protobuf message org.apache.beam.model.job_management.v1.RunJobResponse + */ +export interface RunJobResponse { + /** + * @generated from protobuf field: string job_id = 1; + */ + jobId: string; // (required) The ID for the executing job +} +/** + * Cancel is a synchronus request that returns a job state back + * Throws error GRPC_STATUS_UNAVAILABLE if server is down + * Throws error NOT_FOUND if the jobId is not found + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.CancelJobRequest + */ +export interface CancelJobRequest { + /** + * @generated from protobuf field: string job_id = 1; + */ + jobId: string; // (required) +} +/** + * Valid responses include any terminal state or CANCELLING + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.CancelJobResponse + */ +export interface CancelJobResponse { + /** + * @generated from protobuf field: org.apache.beam.model.job_management.v1.JobState.Enum state = 1; + */ + state: JobState_Enum; // (required) +} +/** + * A subset of info provided by ProvisionApi.ProvisionInfo + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.JobInfo + */ +export interface JobInfo { + /** + * @generated from protobuf field: string job_id = 1; + */ + jobId: string; // (required) + /** + * @generated from protobuf field: string job_name = 2; + */ + jobName: string; // (required) + /** + * @generated from protobuf field: google.protobuf.Struct pipeline_options = 3; + */ + pipelineOptions?: Struct; // (required) + /** + * @generated from protobuf 
field: org.apache.beam.model.job_management.v1.JobState.Enum state = 4; + */ + state: JobState_Enum; // (required) +} +/** + * GetJobs is a synchronus request that returns a list of invoked jobs back + * Throws error GRPC_STATUS_UNAVAILABLE if server is down + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.GetJobsRequest + */ +export interface GetJobsRequest { +} +/** + * @generated from protobuf message org.apache.beam.model.job_management.v1.GetJobsResponse + */ +export interface GetJobsResponse { + /** + * @generated from protobuf field: repeated org.apache.beam.model.job_management.v1.JobInfo job_info = 1; + */ + jobInfo: JobInfo[]; // (required) +} +/** + * GetState is a synchronus request that returns a job state back + * Throws error GRPC_STATUS_UNAVAILABLE if server is down + * Throws error NOT_FOUND if the jobId is not found + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.GetJobStateRequest + */ +export interface GetJobStateRequest { + /** + * @generated from protobuf field: string job_id = 1; + */ + jobId: string; // (required) +} +/** + * @generated from protobuf message org.apache.beam.model.job_management.v1.JobStateEvent + */ +export interface JobStateEvent { + /** + * @generated from protobuf field: org.apache.beam.model.job_management.v1.JobState.Enum state = 1; + */ + state: JobState_Enum; // (required) + /** + * @generated from protobuf field: google.protobuf.Timestamp timestamp = 2; + */ + timestamp?: Timestamp; // (required) +} +/** + * GetPipeline is a synchronus request that returns a pipeline back + * Throws error GRPC_STATUS_UNAVAILABLE if server is down + * Throws error NOT_FOUND if the jobId is not found + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.GetJobPipelineRequest + */ +export interface GetJobPipelineRequest { + /** + * @generated from protobuf field: string job_id = 1; + */ + jobId: string; // (required) +} +/** + * @generated from 
protobuf message org.apache.beam.model.job_management.v1.GetJobPipelineResponse + */ +export interface GetJobPipelineResponse { + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Pipeline pipeline = 1; + */ + pipeline?: Pipeline; // (required) +} +/** + * GetJobMessages is a streaming api for streaming job messages from the service + * One request will connect you to the job and you'll get a stream of job state + * and job messages back; one is used for logging and the other for detecting + * the job ended. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.JobMessagesRequest + */ +export interface JobMessagesRequest { + /** + * @generated from protobuf field: string job_id = 1; + */ + jobId: string; // (required) +} +/** + * @generated from protobuf message org.apache.beam.model.job_management.v1.JobMessage + */ +export interface JobMessage { + /** + * @generated from protobuf field: string message_id = 1; + */ + messageId: string; + /** + * @generated from protobuf field: string time = 2; + */ + time: string; + /** + * @generated from protobuf field: org.apache.beam.model.job_management.v1.JobMessage.MessageImportance importance = 3; + */ + importance: JobMessage_MessageImportance; + /** + * @generated from protobuf field: string message_text = 4; + */ + messageText: string; +} +/** + * @generated from protobuf enum org.apache.beam.model.job_management.v1.JobMessage.MessageImportance + */ +export enum JobMessage_MessageImportance { + /** + * @generated from protobuf enum value: MESSAGE_IMPORTANCE_UNSPECIFIED = 0; + */ + MESSAGE_IMPORTANCE_UNSPECIFIED = 0, + /** + * @generated from protobuf enum value: JOB_MESSAGE_DEBUG = 1; + */ + JOB_MESSAGE_DEBUG = 1, + /** + * @generated from protobuf enum value: JOB_MESSAGE_DETAILED = 2; + */ + JOB_MESSAGE_DETAILED = 2, + /** + * @generated from protobuf enum value: JOB_MESSAGE_BASIC = 3; + */ + JOB_MESSAGE_BASIC = 3, + /** + * @generated from protobuf enum value: 
JOB_MESSAGE_WARNING = 4; + */ + JOB_MESSAGE_WARNING = 4, + /** + * @generated from protobuf enum value: JOB_MESSAGE_ERROR = 5; + */ + JOB_MESSAGE_ERROR = 5 +} +/** + * @generated from protobuf message org.apache.beam.model.job_management.v1.JobMessagesResponse + */ +export interface JobMessagesResponse { + /** + * @generated from protobuf oneof: response + */ + response: { + oneofKind: "messageResponse"; + /** + * @generated from protobuf field: org.apache.beam.model.job_management.v1.JobMessage message_response = 1; + */ + messageResponse: JobMessage; + } | { + oneofKind: "stateResponse"; + /** + * @generated from protobuf field: org.apache.beam.model.job_management.v1.JobStateEvent state_response = 2; + */ + stateResponse: JobStateEvent; + } | { + oneofKind: undefined; + }; +} +/** + * Enumeration of all JobStates + * + * The state transition diagram is: + * STOPPED -> STARTING -> RUNNING -> DONE + * \> FAILED + * \> CANCELLING -> CANCELLED + * \> UPDATING -> UPDATED + * \> DRAINING -> DRAINED + * + * Transitions are optional such that a job may go from STOPPED to RUNNING + * without needing to pass through STARTING. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.JobState + */ +export interface JobState { +} +/** + * @generated from protobuf enum org.apache.beam.model.job_management.v1.JobState.Enum + */ +export enum JobState_Enum { + /** + * The job state reported by a runner cannot be interpreted by the SDK. + * + * @generated from protobuf enum value: UNSPECIFIED = 0; + */ + UNSPECIFIED = 0, + /** + * The job has not yet started. + * + * @generated from protobuf enum value: STOPPED = 1; + */ + STOPPED = 1, + /** + * The job is currently running. + * + * @generated from protobuf enum value: RUNNING = 2; + */ + RUNNING = 2, + /** + * The job has successfully completed. (terminal) + * + * @generated from protobuf enum value: DONE = 3; + */ + DONE = 3, + /** + * The job has failed. 
(terminal) + * + * @generated from protobuf enum value: FAILED = 4; + */ + FAILED = 4, + /** + * The job has been explicitly cancelled. (terminal) + * + * @generated from protobuf enum value: CANCELLED = 5; + */ + CANCELLED = 5, + /** + * The job has been updated. (terminal) + * + * @generated from protobuf enum value: UPDATED = 6; + */ + UPDATED = 6, + /** + * The job is draining its data. (optional) + * + * @generated from protobuf enum value: DRAINING = 7; + */ + DRAINING = 7, + /** + * The job has completed draining its data. (terminal) + * + * @generated from protobuf enum value: DRAINED = 8; + */ + DRAINED = 8, + /** + * The job is starting up. + * + * @generated from protobuf enum value: STARTING = 9; + */ + STARTING = 9, + /** + * The job is cancelling. (optional) + * + * @generated from protobuf enum value: CANCELLING = 10; + */ + CANCELLING = 10, + /** + * The job is in the process of being updated. (optional) + * + * @generated from protobuf enum value: UPDATING = 11; + */ + UPDATING = 11 +} +/** + * @generated from protobuf message org.apache.beam.model.job_management.v1.GetJobMetricsRequest + */ +export interface GetJobMetricsRequest { + /** + * @generated from protobuf field: string job_id = 1; + */ + jobId: string; // (required) +} +/** + * @generated from protobuf message org.apache.beam.model.job_management.v1.GetJobMetricsResponse + */ +export interface GetJobMetricsResponse { + /** + * @generated from protobuf field: org.apache.beam.model.job_management.v1.MetricResults metrics = 1; + */ + metrics?: MetricResults; +} +/** + * All metrics for a given job. Runners may support one or the other or both. 
+ * + * @generated from protobuf message org.apache.beam.model.job_management.v1.MetricResults + */ +export interface MetricResults { + /** + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.MonitoringInfo attempted = 1; + */ + attempted: MonitoringInfo[]; + /** + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.MonitoringInfo committed = 2; + */ + committed: MonitoringInfo[]; +} +/** + * DescribePipelineOptions provides metadata about the options supported by a runner. + * It will be used by the SDK client to validate the options specified by or + * list available options to the user. + * Throws error GRPC_STATUS_UNAVAILABLE if server is down + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.DescribePipelineOptionsRequest + */ +export interface DescribePipelineOptionsRequest { +} +/** + * Type for pipeline options. + * Types mirror those of JSON, since that's how pipeline options are serialized. + * + * @generated from protobuf message org.apache.beam.model.job_management.v1.PipelineOptionType + */ +export interface PipelineOptionType { +} +/** + * @generated from protobuf enum org.apache.beam.model.job_management.v1.PipelineOptionType.Enum + */ +export enum PipelineOptionType_Enum { + /** + * @generated from protobuf enum value: STRING = 0; + */ + STRING = 0, + /** + * @generated from protobuf enum value: BOOLEAN = 1; + */ + BOOLEAN = 1, + /** + * whole numbers, see https://json-schema.org/understanding-json-schema/reference/numeric.html + * + * @generated from protobuf enum value: INTEGER = 2; + */ + INTEGER = 2, + /** + * @generated from protobuf enum value: NUMBER = 3; + */ + NUMBER = 3, + /** + * @generated from protobuf enum value: ARRAY = 4; + */ + ARRAY = 4, + /** + * @generated from protobuf enum value: OBJECT = 5; + */ + OBJECT = 5 +} +/** + * Metadata for a pipeline option. 
+ * + * @generated from protobuf message org.apache.beam.model.job_management.v1.PipelineOptionDescriptor + */ +export interface PipelineOptionDescriptor { + /** + * (Required) The option name. + * + * @generated from protobuf field: string name = 1; + */ + name: string; + /** + * (Required) Type of option. + * + * @generated from protobuf field: org.apache.beam.model.job_management.v1.PipelineOptionType.Enum type = 2; + */ + type: PipelineOptionType_Enum; + /** + * (Optional) Description suitable for display / help text. + * + * @generated from protobuf field: string description = 3; + */ + description: string; + /** + * (Optional) Default value. + * + * @generated from protobuf field: string default_value = 4; + */ + defaultValue: string; + /** + * (Required) The group this option belongs to. + * + * @generated from protobuf field: string group = 5; + */ + group: string; +} +/** + * @generated from protobuf message org.apache.beam.model.job_management.v1.DescribePipelineOptionsResponse + */ +export interface DescribePipelineOptionsResponse { + /** + * List of pipeline option descriptors. 
+ * + * @generated from protobuf field: repeated org.apache.beam.model.job_management.v1.PipelineOptionDescriptor options = 1; + */ + options: PipelineOptionDescriptor[]; +} +// @generated message type with reflection information, may provide speed optimized methods +class PrepareJobRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.PrepareJobRequest", [ + { no: 1, name: "pipeline", kind: "message", T: () => Pipeline }, + { no: 2, name: "pipeline_options", kind: "message", T: () => Struct }, + { no: 3, name: "job_name", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): PrepareJobRequest { + const message = { jobName: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: PrepareJobRequest): PrepareJobRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.Pipeline pipeline */ 1: + message.pipeline = Pipeline.internalBinaryRead(reader, reader.uint32(), options, message.pipeline); + break; + case /* google.protobuf.Struct pipeline_options */ 2: + message.pipelineOptions = Struct.internalBinaryRead(reader, reader.uint32(), options, message.pipelineOptions); + break; + case /* string job_name */ 3: + message.jobName = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: PrepareJobRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.Pipeline pipeline = 1; */ + if (message.pipeline) + Pipeline.internalBinaryWrite(message.pipeline, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* google.protobuf.Struct pipeline_options = 2; */ + if (message.pipelineOptions) + Struct.internalBinaryWrite(message.pipelineOptions, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* string job_name = 3; */ + if (message.jobName !== "") + writer.tag(3, WireType.LengthDelimited).string(message.jobName); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.PrepareJobRequest + */ +export const PrepareJobRequest = new PrepareJobRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class PrepareJobResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.PrepareJobResponse", [ + { no: 1, name: "preparation_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "artifact_staging_endpoint", kind: "message", T: () => ApiServiceDescriptor }, + { no: 3, name: "staging_session_token", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): PrepareJobResponse { + const message = { preparationId: "", stagingSessionToken: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: 
BinaryReadOptions, target?: PrepareJobResponse): PrepareJobResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string preparation_id */ 1: + message.preparationId = reader.string(); + break; + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor artifact_staging_endpoint */ 2: + message.artifactStagingEndpoint = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.artifactStagingEndpoint); + break; + case /* string staging_session_token */ 3: + message.stagingSessionToken = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: PrepareJobResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string preparation_id = 1; */ + if (message.preparationId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.preparationId); + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor artifact_staging_endpoint = 2; */ + if (message.artifactStagingEndpoint) + ApiServiceDescriptor.internalBinaryWrite(message.artifactStagingEndpoint, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* string staging_session_token = 3; */ + if (message.stagingSessionToken !== "") + writer.tag(3, WireType.LengthDelimited).string(message.stagingSessionToken); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.PrepareJobResponse + */ +export const PrepareJobResponse = new PrepareJobResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class RunJobRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.RunJobRequest", [ + { no: 1, name: "preparation_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "retrieval_token", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): RunJobRequest { + const message = { preparationId: "", retrievalToken: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: RunJobRequest): RunJobRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string preparation_id */ 1: + message.preparationId = reader.string(); + break; + case /* string retrieval_token */ 2: + message.retrievalToken = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: RunJobRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string preparation_id = 1; */ + if (message.preparationId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.preparationId); + /* string retrieval_token = 2; */ + if (message.retrievalToken !== "") + writer.tag(2, WireType.LengthDelimited).string(message.retrievalToken); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.RunJobRequest + */ +export const RunJobRequest = new RunJobRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class RunJobResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.RunJobResponse", [ + { no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): RunJobResponse { + const message = { jobId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: RunJobResponse): RunJobResponse { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string job_id */ 1: + message.jobId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: RunJobResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string job_id = 1; */ + if (message.jobId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.jobId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.RunJobResponse + */ +export const RunJobResponse = new RunJobResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class CancelJobRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.CancelJobRequest", [ + { no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): CancelJobRequest { + const message = { jobId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CancelJobRequest): CancelJobRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string job_id */ 1: + message.jobId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: CancelJobRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string job_id = 1; */ + if (message.jobId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.jobId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.CancelJobRequest + */ +export const CancelJobRequest = new CancelJobRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class CancelJobResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.CancelJobResponse", [ + { no: 1, name: "state", kind: "enum", T: () => ["org.apache.beam.model.job_management.v1.JobState.Enum", JobState_Enum] } + ]); + } + create(value?: PartialMessage): CancelJobResponse { + const message = { state: 0 }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CancelJobResponse): CancelJobResponse { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.job_management.v1.JobState.Enum state */ 1: + message.state = reader.int32(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: CancelJobResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.job_management.v1.JobState.Enum state = 1; */ + if (message.state !== 0) + writer.tag(1, WireType.Varint).int32(message.state); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.CancelJobResponse + */ +export const CancelJobResponse = new CancelJobResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobInfo$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.JobInfo", [ + { no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "job_name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "pipeline_options", kind: "message", T: () => Struct }, + { no: 4, name: "state", kind: "enum", T: () => ["org.apache.beam.model.job_management.v1.JobState.Enum", JobState_Enum] } + ]); + } + create(value?: PartialMessage): JobInfo { + const message = { jobId: "", jobName: "", state: 0 }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== 
undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobInfo): JobInfo { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string job_id */ 1: + message.jobId = reader.string(); + break; + case /* string job_name */ 2: + message.jobName = reader.string(); + break; + case /* google.protobuf.Struct pipeline_options */ 3: + message.pipelineOptions = Struct.internalBinaryRead(reader, reader.uint32(), options, message.pipelineOptions); + break; + case /* org.apache.beam.model.job_management.v1.JobState.Enum state */ 4: + message.state = reader.int32(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobInfo, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string job_id = 1; */ + if (message.jobId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.jobId); + /* string job_name = 2; */ + if (message.jobName !== "") + writer.tag(2, WireType.LengthDelimited).string(message.jobName); + /* google.protobuf.Struct pipeline_options = 3; */ + if (message.pipelineOptions) + Struct.internalBinaryWrite(message.pipelineOptions, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.job_management.v1.JobState.Enum state = 4; */ + if (message.state !== 0) + writer.tag(4, WireType.Varint).int32(message.state); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.JobInfo + */ +export const JobInfo = new JobInfo$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetJobsRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.GetJobsRequest", []); + } + create(value?: PartialMessage): GetJobsRequest { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetJobsRequest): GetJobsRequest { + return target ?? this.create(); + } + internalBinaryWrite(message: GetJobsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.GetJobsRequest + */ +export const GetJobsRequest = new GetJobsRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetJobsResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.GetJobsResponse", [ + { no: 1, name: "job_info", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => JobInfo } + ]); + } + create(value?: PartialMessage): GetJobsResponse { + const message = { jobInfo: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetJobsResponse): GetJobsResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.job_management.v1.JobInfo job_info */ 1: + message.jobInfo.push(JobInfo.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetJobsResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.job_management.v1.JobInfo job_info = 1; */ + for (let i = 0; i < message.jobInfo.length; i++) + JobInfo.internalBinaryWrite(message.jobInfo[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.GetJobsResponse + */ +export const GetJobsResponse = new GetJobsResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetJobStateRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.GetJobStateRequest", [ + { no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): GetJobStateRequest { + const message = { jobId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetJobStateRequest): GetJobStateRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string job_id */ 1: + message.jobId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetJobStateRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string job_id = 1; */ + if (message.jobId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.jobId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.GetJobStateRequest + */ +export const GetJobStateRequest = new GetJobStateRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobStateEvent$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.JobStateEvent", [ + { no: 1, name: "state", kind: "enum", T: () => ["org.apache.beam.model.job_management.v1.JobState.Enum", JobState_Enum] }, + { no: 2, name: "timestamp", kind: "message", T: () => Timestamp } + ]); + } + create(value?: PartialMessage): JobStateEvent { + const message = { state: 0 }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobStateEvent): JobStateEvent { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.job_management.v1.JobState.Enum state */ 1: + message.state = reader.int32(); + break; + case /* google.protobuf.Timestamp timestamp */ 2: + message.timestamp = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.timestamp); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobStateEvent, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.job_management.v1.JobState.Enum state = 1; */ + if (message.state !== 0) + writer.tag(1, WireType.Varint).int32(message.state); + /* google.protobuf.Timestamp timestamp = 2; */ + if (message.timestamp) + Timestamp.internalBinaryWrite(message.timestamp, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.JobStateEvent + */ +export const JobStateEvent = new JobStateEvent$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetJobPipelineRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.GetJobPipelineRequest", [ + { no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): GetJobPipelineRequest { + const message = { jobId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetJobPipelineRequest): GetJobPipelineRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string job_id */ 1: + message.jobId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetJobPipelineRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string job_id = 1; */ + if (message.jobId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.jobId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.GetJobPipelineRequest + */ +export const GetJobPipelineRequest = new GetJobPipelineRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetJobPipelineResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.GetJobPipelineResponse", [ + { no: 1, name: "pipeline", kind: "message", T: () => Pipeline } + ]); + } + create(value?: PartialMessage): GetJobPipelineResponse { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetJobPipelineResponse): GetJobPipelineResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.Pipeline pipeline */ 1: + message.pipeline = Pipeline.internalBinaryRead(reader, reader.uint32(), options, message.pipeline); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetJobPipelineResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.Pipeline pipeline = 1; */ + if (message.pipeline) + Pipeline.internalBinaryWrite(message.pipeline, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.GetJobPipelineResponse + */ +export const GetJobPipelineResponse = new GetJobPipelineResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobMessagesRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.JobMessagesRequest", [ + { no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): JobMessagesRequest { + const message = { jobId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobMessagesRequest): JobMessagesRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string job_id */ 1: + message.jobId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobMessagesRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string job_id = 1; */ + if (message.jobId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.jobId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.JobMessagesRequest + */ +export const JobMessagesRequest = new JobMessagesRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobMessage$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.JobMessage", [ + { no: 1, name: "message_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "time", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "importance", kind: "enum", T: () => ["org.apache.beam.model.job_management.v1.JobMessage.MessageImportance", JobMessage_MessageImportance] }, + { no: 4, name: "message_text", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): JobMessage { + const message = { messageId: "", time: "", importance: 0, messageText: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== 
undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobMessage): JobMessage { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string message_id */ 1: + message.messageId = reader.string(); + break; + case /* string time */ 2: + message.time = reader.string(); + break; + case /* org.apache.beam.model.job_management.v1.JobMessage.MessageImportance importance */ 3: + message.importance = reader.int32(); + break; + case /* string message_text */ 4: + message.messageText = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobMessage, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string message_id = 1; */ + if (message.messageId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.messageId); + /* string time = 2; */ + if (message.time !== "") + writer.tag(2, WireType.LengthDelimited).string(message.time); + /* org.apache.beam.model.job_management.v1.JobMessage.MessageImportance importance = 3; */ + if (message.importance !== 0) + writer.tag(3, WireType.Varint).int32(message.importance); + /* string message_text = 4; */ + if (message.messageText !== "") + writer.tag(4, WireType.LengthDelimited).string(message.messageText); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.JobMessage + */ +export const JobMessage = new JobMessage$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobMessagesResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.JobMessagesResponse", [ + { no: 1, name: "message_response", kind: "message", oneof: "response", T: () => JobMessage }, + { no: 2, name: "state_response", kind: "message", oneof: "response", T: () => JobStateEvent } + ]); + } + create(value?: PartialMessage): JobMessagesResponse { + const message = { response: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobMessagesResponse): JobMessagesResponse { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.job_management.v1.JobMessage message_response */ 1: + message.response = { + oneofKind: "messageResponse", + messageResponse: JobMessage.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).messageResponse) + }; + break; + case /* org.apache.beam.model.job_management.v1.JobStateEvent state_response */ 2: + message.response = { + oneofKind: "stateResponse", + stateResponse: JobStateEvent.internalBinaryRead(reader, reader.uint32(), options, (message.response as any).stateResponse) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JobMessagesResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.job_management.v1.JobMessage message_response = 1; */ + if (message.response.oneofKind === "messageResponse") + JobMessage.internalBinaryWrite(message.response.messageResponse, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.job_management.v1.JobStateEvent state_response = 2; */ + if (message.response.oneofKind === "stateResponse") + JobStateEvent.internalBinaryWrite(message.response.stateResponse, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.JobMessagesResponse + */ +export const JobMessagesResponse = new JobMessagesResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JobState$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.JobState", []); + } + create(value?: PartialMessage): JobState { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JobState): JobState { + return target ?? this.create(); + } + internalBinaryWrite(message: JobState, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.JobState + */ +export const JobState = new JobState$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetJobMetricsRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.GetJobMetricsRequest", [ + { no: 1, name: "job_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): GetJobMetricsRequest { + const message = { jobId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetJobMetricsRequest): GetJobMetricsRequest { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string job_id */ 1: + message.jobId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetJobMetricsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string job_id = 1; */ + if (message.jobId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.jobId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.GetJobMetricsRequest + */ +export const GetJobMetricsRequest = new GetJobMetricsRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetJobMetricsResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.GetJobMetricsResponse", [ + { no: 1, name: "metrics", kind: "message", T: () => MetricResults } + ]); + } + create(value?: PartialMessage): GetJobMetricsResponse { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetJobMetricsResponse): GetJobMetricsResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.job_management.v1.MetricResults metrics */ 1: + message.metrics = MetricResults.internalBinaryRead(reader, reader.uint32(), options, message.metrics); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetJobMetricsResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.job_management.v1.MetricResults metrics = 1; */ + if (message.metrics) + MetricResults.internalBinaryWrite(message.metrics, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.GetJobMetricsResponse + */ +export const GetJobMetricsResponse = new GetJobMetricsResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class MetricResults$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.MetricResults", [ + { no: 1, name: "attempted", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => MonitoringInfo }, + { no: 2, name: "committed", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => MonitoringInfo } + ]); + } + create(value?: PartialMessage): MetricResults { + const message = { attempted: [], committed: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MetricResults): MetricResults { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.pipeline.v1.MonitoringInfo attempted */ 1: + message.attempted.push(MonitoringInfo.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated org.apache.beam.model.pipeline.v1.MonitoringInfo committed */ 2: + message.committed.push(MonitoringInfo.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: MetricResults, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.pipeline.v1.MonitoringInfo attempted = 1; */ + for (let i = 0; i < message.attempted.length; i++) + MonitoringInfo.internalBinaryWrite(message.attempted[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* repeated org.apache.beam.model.pipeline.v1.MonitoringInfo committed = 2; */ + for (let i = 0; i < message.committed.length; i++) + MonitoringInfo.internalBinaryWrite(message.committed[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.MetricResults + */ +export const MetricResults = new MetricResults$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class DescribePipelineOptionsRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.DescribePipelineOptionsRequest", []); + } + create(value?: PartialMessage): DescribePipelineOptionsRequest { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DescribePipelineOptionsRequest): DescribePipelineOptionsRequest { + return target ?? this.create(); + } + internalBinaryWrite(message: DescribePipelineOptionsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.DescribePipelineOptionsRequest + */ +export const DescribePipelineOptionsRequest = new DescribePipelineOptionsRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class PipelineOptionType$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.PipelineOptionType", []); + } + create(value?: PartialMessage): PipelineOptionType { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: PipelineOptionType): PipelineOptionType { + return target ?? this.create(); + } + internalBinaryWrite(message: PipelineOptionType, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.PipelineOptionType + */ +export const PipelineOptionType = new PipelineOptionType$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class PipelineOptionDescriptor$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.PipelineOptionDescriptor", [ + { no: 1, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "type", kind: "enum", T: () => ["org.apache.beam.model.job_management.v1.PipelineOptionType.Enum", PipelineOptionType_Enum] }, + { no: 3, name: "description", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "default_value", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 5, name: "group", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): PipelineOptionDescriptor { + const message = { name: "", type: 0, description: "", defaultValue: "", group: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: PipelineOptionDescriptor): PipelineOptionDescriptor { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string name */ 1: + message.name = reader.string(); + break; + case /* org.apache.beam.model.job_management.v1.PipelineOptionType.Enum type */ 2: + message.type = reader.int32(); + break; + case /* string description */ 3: + message.description = reader.string(); + break; + case /* string default_value */ 4: + message.defaultValue = reader.string(); + break; + case /* string group */ 5: + message.group = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: PipelineOptionDescriptor, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string name = 1; */ + if (message.name !== "") + writer.tag(1, WireType.LengthDelimited).string(message.name); + /* org.apache.beam.model.job_management.v1.PipelineOptionType.Enum type = 2; */ + if (message.type !== 0) + writer.tag(2, WireType.Varint).int32(message.type); + /* string description = 3; */ + if (message.description !== "") + writer.tag(3, WireType.LengthDelimited).string(message.description); + /* string default_value = 4; */ + if (message.defaultValue !== "") + writer.tag(4, WireType.LengthDelimited).string(message.defaultValue); + /* string group = 5; */ + if (message.group !== "") + writer.tag(5, WireType.LengthDelimited).string(message.group); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.PipelineOptionDescriptor + */ +export const PipelineOptionDescriptor = new PipelineOptionDescriptor$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class DescribePipelineOptionsResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.job_management.v1.DescribePipelineOptionsResponse", [ + { no: 1, name: "options", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => PipelineOptionDescriptor } + ]); + } + create(value?: PartialMessage): DescribePipelineOptionsResponse { + const message = { options: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DescribePipelineOptionsResponse): DescribePipelineOptionsResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.job_management.v1.PipelineOptionDescriptor options */ 1: + message.options.push(PipelineOptionDescriptor.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: DescribePipelineOptionsResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.job_management.v1.PipelineOptionDescriptor options = 1; */ + for (let i = 0; i < message.options.length; i++) + PipelineOptionDescriptor.internalBinaryWrite(message.options[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.job_management.v1.DescribePipelineOptionsResponse + */ +export const DescribePipelineOptionsResponse = new DescribePipelineOptionsResponse$Type(); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.job_management.v1.JobService + */ +export const JobService = new ServiceType("org.apache.beam.model.job_management.v1.JobService", [ + { name: "Prepare", options: {}, I: PrepareJobRequest, O: PrepareJobResponse }, + { name: "Run", options: {}, I: RunJobRequest, O: RunJobResponse }, + { name: "GetJobs", options: {}, I: GetJobsRequest, O: GetJobsResponse }, + { name: "GetState", options: {}, I: GetJobStateRequest, O: JobStateEvent }, + { name: "GetPipeline", options: {}, I: GetJobPipelineRequest, O: GetJobPipelineResponse }, + { name: "Cancel", options: {}, I: CancelJobRequest, O: CancelJobResponse }, + { name: "GetStateStream", serverStreaming: true, options: {}, I: GetJobStateRequest, O: JobStateEvent }, + { name: "GetMessageStream", serverStreaming: true, options: {}, I: JobMessagesRequest, O: JobMessagesResponse }, + { name: "GetJobMetrics", options: {}, I: GetJobMetricsRequest, O: GetJobMetricsResponse }, + { name: "DescribePipelineOptions", options: {}, I: DescribePipelineOptionsRequest, O: 
DescribePipelineOptionsResponse } +]); diff --git a/sdks/node-ts/src/apache_beam/proto/beam_provision_api.client.ts b/sdks/node-ts/src/apache_beam/proto/beam_provision_api.client.ts new file mode 100644 index 000000000000..8778a3c5aadc --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/beam_provision_api.client.ts @@ -0,0 +1,72 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "beam_provision_api.proto" (package "org.apache.beam.model.fn_execution.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers describing the Provision API, for communicating with a runner +// for job and environment provisioning information over GRPC. 
+// +import type { RpcTransport } from "@protobuf-ts/runtime-rpc"; +import type { ServiceInfo } from "@protobuf-ts/runtime-rpc"; +import { ProvisionService } from "./beam_provision_api"; +import { stackIntercept } from "@protobuf-ts/runtime-rpc"; +import type { GetProvisionInfoResponse } from "./beam_provision_api"; +import type { GetProvisionInfoRequest } from "./beam_provision_api"; +import type { UnaryCall } from "@protobuf-ts/runtime-rpc"; +import type { RpcOptions } from "@protobuf-ts/runtime-rpc"; +/** + * A service to provide runtime provisioning information to the SDK harness + * worker instances -- such as pipeline options, resource constraints and + * other job metadata -- needed by an SDK harness instance to initialize. + * + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.ProvisionService + */ +export interface IProvisionServiceClient { + /** + * Get provision information for the SDK harness worker instance. + * + * @generated from protobuf rpc: GetProvisionInfo(org.apache.beam.model.fn_execution.v1.GetProvisionInfoRequest) returns (org.apache.beam.model.fn_execution.v1.GetProvisionInfoResponse); + */ + getProvisionInfo(input: GetProvisionInfoRequest, options?: RpcOptions): UnaryCall; +} +/** + * A service to provide runtime provisioning information to the SDK harness + * worker instances -- such as pipeline options, resource constraints and + * other job metadata -- needed by an SDK harness instance to initialize. + * + * @generated from protobuf service org.apache.beam.model.fn_execution.v1.ProvisionService + */ +export class ProvisionServiceClient implements IProvisionServiceClient, ServiceInfo { + typeName = ProvisionService.typeName; + methods = ProvisionService.methods; + options = ProvisionService.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * Get provision information for the SDK harness worker instance. 
+ * + * @generated from protobuf rpc: GetProvisionInfo(org.apache.beam.model.fn_execution.v1.GetProvisionInfoRequest) returns (org.apache.beam.model.fn_execution.v1.GetProvisionInfoResponse); + */ + getProvisionInfo(input: GetProvisionInfoRequest, options?: RpcOptions): UnaryCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("unary", this._transport, method, opt, input); + } +} diff --git a/sdks/node-ts/src/apache_beam/proto/beam_provision_api.ts b/sdks/node-ts/src/apache_beam/proto/beam_provision_api.ts new file mode 100644 index 000000000000..737bf2044d79 --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/beam_provision_api.ts @@ -0,0 +1,342 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "beam_provision_api.proto" (package "org.apache.beam.model.fn_execution.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers describing the Provision API, for communicating with a runner +// for job and environment provisioning information over GRPC. 
+// +import { ServiceType } from "@protobuf-ts/runtime-rpc"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { ArtifactInformation } from "./beam_runner_api"; +import { ApiServiceDescriptor } from "./endpoints"; +import { Struct } from "./google/protobuf/struct"; +/** + * A request to get the provision info of a SDK harness worker instance. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.GetProvisionInfoRequest + */ +export interface GetProvisionInfoRequest { +} +/** + * A response containing the provision info of a SDK harness worker instance. + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.GetProvisionInfoResponse + */ +export interface GetProvisionInfoResponse { + /** + * @generated from protobuf field: org.apache.beam.model.fn_execution.v1.ProvisionInfo info = 1; + */ + info?: ProvisionInfo; +} +/** + * Runtime provisioning information for a SDK harness worker instance, + * such as pipeline options, resource constraints and other job metadata + * + * @generated from protobuf message org.apache.beam.model.fn_execution.v1.ProvisionInfo + */ +export interface ProvisionInfo { + /** + * (required) Pipeline options. For non-template jobs, the options are + * identical to what is passed to job submission. 
+ * + * @generated from protobuf field: google.protobuf.Struct pipeline_options = 3; + */ + pipelineOptions?: Struct; + /** + * (required) The artifact retrieval token produced by + * LegacyArtifactStagingService.CommitManifestResponse. + * + * @generated from protobuf field: string retrieval_token = 6; + */ + retrievalToken: string; + /** + * (optional) The endpoint that the runner is hosting for the SDK to submit + * status reports to during pipeline execution. This field will only be + * populated if the runner supports SDK status reports. For more details see + * https://s.apache.org/beam-fn-api-harness-status + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor status_endpoint = 7; + */ + statusEndpoint?: ApiServiceDescriptor; + /** + * (optional) The logging endpoint this SDK should use. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor logging_endpoint = 8; + */ + loggingEndpoint?: ApiServiceDescriptor; + /** + * (optional) The artifact retrieval endpoint this SDK should use. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor artifact_endpoint = 9; + */ + artifactEndpoint?: ApiServiceDescriptor; + /** + * (optional) The control endpoint this SDK should use. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor control_endpoint = 10; + */ + controlEndpoint?: ApiServiceDescriptor; + /** + * The set of dependencies that should be staged into this environment. + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.ArtifactInformation dependencies = 11; + */ + dependencies: ArtifactInformation[]; + /** + * (optional) A set of capabilities that this SDK is allowed to use in its + * interactions with this runner. 
+ * + * @generated from protobuf field: repeated string runner_capabilities = 12; + */ + runnerCapabilities: string[]; + /** + * (optional) Runtime environment metadata that are static throughout the + * pipeline execution. + * + * @generated from protobuf field: map metadata = 13; + */ + metadata: { + [key: string]: string; + }; + /** + * (optional) If this environment supports SIBLING_WORKERS, used to indicate + * the ids of sibling workers, if any, that should be started in addition + * to this worker (which already has its own worker id). + * + * @generated from protobuf field: repeated string sibling_worker_ids = 14; + */ + siblingWorkerIds: string[]; +} +// @generated message type with reflection information, may provide speed optimized methods +class GetProvisionInfoRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.GetProvisionInfoRequest", []); + } + create(value?: PartialMessage): GetProvisionInfoRequest { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetProvisionInfoRequest): GetProvisionInfoRequest { + return target ?? this.create(); + } + internalBinaryWrite(message: GetProvisionInfoRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.GetProvisionInfoRequest + */ +export const GetProvisionInfoRequest = new GetProvisionInfoRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GetProvisionInfoResponse$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.GetProvisionInfoResponse", [ + { no: 1, name: "info", kind: "message", T: () => ProvisionInfo } + ]); + } + create(value?: PartialMessage): GetProvisionInfoResponse { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GetProvisionInfoResponse): GetProvisionInfoResponse { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.fn_execution.v1.ProvisionInfo info */ 1: + message.info = ProvisionInfo.internalBinaryRead(reader, reader.uint32(), options, message.info); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GetProvisionInfoResponse, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.fn_execution.v1.ProvisionInfo info = 1; */ + if (message.info) + ProvisionInfo.internalBinaryWrite(message.info, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.GetProvisionInfoResponse + */ +export const GetProvisionInfoResponse = new GetProvisionInfoResponse$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProvisionInfo$Type extends MessageType { + constructor() { + super("org.apache.beam.model.fn_execution.v1.ProvisionInfo", [ + { no: 3, name: "pipeline_options", kind: "message", T: () => Struct }, + { no: 6, name: "retrieval_token", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 7, name: "status_endpoint", kind: "message", T: () => ApiServiceDescriptor }, + { no: 8, name: "logging_endpoint", kind: "message", T: () => ApiServiceDescriptor }, + { no: 9, name: "artifact_endpoint", kind: "message", T: () => ApiServiceDescriptor }, + { no: 10, name: "control_endpoint", kind: "message", T: () => ApiServiceDescriptor }, + { no: 11, name: "dependencies", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ArtifactInformation }, + { no: 12, name: "runner_capabilities", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, + { no: 13, name: "metadata", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "scalar", T: 9 /*ScalarType.STRING*/ } }, + { no: 14, name: "sibling_worker_ids", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 
/*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ProvisionInfo { + const message = { retrievalToken: "", dependencies: [], runnerCapabilities: [], metadata: {}, siblingWorkerIds: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProvisionInfo): ProvisionInfo { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* google.protobuf.Struct pipeline_options */ 3: + message.pipelineOptions = Struct.internalBinaryRead(reader, reader.uint32(), options, message.pipelineOptions); + break; + case /* string retrieval_token */ 6: + message.retrievalToken = reader.string(); + break; + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor status_endpoint */ 7: + message.statusEndpoint = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.statusEndpoint); + break; + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor logging_endpoint */ 8: + message.loggingEndpoint = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.loggingEndpoint); + break; + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor artifact_endpoint */ 9: + message.artifactEndpoint = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.artifactEndpoint); + break; + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor control_endpoint */ 10: + message.controlEndpoint = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.controlEndpoint); + break; + case /* repeated org.apache.beam.model.pipeline.v1.ArtifactInformation dependencies */ 11: + 
message.dependencies.push(ArtifactInformation.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated string runner_capabilities */ 12: + message.runnerCapabilities.push(reader.string()); + break; + case /* map metadata */ 13: + this.binaryReadMap13(message.metadata, reader, options); + break; + case /* repeated string sibling_worker_ids */ 14: + message.siblingWorkerIds.push(reader.string()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap13(map: ProvisionInfo["metadata"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ProvisionInfo["metadata"] | undefined, val: ProvisionInfo["metadata"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = reader.string(); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.fn_execution.v1.ProvisionInfo.metadata"); + } + } + map[key ?? ""] = val ?? 
""; + } + internalBinaryWrite(message: ProvisionInfo, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* google.protobuf.Struct pipeline_options = 3; */ + if (message.pipelineOptions) + Struct.internalBinaryWrite(message.pipelineOptions, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* string retrieval_token = 6; */ + if (message.retrievalToken !== "") + writer.tag(6, WireType.LengthDelimited).string(message.retrievalToken); + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor status_endpoint = 7; */ + if (message.statusEndpoint) + ApiServiceDescriptor.internalBinaryWrite(message.statusEndpoint, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor logging_endpoint = 8; */ + if (message.loggingEndpoint) + ApiServiceDescriptor.internalBinaryWrite(message.loggingEndpoint, writer.tag(8, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor artifact_endpoint = 9; */ + if (message.artifactEndpoint) + ApiServiceDescriptor.internalBinaryWrite(message.artifactEndpoint, writer.tag(9, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor control_endpoint = 10; */ + if (message.controlEndpoint) + ApiServiceDescriptor.internalBinaryWrite(message.controlEndpoint, writer.tag(10, WireType.LengthDelimited).fork(), options).join(); + /* repeated org.apache.beam.model.pipeline.v1.ArtifactInformation dependencies = 11; */ + for (let i = 0; i < message.dependencies.length; i++) + ArtifactInformation.internalBinaryWrite(message.dependencies[i], writer.tag(11, WireType.LengthDelimited).fork(), options).join(); + /* repeated string runner_capabilities = 12; */ + for (let i = 0; i < message.runnerCapabilities.length; i++) + writer.tag(12, WireType.LengthDelimited).string(message.runnerCapabilities[i]); + /* map metadata = 13; */ + for (let k of 
Object.keys(message.metadata)) + writer.tag(13, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k).tag(2, WireType.LengthDelimited).string(message.metadata[k]).join(); + /* repeated string sibling_worker_ids = 14; */ + for (let i = 0; i < message.siblingWorkerIds.length; i++) + writer.tag(14, WireType.LengthDelimited).string(message.siblingWorkerIds[i]); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.fn_execution.v1.ProvisionInfo + */ +export const ProvisionInfo = new ProvisionInfo$Type(); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.fn_execution.v1.ProvisionService + */ +export const ProvisionService = new ServiceType("org.apache.beam.model.fn_execution.v1.ProvisionService", [ + { name: "GetProvisionInfo", options: {}, I: GetProvisionInfoRequest, O: GetProvisionInfoResponse } +]); diff --git a/sdks/node-ts/src/apache_beam/proto/beam_runner_api.client.ts b/sdks/node-ts/src/apache_beam/proto/beam_runner_api.client.ts new file mode 100644 index 000000000000..0ebc5d005e94 --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/beam_runner_api.client.ts @@ -0,0 +1,64 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "beam_runner_api.proto" (package "org.apache.beam.model.pipeline.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers describing the Runner API, which is the runner-independent, +// SDK-independent definition of the Beam model. +// +import type { RpcTransport } from "@protobuf-ts/runtime-rpc"; +import type { ServiceInfo } from "@protobuf-ts/runtime-rpc"; +import { TestStreamService } from "./beam_runner_api"; +import { stackIntercept } from "@protobuf-ts/runtime-rpc"; +import type { TestStreamPayload_Event } from "./beam_runner_api"; +import type { EventsRequest } from "./beam_runner_api"; +import type { ServerStreamingCall } from "@protobuf-ts/runtime-rpc"; +import type { RpcOptions } from "@protobuf-ts/runtime-rpc"; +/** + * @generated from protobuf service org.apache.beam.model.pipeline.v1.TestStreamService + */ +export interface ITestStreamServiceClient { + /** + * A TestStream will request for events using this RPC. + * + * @generated from protobuf rpc: Events(org.apache.beam.model.pipeline.v1.EventsRequest) returns (stream org.apache.beam.model.pipeline.v1.TestStreamPayload.Event); + */ + events(input: EventsRequest, options?: RpcOptions): ServerStreamingCall; +} +/** + * @generated from protobuf service org.apache.beam.model.pipeline.v1.TestStreamService + */ +export class TestStreamServiceClient implements ITestStreamServiceClient, ServiceInfo { + typeName = TestStreamService.typeName; + methods = TestStreamService.methods; + options = TestStreamService.options; + constructor(private readonly _transport: RpcTransport) { + } + /** + * A TestStream will request for events using this RPC. 
+ * + * @generated from protobuf rpc: Events(org.apache.beam.model.pipeline.v1.EventsRequest) returns (stream org.apache.beam.model.pipeline.v1.TestStreamPayload.Event); + */ + events(input: EventsRequest, options?: RpcOptions): ServerStreamingCall { + const method = this.methods[0], opt = this._transport.mergeOptions(options); + return stackIntercept("serverStreaming", this._transport, method, opt, input); + } +} diff --git a/sdks/node-ts/src/apache_beam/proto/beam_runner_api.ts b/sdks/node-ts/src/apache_beam/proto/beam_runner_api.ts new file mode 100644 index 000000000000..edb945e41e12 --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/beam_runner_api.ts @@ -0,0 +1,8149 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "beam_runner_api.proto" (package "org.apache.beam.model.pipeline.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers describing the Runner API, which is the runner-independent, +// SDK-independent definition of the Beam model. 
+// +import { ServiceType } from "@protobuf-ts/runtime-rpc"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { ApiServiceDescriptor } from "./endpoints"; +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.BeamConstants + */ +export interface BeamConstants { +} +/** + * All timestamps in milliseconds since Jan 1, 1970. + * + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.BeamConstants.Constants + */ +export enum BeamConstants_Constants { + /** + * All timestamps of elements or window boundaries must be within + * the interval [MIN_TIMESTAMP_MILLIS, MAX_TIMESTAMP_MILLIS]. + * The smallest representable timestamp of an element or a window boundary. + * + * @generated from protobuf enum value: MIN_TIMESTAMP_MILLIS = 0; + */ + MIN_TIMESTAMP_MILLIS = 0, + /** + * The largest representable timestamp of an element or a window boundary. + * + * @generated from protobuf enum value: MAX_TIMESTAMP_MILLIS = 1; + */ + MAX_TIMESTAMP_MILLIS = 1, + /** + * The maximum timestamp for the global window. + * Triggers use max timestamp to set timers' timestamp. Timers fire when + * the watermark passes their timestamps. So, the timestamp needs to be + * smaller than the MAX_TIMESTAMP_MILLIS. + * One standard day is subtracted from MAX_TIMESTAMP_MILLIS to make sure + * the max timestamp is smaller than MAX_TIMESTAMP_MILLIS even after rounding up + * to seconds or minutes. 
+ * + * @generated from protobuf enum value: GLOBAL_WINDOW_MAX_TIMESTAMP_MILLIS = 2; + */ + GLOBAL_WINDOW_MAX_TIMESTAMP_MILLIS = 2 +} +/** + * A set of mappings from id to message. This is included as an optional field + * on any proto message that may contain references needing resolution. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Components + */ +export interface Components { + /** + * (Required) A map from pipeline-scoped id to PTransform. + * + * Keys of the transforms map may be used by runners to identify pipeline + * steps. Hence it's recommended to use strings that are not too long that + * match regex '[A-Za-z0-9-_]+'. + * + * @generated from protobuf field: map transforms = 1; + */ + transforms: { + [key: string]: PTransform; + }; + /** + * (Required) A map from pipeline-scoped id to PCollection. + * + * @generated from protobuf field: map pcollections = 2; + */ + pcollections: { + [key: string]: PCollection; + }; + /** + * (Required) A map from pipeline-scoped id to WindowingStrategy. + * + * @generated from protobuf field: map windowing_strategies = 3; + */ + windowingStrategies: { + [key: string]: WindowingStrategy; + }; + /** + * (Required) A map from pipeline-scoped id to Coder. + * + * @generated from protobuf field: map coders = 4; + */ + coders: { + [key: string]: Coder; + }; + /** + * (Required) A map from pipeline-scoped id to Environment. + * + * @generated from protobuf field: map environments = 5; + */ + environments: { + [key: string]: Environment; + }; +} +/** + * A Pipeline is a hierarchical graph of PTransforms, linked + * by PCollections. A typical graph may look like: + * + * Impulse -> PCollection -> ParDo -> PCollection -> GroupByKey -> ... + * \> PCollection -> ParDo -> ... + * \> ParDo -> ... + * Impulse -> PCollection -> ParDo -> PCollection -> ... 
+ * + * This is represented by a number of by-reference maps to transforms, + * PCollections, SDK environments, coders, etc., for + * supporting compact reuse and arbitrary graph structure. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Pipeline + */ +export interface Pipeline { + /** + * (Required) The coders, UDFs, graph nodes, etc, that make up + * this pipeline. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Components components = 1; + */ + components?: Components; + /** + * (Required) The ids of all PTransforms that are not contained within another + * PTransform. These must be in shallow topological order, so that traversing + * them recursively in this order yields a recursively topological traversal. + * + * @generated from protobuf field: repeated string root_transform_ids = 2; + */ + rootTransformIds: string[]; + /** + * (Optional) Static display data for the pipeline. If there is none, + * it may be omitted. + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.DisplayData display_data = 3; + */ + displayData: DisplayData[]; + /** + * (Optional) A set of requirements that the runner MUST understand and be + * able to faithfully provide in order to execute this pipeline. These + * may indicate that a runner must inspect new fields on a component or + * provide additional guarantees when processing specific transforms. + * A runner should reject any pipelines with unknown requirements. + * + * @generated from protobuf field: repeated string requirements = 4; + */ + requirements: string[]; +} +/** + * Transforms are the operations in your pipeline, and provide a generic + * processing framework. You provide processing logic in the form of a function + * object (colloquially referred to as “user code”), and your user code is + * applied to each element of an input PCollection (or more than one + * PCollection). 
Depending on the pipeline runner and back-end that you choose, + * many different workers across a cluster may execute instances of your user + * code in parallel. The user code running on each worker generates the output + * elements that are ultimately added to the final output PCollection that the + * transform produces. + * + * The Beam SDKs contain a number of different transforms that you can apply to + * your pipeline’s PCollections. These include general-purpose core transforms, + * such as ParDo or Combine. There are also pre-written composite transforms + * included in the SDKs, which combine one or more of the core transforms in a + * useful processing pattern, such as counting or combining elements in a + * collection. You can also define your own more complex composite transforms to + * fit your pipeline’s exact use case. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.PTransform + */ +export interface PTransform { + /** + * (Required) A unique name for the application node. + * + * Ideally, this should be stable over multiple evolutions of a pipeline + * for the purposes of logging and associating pipeline state with a node, + * etc. + * + * If it is not stable, then the runner decides what will happen. But, most + * importantly, it must always be here and be unique, even if it is + * autogenerated. + * + * @generated from protobuf field: string unique_name = 5; + */ + uniqueName: string; + /** + * (Optional) A URN and payload that, together, fully defined the semantics + * of this transform. + * + * If absent, this must be an "anonymous" composite transform. + * + * For primitive transform in the Runner API, this is required, and the + * payloads are well-defined messages. When the URN indicates ParDo it + * is a ParDoPayload, and so on. For some special composite transforms, + * the payload is also officially defined. See StandardPTransforms for + * details. 
+ * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec spec = 1; + */ + spec?: FunctionSpec; + /** + * (Optional) A list of the ids of transforms that it contains. + * + * Primitive transforms (see StandardPTransforms.Primitives) are not allowed + * to specify subtransforms. + * + * Note that a composite transform may have zero subtransforms as long as it + * only outputs PCollections that are in its inputs. + * + * @generated from protobuf field: repeated string subtransforms = 2; + */ + subtransforms: string[]; + /** + * (Required) A map from local names of inputs (unique only with this map, and + * likely embedded in the transform payload and serialized user code) to + * PCollection ids. + * + * The payload for this transform may clarify the relationship of these + * inputs. For example: + * + * - for a Flatten transform they are merged + * - for a ParDo transform, some may be side inputs + * + * All inputs are recorded here so that the topological ordering of + * the graph is consistent whether or not the payload is understood. + * + * @generated from protobuf field: map inputs = 3; + */ + inputs: { + [key: string]: string; + }; + /** + * (Required) A map from local names of outputs (unique only within this map, + * and likely embedded in the transform payload and serialized user code) + * to PCollection ids. + * + * The URN or payload for this transform node may clarify the type and + * relationship of these outputs. For example: + * + * - for a ParDo transform, these are tags on PCollections, which will be + * embedded in the DoFn. + * + * @generated from protobuf field: map outputs = 4; + */ + outputs: { + [key: string]: string; + }; + /** + * (Optional) Static display data for this PTransform application. If + * there is none, it may be omitted. 
+ * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.DisplayData display_data = 6; + */ + displayData: DisplayData[]; + /** + * Environment where the current PTransform should be executed in. + * + * Transforms that are required to be implemented by a runner must omit this. + * All other transforms are required to specify this. + * + * @generated from protobuf field: string environment_id = 7; + */ + environmentId: string; + /** + * (Optional) A map from URNs designating a type of annotation, to the + * annotation in binary format. For example, an annotation could indicate + * that this PTransform has specific privacy properties. + * + * A runner MAY ignore types of annotations it doesn't understand. Therefore + * annotations MUST NOT be used for metadata that can affect correct + * execution of the transform. + * + * @generated from protobuf field: map annotations = 8; + */ + annotations: { + [key: string]: Uint8Array; + }; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.StandardPTransforms + */ +export interface StandardPTransforms { +} +/** + * Primitive transforms may not specify composite sub-transforms. + * + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardPTransforms.Primitives + */ +export enum StandardPTransforms_Primitives { + /** + * ParDo is a Beam transform for generic parallel processing. The ParDo + * processing paradigm is similar to the “Map” phase of a + * Map/Shuffle/Reduce-style algorithm: a ParDo transform considers each + * element in the input PCollection, performs some processing function + * (your user code) on that element, and emits zero, one, or multiple + * elements to an output PCollection. + * + * See https://beam.apache.org/documentation/programming-guide/#pardo + * for additional details. 
+ * + * Payload: ParDoPayload + * + * @generated from protobuf enum value: PAR_DO = 0; + */ + PAR_DO = 0, + /** + * Flatten is a Beam transform for PCollection objects that store the same + * data type. Flatten merges multiple PCollection objects into a single + * logical PCollection. + * + * See https://beam.apache.org/documentation/programming-guide/#flatten + * for additional details. + * + * Payload: None + * + * @generated from protobuf enum value: FLATTEN = 1; + */ + FLATTEN = 1, + /** + * GroupByKey is a Beam transform for processing collections of key/value + * pairs. It’s a parallel reduction operation, analogous to the Shuffle + * phase of a Map/Shuffle/Reduce-style algorithm. The input to GroupByKey is + * a collection of key/value pairs that represents a multimap, where the + * collection contains multiple pairs that have the same key, but different + * values. Given such a collection, you use GroupByKey to collect all of the + * values associated with each unique key. + * + * See https://beam.apache.org/documentation/programming-guide/#groupbykey + * for additional details. + * + * Never defines an environment as the runner is required to implement this + * transform. + * + * Payload: None + * + * @generated from protobuf enum value: GROUP_BY_KEY = 2; + */ + GROUP_BY_KEY = 2, + /** + * A transform which produces a single empty byte array at the minimum + * timestamp in the GlobalWindow. + * + * Never defines an environment as the runner is required to implement this + * transform. + * + * Payload: None + * + * @generated from protobuf enum value: IMPULSE = 3; + */ + IMPULSE = 3, + /** + * Windowing subdivides a PCollection according to the timestamps of its + * individual elements. Transforms that aggregate multiple elements, such as + * GroupByKey and Combine, work implicitly on a per-window basis — they + * process each PCollection as a succession of multiple, finite windows, + * though the entire collection itself may be of unbounded size. 
+ * + * See https://beam.apache.org/documentation/programming-guide/#windowing + * for additional details. + * + * Payload: WindowIntoPayload + * + * @generated from protobuf enum value: ASSIGN_WINDOWS = 4; + */ + ASSIGN_WINDOWS = 4, + /** + * A testing input that generates an unbounded {@link PCollection} of + * elements, advancing the watermark and processing time as elements are + * emitted. After all of the specified elements are emitted, ceases to + * produce output. + * + * See https://beam.apache.org/blog/2016/10/20/test-stream.html + * for additional details. + * + * Payload: TestStreamPayload + * + * @generated from protobuf enum value: TEST_STREAM = 5; + */ + TEST_STREAM = 5, + /** + * Represents mapping of main input window onto side input window. + * + * Side input window mapping function: + * Input: KV + * Output: KV + * + * For each main input window, the side input window is returned. The + * nonce is used by a runner to associate each input with its output. + * The nonce is represented as an opaque set of bytes. + * + * Payload: SideInput#window_mapping_fn FunctionSpec + * + * @generated from protobuf enum value: MAP_WINDOWS = 6; + */ + MAP_WINDOWS = 6, + /** + * Used to merge windows during a GroupByKey. + * + * Window merging function: + * Input: KV> + * Output: KV, iterable>>> + * + * For each set of original windows, a list of all unmerged windows is + * output alongside a map of merged window to set of consumed windows. + * All original windows must be contained in either the unmerged original + * window set or one of the consumed original window sets. Each original + * window can only be part of one output set. The nonce is used by a runner + * to associate each input with its output. The nonce is represented as an + * opaque set of bytes. 
+ * + * Payload: WindowingStrategy#window_fn FunctionSpec + * + * @generated from protobuf enum value: MERGE_WINDOWS = 7; + */ + MERGE_WINDOWS = 7, + /** + * A transform that translates a given element to its human-readable + * representation. + * + * Input: KV + * Output: KV + * + * For each given element, the implementation returns the best-effort + * human-readable representation. When possible, the implementation could + * call a user-overridable method on the type. For example, Java could + * call `toString()`, Python could call `str()`, Golang could call + * `String()`. The nonce is used by a runner to associate each input with + * its output. The nonce is represented as an opaque set of bytes. + * + * Payload: none + * + * @generated from protobuf enum value: TO_STRING = 8; + */ + TO_STRING = 8 +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardPTransforms.DeprecatedPrimitives + */ +export enum StandardPTransforms_DeprecatedPrimitives { + /** + * Represents the operation to read a Bounded or Unbounded source. + * Payload: ReadPayload. + * + * @generated from protobuf enum value: READ = 0; + */ + READ = 0, + /** + * Runners should move away from translating `CreatePCollectionView` and treat this as + * part of the translation for a `ParDo` side input. + * + * @generated from protobuf enum value: CREATE_VIEW = 1; + */ + CREATE_VIEW = 1 +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardPTransforms.Composites + */ +export enum StandardPTransforms_Composites { + /** + * Represents the Combine.perKey() operation. + * If this is produced by an SDK, it is assumed that the SDK understands + * each of CombineComponents. + * Payload: CombinePayload + * + * @generated from protobuf enum value: COMBINE_PER_KEY = 0; + */ + COMBINE_PER_KEY = 0, + /** + * Represents the Combine.globally() operation. + * If this is produced by an SDK, it is assumed that the SDK understands + * each of CombineComponents. 
+ * Payload: CombinePayload + * + * @generated from protobuf enum value: COMBINE_GLOBALLY = 1; + */ + COMBINE_GLOBALLY = 1, + /** + * Represents the Reshuffle operation. + * + * @generated from protobuf enum value: RESHUFFLE = 2; + */ + RESHUFFLE = 2, + /** + * Less well-known. Payload: WriteFilesPayload. + * + * @generated from protobuf enum value: WRITE_FILES = 3; + */ + WRITE_FILES = 3, + /** + * Payload: PubSubReadPayload. + * + * @generated from protobuf enum value: PUBSUB_READ = 4; + */ + PUBSUB_READ = 4, + /** + * Payload: PubSubWritePayload. + * + * @generated from protobuf enum value: PUBSUB_WRITE = 5; + */ + PUBSUB_WRITE = 5, + /** + * Represents the GroupIntoBatches.WithShardedKey operation. + * Payload: GroupIntoBatchesPayload + * + * @generated from protobuf enum value: GROUP_INTO_BATCHES_WITH_SHARDED_KEY = 6; + */ + GROUP_INTO_BATCHES_WITH_SHARDED_KEY = 6 +} +/** + * Payload for all of these: CombinePayload + * + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardPTransforms.CombineComponents + */ +export enum StandardPTransforms_CombineComponents { + /** + * Represents the Pre-Combine part of a lifted Combine Per Key, as described + * in the following document: + * https://s.apache.org/beam-runner-api-combine-model#heading=h.ta0g6ase8z07 + * Payload: CombinePayload + * + * @generated from protobuf enum value: COMBINE_PER_KEY_PRECOMBINE = 0; + */ + COMBINE_PER_KEY_PRECOMBINE = 0, + /** + * Represents the Merge Accumulators part of a lifted Combine Per Key, as + * described in the following document: + * https://s.apache.org/beam-runner-api-combine-model#heading=h.jco9rvatld5m + * Payload: CombinePayload + * + * @generated from protobuf enum value: COMBINE_PER_KEY_MERGE_ACCUMULATORS = 1; + */ + COMBINE_PER_KEY_MERGE_ACCUMULATORS = 1, + /** + * Represents the Extract Outputs part of a lifted Combine Per Key, as + * described in the following document: + * https://s.apache.org/beam-runner-api-combine-model#heading=h.i9i6p8gtl6ku + 
* Payload: CombinePayload + * + * @generated from protobuf enum value: COMBINE_PER_KEY_EXTRACT_OUTPUTS = 2; + */ + COMBINE_PER_KEY_EXTRACT_OUTPUTS = 2, + /** + * Represents the Combine Grouped Values transform, as described in the + * following document: + * https://s.apache.org/beam-runner-api-combine-model#heading=h.aj86ew4v1wk + * Payload: CombinePayload + * + * @generated from protobuf enum value: COMBINE_GROUPED_VALUES = 3; + */ + COMBINE_GROUPED_VALUES = 3, + /** + * Represents the Convert To Accumulators transform, as described in the + * following document: + * https://s.apache.org/beam-runner-api-combine-model#heading=h.h5697l1scd9x + * Payload: CombinePayload + * + * @generated from protobuf enum value: COMBINE_PER_KEY_CONVERT_TO_ACCUMULATORS = 4; + */ + COMBINE_PER_KEY_CONVERT_TO_ACCUMULATORS = 4 +} +/** + * Payload for all of these: ParDoPayload containing the user's SDF + * + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardPTransforms.SplittableParDoComponents + */ +export enum StandardPTransforms_SplittableParDoComponents { + /** + * Pairs the input element with its initial restriction. + * Input: element; output: KV(element, restriction). + * + * @generated from protobuf enum value: PAIR_WITH_RESTRICTION = 0; + */ + PAIR_WITH_RESTRICTION = 0, + /** + * Splits the restriction of each element/restriction pair and returns the + * resulting splits, with a corresponding floating point size estimation + * for each. + * + * A reasonable value for size is the number of bytes expected to be + * produced by this (element, restriction) pair. + * + * Input: KV(element, restriction) + * Output: KV(KV(element, restriction), size)) + * + * @generated from protobuf enum value: SPLIT_AND_SIZE_RESTRICTIONS = 1; + */ + SPLIT_AND_SIZE_RESTRICTIONS = 1, + /** + * Applies the DoFn to every element and restriction. + * + * All primary and residuals returned from checkpointing or splitting must + * have the same type as the input to this transform. 
+ * + * Input: KV(KV(element, restriction), size); output: DoFn's output. + * + * @generated from protobuf enum value: PROCESS_SIZED_ELEMENTS_AND_RESTRICTIONS = 2; + */ + PROCESS_SIZED_ELEMENTS_AND_RESTRICTIONS = 2, + /** + * Truncates the restriction of each element/restriction pair and returns + * the finite restriction which will be processed when a pipeline is + * drained. See + * https://docs.google.com/document/d/1NExwHlj-2q2WUGhSO4jTu8XGhDPmm3cllSN8IMmWci8/edit#. + * for additional details about drain. + * + * Input: KV(KV(element, restriction), size); + * Output: KV(KV(element, restriction), size). + * + * @generated from protobuf enum value: TRUNCATE_SIZED_RESTRICTION = 3; + */ + TRUNCATE_SIZED_RESTRICTION = 3 +} +/** + * Payload for all of these: GroupIntoBatchesPayload + * + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardPTransforms.GroupIntoBatchesComponents + */ +export enum StandardPTransforms_GroupIntoBatchesComponents { + /** + * @generated from protobuf enum value: GROUP_INTO_BATCHES = 0; + */ + GROUP_INTO_BATCHES = 0 +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.StandardSideInputTypes + */ +export interface StandardSideInputTypes { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardSideInputTypes.Enum + */ +export enum StandardSideInputTypes_Enum { + /** + * Represents a view over a PCollection. + * + * StateGetRequests performed on this side input must use + * StateKey.IterableSideInput. + * + * @generated from protobuf enum value: ITERABLE = 0; + */ + ITERABLE = 0, + /** + * Represents a view over a PCollection>. + * + * StateGetRequests performed on this side input must use + * StateKey.IterableSideInput or StateKey.MultimapSideInput. + * + * @generated from protobuf enum value: MULTIMAP = 1; + */ + MULTIMAP = 1 +} +/** + * A PCollection! 
+ * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.PCollection + */ +export interface PCollection { + /** + * (Required) A unique name for the PCollection. + * + * Ideally, this should be stable over multiple evolutions of a pipeline + * for the purposes of logging and associating pipeline state with a node, + * etc. + * + * If it is not stable, then the runner decides what will happen. But, most + * importantly, it must always be here, even if it is autogenerated. + * + * @generated from protobuf field: string unique_name = 1; + */ + uniqueName: string; + /** + * (Required) The id of the Coder for this PCollection. + * + * @generated from protobuf field: string coder_id = 2; + */ + coderId: string; + /** + * (Required) Whether this PCollection is bounded or unbounded + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.IsBounded.Enum is_bounded = 3; + */ + isBounded: IsBounded_Enum; + /** + * (Required) The id of the windowing strategy for this PCollection. + * + * @generated from protobuf field: string windowing_strategy_id = 4; + */ + windowingStrategyId: string; + /** + * (Optional) Static display data for the PCollection. If there is none, + * it may be omitted. + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.DisplayData display_data = 5; + */ + displayData: DisplayData[]; +} +/** + * The payload for the primitive ParDo transform. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ParDoPayload + */ +export interface ParDoPayload { + /** + * (Required) The FunctionSpec of the DoFn. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec do_fn = 1; + */ + doFn?: FunctionSpec; + /** + * (Optional) A mapping of local input names to side inputs, describing + * the expected access pattern. 
+ * + * @generated from protobuf field: map side_inputs = 3; + */ + sideInputs: { + [key: string]: SideInput; + }; + /** + * (Optional) A mapping of local state names to state specifications. + * If this is set, the stateful processing requirement should also + * be placed in the pipeline requirements. + * + * @generated from protobuf field: map state_specs = 4; + */ + stateSpecs: { + [key: string]: StateSpec; + }; + /** + * (Optional) A mapping of local timer family names to timer family + * specifications. If this is set, the stateful processing requirement should + * also be placed in the pipeline requirements. + * + * @generated from protobuf field: map timer_family_specs = 9; + */ + timerFamilySpecs: { + [key: string]: TimerFamilySpec; + }; + /** + * (Optional) Only set when this ParDo contains a splittable DoFn. + * If this is set, the corresponding standard requirement should also + * be placed in the pipeline requirements. + * + * @generated from protobuf field: string restriction_coder_id = 7; + */ + restrictionCoderId: string; + /** + * (Optional) Only set when this ParDo can request bundle finalization. + * If this is set, the corresponding standard requirement should also + * be placed in the pipeline requirements. + * + * @generated from protobuf field: bool requests_finalization = 8; + */ + requestsFinalization: boolean; + /** + * Whether this stage requires time sorted input. + * If this is set, the corresponding standard requirement should also + * be placed in the pipeline requirements. + * + * @generated from protobuf field: bool requires_time_sorted_input = 10; + */ + requiresTimeSortedInput: boolean; + /** + * Whether this stage requires stable input. + * If this is set, the corresponding standard requirement should also + * be placed in the pipeline requirements. 
+ * + * @generated from protobuf field: bool requires_stable_input = 11; + */ + requiresStableInput: boolean; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.StateSpec + */ +export interface StateSpec { + /** + * @generated from protobuf oneof: spec + */ + spec: { + oneofKind: "readModifyWriteSpec"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ReadModifyWriteStateSpec read_modify_write_spec = 1; + */ + readModifyWriteSpec: ReadModifyWriteStateSpec; + } | { + oneofKind: "bagSpec"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.BagStateSpec bag_spec = 2; + */ + bagSpec: BagStateSpec; + } | { + oneofKind: "combiningSpec"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.CombiningStateSpec combining_spec = 3; + */ + combiningSpec: CombiningStateSpec; + } | { + oneofKind: "mapSpec"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.MapStateSpec map_spec = 4; + */ + mapSpec: MapStateSpec; + } | { + oneofKind: "setSpec"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.SetStateSpec set_spec = 5; + */ + setSpec: SetStateSpec; + } | { + oneofKind: "orderedListSpec"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.OrderedListStateSpec ordered_list_spec = 6; + */ + orderedListSpec: OrderedListStateSpec; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ReadModifyWriteStateSpec + */ +export interface ReadModifyWriteStateSpec { + /** + * @generated from protobuf field: string coder_id = 1; + */ + coderId: string; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.BagStateSpec + */ +export interface BagStateSpec { + /** + * @generated from protobuf field: string element_coder_id = 1; + */ + elementCoderId: string; +} +/** + * @generated from protobuf message 
org.apache.beam.model.pipeline.v1.OrderedListStateSpec + */ +export interface OrderedListStateSpec { + /** + * @generated from protobuf field: string element_coder_id = 1; + */ + elementCoderId: string; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.CombiningStateSpec + */ +export interface CombiningStateSpec { + /** + * @generated from protobuf field: string accumulator_coder_id = 1; + */ + accumulatorCoderId: string; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec combine_fn = 2; + */ + combineFn?: FunctionSpec; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.MapStateSpec + */ +export interface MapStateSpec { + /** + * @generated from protobuf field: string key_coder_id = 1; + */ + keyCoderId: string; + /** + * @generated from protobuf field: string value_coder_id = 2; + */ + valueCoderId: string; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.SetStateSpec + */ +export interface SetStateSpec { + /** + * @generated from protobuf field: string element_coder_id = 1; + */ + elementCoderId: string; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.TimerFamilySpec + */ +export interface TimerFamilySpec { + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.TimeDomain.Enum time_domain = 1; + */ + timeDomain: TimeDomain_Enum; + /** + * @generated from protobuf field: string timer_family_coder_id = 2; + */ + timerFamilyCoderId: string; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.IsBounded + */ +export interface IsBounded { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.IsBounded.Enum + */ +export enum IsBounded_Enum { + /** + * @generated from protobuf enum value: UNSPECIFIED = 0; + */ + UNSPECIFIED = 0, + /** + * @generated from protobuf enum value: UNBOUNDED = 1; + */ + UNBOUNDED = 1, + /** + * @generated from protobuf 
enum value: BOUNDED = 2; + */ + BOUNDED = 2 +} +/** + * The payload for the primitive Read transform. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ReadPayload + */ +export interface ReadPayload { + /** + * (Required) The FunctionSpec of the source for this Read. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec source = 1; + */ + source?: FunctionSpec; + /** + * (Required) Whether the source is bounded or unbounded + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.IsBounded.Enum is_bounded = 2; + */ + isBounded: IsBounded_Enum; +} +/** + * The payload for the WindowInto transform. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.WindowIntoPayload + */ +export interface WindowIntoPayload { + /** + * (Required) The FunctionSpec of the WindowFn. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec window_fn = 1; + */ + windowFn?: FunctionSpec; +} +/** + * The payload for the special-but-not-primitive Combine transform. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.CombinePayload + */ +export interface CombinePayload { + /** + * (Required) The FunctionSpec of the CombineFn. 
+ * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec combine_fn = 1; + */ + combineFn?: FunctionSpec; + /** + * (Required) A reference to the Coder to use for accumulators of the CombineFn + * + * @generated from protobuf field: string accumulator_coder_id = 2; + */ + accumulatorCoderId: string; +} +/** + * The payload for the test-only primitive TestStream + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.TestStreamPayload + */ +export interface TestStreamPayload { + /** + * (Required) the coder for elements in the TestStream events + * + * @generated from protobuf field: string coder_id = 1; + */ + coderId: string; + /** + * (Optional) If specified, the TestStream will replay these events. + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.TestStreamPayload.Event events = 2; + */ + events: TestStreamPayload_Event[]; + /** + * (Optional) If specified, points to a TestStreamService to be + * used to retrieve events. 
+ * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor endpoint = 3; + */ + endpoint?: ApiServiceDescriptor; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.TestStreamPayload.Event + */ +export interface TestStreamPayload_Event { + /** + * @generated from protobuf oneof: event + */ + event: { + oneofKind: "watermarkEvent"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AdvanceWatermark watermark_event = 1; + */ + watermarkEvent: TestStreamPayload_Event_AdvanceWatermark; + } | { + oneofKind: "processingTimeEvent"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AdvanceProcessingTime processing_time_event = 2; + */ + processingTimeEvent: TestStreamPayload_Event_AdvanceProcessingTime; + } | { + oneofKind: "elementEvent"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AddElements element_event = 3; + */ + elementEvent: TestStreamPayload_Event_AddElements; + } | { + oneofKind: undefined; + }; +} +/** + * Advances the watermark to the specified timestamp. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AdvanceWatermark + */ +export interface TestStreamPayload_Event_AdvanceWatermark { + /** + * (Required) The watermark in millisecond to advance to. + * + * @generated from protobuf field: int64 new_watermark = 1; + */ + newWatermark: bigint; + /** + * (Optional) The output watermark tag for a PCollection. If unspecified + * or with an empty string, this will default to the Main PCollection + * Output + * + * @generated from protobuf field: string tag = 2; + */ + tag: string; +} +/** + * Advances the processing time clock by the specified amount. 
+ * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AdvanceProcessingTime + */ +export interface TestStreamPayload_Event_AdvanceProcessingTime { + /** + * (Required) The duration in millisecond to advance by. + * + * @generated from protobuf field: int64 advance_duration = 1; + */ + advanceDuration: bigint; +} +/** + * Adds elements to the stream to be emitted. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AddElements + */ +export interface TestStreamPayload_Event_AddElements { + /** + * (Required) The elements to add to the TestStream. + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.TestStreamPayload.TimestampedElement elements = 1; + */ + elements: TestStreamPayload_TimestampedElement[]; + /** + * (Optional) The output PCollection tag to add these elements to. If + * unspecified or with an empty string, this will default to the Main + * PCollection Output. + * + * @generated from protobuf field: string tag = 3; + */ + tag: string; +} +/** + * A single element inside of the TestStream. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.TestStreamPayload.TimestampedElement + */ +export interface TestStreamPayload_TimestampedElement { + /** + * (Required) The element encoded. Currently the TestStream only supports + * encoding primitives. + * + * @generated from protobuf field: bytes encoded_element = 1; + */ + encodedElement: Uint8Array; + /** + * (Required) The event timestamp in millisecond of this element. + * + * @generated from protobuf field: int64 timestamp = 2; + */ + timestamp: bigint; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.EventsRequest + */ +export interface EventsRequest { + /** + * The set of PCollections to read from. These are the PTransform outputs + * local names. These are a subset of the TestStream's outputs. 
This allows + * Interactive Beam to cache many PCollections from a pipeline then replay a + * subset of them. + * + * @generated from protobuf field: repeated string output_ids = 1; + */ + outputIds: string[]; +} +/** + * The payload for the special-but-not-primitive WriteFiles transform. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.WriteFilesPayload + */ +export interface WriteFilesPayload { + /** + * (Required) The FunctionSpec of the FileBasedSink. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec sink = 1; + */ + sink?: FunctionSpec; + /** + * (Required) The format function. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec format_function = 2; + */ + formatFunction?: FunctionSpec; + /** + * @generated from protobuf field: bool windowed_writes = 3; + */ + windowedWrites: boolean; + /** + * @generated from protobuf field: bool runner_determined_sharding = 4; + */ + runnerDeterminedSharding: boolean; + /** + * @generated from protobuf field: map side_inputs = 5; + */ + sideInputs: { + [key: string]: SideInput; + }; +} +/** + * Payload used by Google Cloud Pub/Sub read transform. + * This can be used by runners that wish to override Beam Pub/Sub read transform + * with a native implementation. + * The SDK should guarantee that only one of topic, subscription, + * topic_runtime_overridden and subscription_runtime_overridden is set. + * The output of PubSubReadPayload should be bytes of serialized PubsubMessage + * proto if with_attributes == true. Otherwise, the bytes is the raw payload. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.PubSubReadPayload + */ +export interface PubSubReadPayload { + /** + * Topic to read from. Exactly one of topic or subscription should be set. 
+ * Topic format is: /topics/project_id/subscription_name + * + * @generated from protobuf field: string topic = 1; + */ + topic: string; + /** + * Subscription to read from. Exactly one of topic or subscription should be set. + * Subscription format is: /subscriptions/project_id/subscription_name + * + * @generated from protobuf field: string subscription = 2; + */ + subscription: string; + /** + * Attribute that provides element timestamps. + * + * @generated from protobuf field: string timestamp_attribute = 3; + */ + timestampAttribute: string; + /** + * Attribute to be used for uniquely identifying messages. + * + * @generated from protobuf field: string id_attribute = 4; + */ + idAttribute: string; + /** + * If true, reads Pub/Sub payload as well as attributes. If false, reads only the payload. + * + * @generated from protobuf field: bool with_attributes = 5; + */ + withAttributes: boolean; + /** + * If set, the topic is expected to be provided during runtime. + * + * @generated from protobuf field: string topic_runtime_overridden = 6; + */ + topicRuntimeOverridden: string; + /** + * If set, the subscription that is expected to be provided during runtime. + * + * @generated from protobuf field: string subscription_runtime_overridden = 7; + */ + subscriptionRuntimeOverridden: string; +} +/** + * Payload used by Google Cloud Pub/Sub write transform. + * This can be used by runners that wish to override Beam Pub/Sub write transform + * with a native implementation. + * The SDK should guarantee that only one of topic and topic_runtime_overridden + * is set. + * The output of PubSubWritePayload should be bytes if serialized PubsubMessage + * proto. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.PubSubWritePayload + */ +export interface PubSubWritePayload { + /** + * Topic to write to. 
+ * Topic format is: /topics/project_id/subscription_name + * + * @generated from protobuf field: string topic = 1; + */ + topic: string; + /** + * Attribute that provides element timestamps. + * + * @generated from protobuf field: string timestamp_attribute = 2; + */ + timestampAttribute: string; + /** + * Attribute that uniquely identify messages. + * + * @generated from protobuf field: string id_attribute = 3; + */ + idAttribute: string; + /** + * If set, the topic is expected to be provided during runtime. + * + * @generated from protobuf field: string topic_runtime_overridden = 4; + */ + topicRuntimeOverridden: string; +} +/** + * Payload for GroupIntoBatches composite transform. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.GroupIntoBatchesPayload + */ +export interface GroupIntoBatchesPayload { + /** + * Max size of a batch. + * + * @generated from protobuf field: int64 batch_size = 1; + */ + batchSize: bigint; + /** + * Max byte size of a batch in element. + * + * @generated from protobuf field: int64 batch_size_bytes = 3; + */ + batchSizeBytes: bigint; + /** + * (Optional) Max duration a batch is allowed to be cached in states. + * + * @generated from protobuf field: int64 max_buffering_duration_millis = 2; + */ + maxBufferingDurationMillis: bigint; +} +/** + * A coder, the binary format for serialization and deserialization of data in + * a pipeline. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Coder + */ +export interface Coder { + /** + * (Required) A specification for the coder, as a URN plus parameters. This + * may be a cross-language agreed-upon format, or it may be a "custom coder" + * that can only be used by a particular SDK. It does not include component + * coders, as it is beneficial for these to be comprehensible to a runner + * regardless of whether the binary format is agreed-upon. 
+ * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec spec = 1; + */ + spec?: FunctionSpec; + /** + * (Optional) If this coder is parametric, such as ListCoder(VarIntCoder), + * this is a list of the components. In order for encodings to be identical, + * the FunctionSpec and all components must be identical, recursively. + * + * @generated from protobuf field: repeated string component_coder_ids = 2; + */ + componentCoderIds: string[]; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.StandardCoders + */ +export interface StandardCoders { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardCoders.Enum + */ +export enum StandardCoders_Enum { + /** + * Components: None + * + * @generated from protobuf enum value: BYTES = 0; + */ + BYTES = 0, + /** + * Components: None + * + * @generated from protobuf enum value: STRING_UTF8 = 10; + */ + STRING_UTF8 = 10, + /** + * Components: The key and value coder, in that order. + * + * @generated from protobuf enum value: KV = 1; + */ + KV = 1, + /** + * Components: None + * + * @generated from protobuf enum value: BOOL = 12; + */ + BOOL = 12, + /** + * Variable length Encodes a 64-bit integer. + * Components: None + * + * @generated from protobuf enum value: VARINT = 2; + */ + VARINT = 2, + /** + * Encodes the floating point value as a big-endian 64-bit integer + * according to the IEEE 754 double format bit layout. + * Components: None + * + * @generated from protobuf enum value: DOUBLE = 11; + */ + DOUBLE = 11, + /** + * Encodes an iterable of elements. + * + * The encoding for an iterable [e1...eN] of known length N is + * + * fixed32(N) + * encode(e1) encode(e2) encode(e3) ... encode(eN) + * + * If the length is unknown, it is batched up into groups of size b1..bM + * and encoded as + * + * fixed32(-1) + * varInt64(b1) encode(e1) encode(e2) ... encode(e_b1) + * varInt64(b2) encode(e_(b1+1)) encode(e_(b1+2)) ... 
encode(e_(b1+b2)) + * ... + * varInt64(bM) encode(e_(N-bM+1)) encode(e_(N-bM+2)) ... encode(eN) + * varInt64(0) + * + * Components: Coder for a single element. + * + * @generated from protobuf enum value: ITERABLE = 3; + */ + ITERABLE = 3, + /** + * Encodes a timer containing a user key, a dynamic timer tag, a clear bit, + * a fire timestamp, a hold timestamp, the windows and the paneinfo. + * The encoding is represented as: + * user key - user defined key, uses the component coder. + * dynamic timer tag - a string which identifies a timer. + * windows - uses component coders. + * clear bit - a boolean set for clearing the timer. + * fire timestamp - a big endian 8 byte integer representing millis-since-epoch. + * The encoded representation is shifted so that the byte representation of + * negative values are lexicographically ordered before the byte representation + * of positive values. This is typically done by subtracting -9223372036854775808 + * from the value and encoding it as a signed big endian integer. Example values: + * + * -9223372036854775808: 00 00 00 00 00 00 00 00 + * -255: 7F FF FF FF FF FF FF 01 + * -1: 7F FF FF FF FF FF FF FF + * 0: 80 00 00 00 00 00 00 00 + * 1: 80 00 00 00 00 00 00 01 + * 256: 80 00 00 00 00 00 01 00 + * 9223372036854775807: FF FF FF FF FF FF FF FF + * hold timestamp - similar to the fire timestamp. + * paneinfo - similar to the paneinfo of the windowed_value. + * Components: Coder for the key and windows. 
+ * + * @generated from protobuf enum value: TIMER = 4; + */ + TIMER = 4, + /** + * Components: None + * + * @generated from protobuf enum value: INTERVAL_WINDOW = 5; + */ + INTERVAL_WINDOW = 5, + /** + * Components: The coder to attach a length prefix to + * + * @generated from protobuf enum value: LENGTH_PREFIX = 6; + */ + LENGTH_PREFIX = 6, + /** + * Components: None + * + * @generated from protobuf enum value: GLOBAL_WINDOW = 7; + */ + GLOBAL_WINDOW = 7, + /** + * Encodes an element, the windows it is in, the timestamp of the element, + * and the pane of the element. The encoding is represented as: + * timestamp windows pane element + * timestamp - A big endian 8 byte integer representing millis-since-epoch. + * The encoded representation is shifted so that the byte representation + * of negative values are lexicographically ordered before the byte + * representation of positive values. This is typically done by + * subtracting -9223372036854775808 from the value and encoding it as a + * signed big endian integer. Example values: + * + * -9223372036854775808: 00 00 00 00 00 00 00 00 + * -255: 7F FF FF FF FF FF FF 01 + * -1: 7F FF FF FF FF FF FF FF + * 0: 80 00 00 00 00 00 00 00 + * 1: 80 00 00 00 00 00 00 01 + * 256: 80 00 00 00 00 00 01 00 + * 9223372036854775807: FF FF FF FF FF FF FF FF + * + * windows - The windows are encoded using the beam:coder:iterable:v1 + * format, where the windows are encoded using the supplied window + * coder. + * + * pane - The first byte of the pane info determines which type of + * encoding is used, as well as the is_first, is_last, and timing + * fields. If this byte is bits [0 1 2 3 4 5 6 7], then: + * * bits [0 1 2 3] determine the encoding as follows: + * 0000 - The entire pane info is encoded as a single byte. + * The is_first, is_last, and timing fields are encoded + * as below, and the index and non-speculative index are + * both zero (and hence are not encoded here). 
+ * 0001 - The pane info is encoded as this byte plus a single
+ * VarInt encoded integer representing the pane index. The
+ * non-speculative index can be derived as follows:
+ * -1 if the pane is early, otherwise equal to index.
+ * 0010 - The pane info is encoded as this byte plus two VarInt
+ * encoded integers representing the pane index and
+ * non-speculative index respectively.
+ * * bits [4 5] encode the timing as follows:
+ * 00 - early
+ * 01 - on time
+ * 10 - late
+ * 11 - unknown
+ * * bit 6 is 1 if this is the first pane, 0 otherwise.
+ * * bit 7 is 1 if this is the last pane, 0 otherwise.
+ *
+ * element - The element encoded using the supplied element coder.
+ *
+ * Components: The element coder and the window coder, in that order.
+ *
+ * @generated from protobuf enum value: WINDOWED_VALUE = 8;
+ */
+ WINDOWED_VALUE = 8,
+ /**
+ * A windowed value coder with parameterized timestamp, windows and pane info.
+ * Encodes an element with only the value of the windowed value.
+ * Decodes the value and assigns the parameterized timestamp, windows and pane info to the
+ * windowed value.
+ * Components: The element coder and the window coder, in that order
+ * The payload of this coder is an encoded windowed value using the
+ * beam:coder:windowed_value:v1 coder parameterized by a beam:coder:bytes:v1
+ * element coder and the window coder that this param_windowed_value coder uses.
+ *
+ * @generated from protobuf enum value: PARAM_WINDOWED_VALUE = 14;
+ */
+ PARAM_WINDOWED_VALUE = 14,
+ /**
+ * Encodes an iterable of elements, some of which may be stored elsewhere.
+ *
+ * The encoding for a state-backed iterable is the same as that for
+ * an iterable, but the final varInt64(0) terminating the set of batches
+ * may instead be replaced by
+ *
+ * varInt64(-1)
+ * varInt64(len(token))
+ * token
+ *
+ * where token is an opaque byte string that can be used to fetch the
+ * remainder of the iterable (e.g. over the state API).
+ * + * Components: Coder for a single element. + * Experimental. + * + * @generated from protobuf enum value: STATE_BACKED_ITERABLE = 9; + */ + STATE_BACKED_ITERABLE = 9, + /** + * Encodes an arbitrary user defined window and its max timestamp (inclusive). + * The encoding format is: + * maxTimestamp window + * + * maxTimestamp - A big endian 8 byte integer representing millis-since-epoch. + * The encoded representation is shifted so that the byte representation + * of negative values are lexicographically ordered before the byte + * representation of positive values. This is typically done by + * subtracting -9223372036854775808 from the value and encoding it as a + * signed big endian integer. Example values: + * + * -9223372036854775808: 00 00 00 00 00 00 00 00 + * -255: 7F FF FF FF FF FF FF 01 + * -1: 7F FF FF FF FF FF FF FF + * 0: 80 00 00 00 00 00 00 00 + * 1: 80 00 00 00 00 00 00 01 + * 256: 80 00 00 00 00 00 01 00 + * 9223372036854775807: FF FF FF FF FF FF FF FF + * + * window - the window is encoded using the supplied window coder. + * + * Components: Coder for the custom window type. + * + * @generated from protobuf enum value: CUSTOM_WINDOW = 16; + */ + CUSTOM_WINDOW = 16, + /** + * Encodes a "row", an element with a known schema, defined by an + * instance of Schema from schema.proto. + * + * A row is encoded as the concatenation of: + * - The number of attributes in the schema, encoded with + * beam:coder:varint:v1. This makes it possible to detect certain + * allowed schema changes (appending or removing columns) in + * long-running streaming pipelines. + * - A byte array representing a packed bitset indicating null fields (a + * 1 indicating a null) encoded with beam:coder:bytes:v1. The unused + * bits in the last byte must be set to 0. If there are no nulls an + * empty byte array is encoded. 
+ * The two-byte bitset (not including the lenghth-prefix) for the row + * [NULL, 0, 0, 0, NULL, 0, 0, NULL, 0, NULL] would be + * [0b10010001, 0b00000010] + * - An encoding for each non-null field, concatenated together. + * + * Schema types are mapped to coders as follows: + * AtomicType: + * BYTE: not yet a standard coder (BEAM-7996) + * INT16: not yet a standard coder (BEAM-7996) + * INT32: beam:coder:varint:v1 + * INT64: beam:coder:varint:v1 + * FLOAT: not yet a standard coder (BEAM-7996) + * DOUBLE: beam:coder:double:v1 + * STRING: beam:coder:string_utf8:v1 + * BOOLEAN: beam:coder:bool:v1 + * BYTES: beam:coder:bytes:v1 + * ArrayType: beam:coder:iterable:v1 (always has a known length) + * MapType: not a standard coder, specification defined below. + * RowType: beam:coder:row:v1 + * LogicalType: Uses the coder for its representation. + * + * The MapType is encoded by: + * - An INT32 representing the size of the map (N) + * - Followed by N interleaved keys and values, encoded with their + * corresponding coder. + * + * Nullable types in container types (ArrayType, MapType) are encoded by: + * - A one byte null indicator, 0x00 for null values, or 0x01 for present + * values. + * - For present values the null indicator is followed by the value + * encoded with it's corresponding coder. + * + * Well known logical types: + * beam:logical_type:micros_instant:v1 + * - Representation type: ROW + * - A timestamp without a timezone where seconds + micros represents the + * amount of time since the epoch. + * + * The payload for RowCoder is an instance of Schema. + * Components: None + * Experimental. + * + * @generated from protobuf enum value: ROW = 13; + */ + ROW = 13, + /** + * Encodes a user key and a shard id which is an opaque byte string. 
+ * + * The encoding for a sharded key consists of a shard id byte string and the + * encoded user key in the following order: + * + * - shard id using beam:coder:bytes:v1 + * - encoded user key + * + * Examples: + * user key with an empty shard id + * 0x00 + * encode(user_key) + * + * user key with a shard id taking up two bytes. + * 0x02 + * 0x11 0x22 + * encode(user_key) + * + * Components: the user key coder. + * Experimental. + * + * @generated from protobuf enum value: SHARDED_KEY = 15; + */ + SHARDED_KEY = 15 +} +/** + * A windowing strategy describes the window function, triggering, allowed + * lateness, and accumulation mode for a PCollection. + * + * TODO: consider inlining field on PCollection + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.WindowingStrategy + */ +export interface WindowingStrategy { + /** + * (Required) The FunctionSpec of the UDF that assigns windows, + * merges windows, and shifts timestamps before they are + * combined according to the OutputTime. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec window_fn = 1; + */ + windowFn?: FunctionSpec; + /** + * (Required) Whether or not the window fn is merging. + * + * This knowledge is required for many optimizations. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.MergeStatus.Enum merge_status = 2; + */ + mergeStatus: MergeStatus_Enum; + /** + * (Required) The coder for the windows of this PCollection. + * + * @generated from protobuf field: string window_coder_id = 3; + */ + windowCoderId: string; + /** + * (Required) The trigger to use when grouping this PCollection. 
+ * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger trigger = 4; + */ + trigger?: Trigger; + /** + * (Required) The accumulation mode indicates whether new panes are a full + * replacement for prior panes or whether they are deltas to be combined + * with other panes (the combine should correspond to whatever the upstream + * grouping transform is). + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.AccumulationMode.Enum accumulation_mode = 5; + */ + accumulationMode: AccumulationMode_Enum; + /** + * (Required) The OutputTime specifies, for a grouping transform, how to + * compute the aggregate timestamp. The window_fn will first possibly shift + * it later, then the OutputTime takes the max, min, or ignores it and takes + * the end of window. + * + * This is actually only for input to grouping transforms, but since they + * may be introduced in runner-specific ways, it is carried along with the + * windowing strategy. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.OutputTime.Enum output_time = 6; + */ + outputTime: OutputTime_Enum; + /** + * (Required) Indicate when output should be omitted upon window expiration. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ClosingBehavior.Enum closing_behavior = 7; + */ + closingBehavior: ClosingBehavior_Enum; + /** + * (Required) The duration, in milliseconds, beyond the end of a window at + * which the window becomes droppable. + * + * @generated from protobuf field: int64 allowed_lateness = 8; + */ + allowedLateness: bigint; + /** + * (Required) Indicate whether empty on-time panes should be omitted. 
+ * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.OnTimeBehavior.Enum on_time_behavior = 9; + */ + onTimeBehavior: OnTimeBehavior_Enum; + /** + * (Required) Whether or not the window fn assigns inputs to exactly one window + * + * This knowledge is required for some optimizations + * + * @generated from protobuf field: bool assigns_to_one_window = 10; + */ + assignsToOneWindow: boolean; + /** + * (Optional) Environment where the current window_fn should be applied in. + * Runner that executes the pipeline may choose to override this if needed. + * If not specified, environment will be decided by the runner. + * + * @generated from protobuf field: string environment_id = 11; + */ + environmentId: string; +} +/** + * Whether or not a PCollection's WindowFn is non-merging, merging, or + * merging-but-already-merged, in which case a subsequent GroupByKey is almost + * always going to do something the user does not want + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.MergeStatus + */ +export interface MergeStatus { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.MergeStatus.Enum + */ +export enum MergeStatus_Enum { + /** + * @generated from protobuf enum value: UNSPECIFIED = 0; + */ + UNSPECIFIED = 0, + /** + * The WindowFn does not require merging. + * Examples: global window, FixedWindows, SlidingWindows + * + * @generated from protobuf enum value: NON_MERGING = 1; + */ + NON_MERGING = 1, + /** + * The WindowFn is merging and the PCollection has not had merging + * performed. + * Example: Sessions prior to a GroupByKey + * + * @generated from protobuf enum value: NEEDS_MERGE = 2; + */ + NEEDS_MERGE = 2, + /** + * The WindowFn is merging and the PCollection has had merging occur + * already. 
+ * Example: Sessions after a GroupByKey + * + * @generated from protobuf enum value: ALREADY_MERGED = 3; + */ + ALREADY_MERGED = 3 +} +/** + * Whether or not subsequent outputs of aggregations should be entire + * replacement values or just the aggregation of inputs received since + * the prior output. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.AccumulationMode + */ +export interface AccumulationMode { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.AccumulationMode.Enum + */ +export enum AccumulationMode_Enum { + /** + * @generated from protobuf enum value: UNSPECIFIED = 0; + */ + UNSPECIFIED = 0, + /** + * The aggregation is discarded when it is output + * + * @generated from protobuf enum value: DISCARDING = 1; + */ + DISCARDING = 1, + /** + * The aggregation is accumulated across outputs + * + * @generated from protobuf enum value: ACCUMULATING = 2; + */ + ACCUMULATING = 2, + /** + * The aggregation emits retractions when it is output + * + * @generated from protobuf enum value: RETRACTING = 3; + */ + RETRACTING = 3 +} +/** + * Controls whether or not an aggregating transform should output data + * when a window expires. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ClosingBehavior + */ +export interface ClosingBehavior { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.ClosingBehavior.Enum + */ +export enum ClosingBehavior_Enum { + /** + * @generated from protobuf enum value: UNSPECIFIED = 0; + */ + UNSPECIFIED = 0, + /** + * Emit output when a window expires, whether or not there has been + * any new data since the last output. 
+ * + * @generated from protobuf enum value: EMIT_ALWAYS = 1; + */ + EMIT_ALWAYS = 1, + /** + * Only emit output when new data has arrives since the last output + * + * @generated from protobuf enum value: EMIT_IF_NONEMPTY = 2; + */ + EMIT_IF_NONEMPTY = 2 +} +/** + * Controls whether or not an aggregating transform should output data + * when an on-time pane is empty. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.OnTimeBehavior + */ +export interface OnTimeBehavior { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.OnTimeBehavior.Enum + */ +export enum OnTimeBehavior_Enum { + /** + * @generated from protobuf enum value: UNSPECIFIED = 0; + */ + UNSPECIFIED = 0, + /** + * Always fire the on-time pane. Even if there is no new data since + * the previous firing, an element will be produced. + * + * @generated from protobuf enum value: FIRE_ALWAYS = 1; + */ + FIRE_ALWAYS = 1, + /** + * Only fire the on-time pane if there is new data since the previous firing. + * + * @generated from protobuf enum value: FIRE_IF_NONEMPTY = 2; + */ + FIRE_IF_NONEMPTY = 2 +} +/** + * When a number of windowed, timestamped inputs are aggregated, the timestamp + * for the resulting output. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.OutputTime + */ +export interface OutputTime { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.OutputTime.Enum + */ +export enum OutputTime_Enum { + /** + * @generated from protobuf enum value: UNSPECIFIED = 0; + */ + UNSPECIFIED = 0, + /** + * The output has the timestamp of the end of the window. + * + * @generated from protobuf enum value: END_OF_WINDOW = 1; + */ + END_OF_WINDOW = 1, + /** + * The output has the latest timestamp of the input elements since + * the last output. 
+ * + * @generated from protobuf enum value: LATEST_IN_PANE = 2; + */ + LATEST_IN_PANE = 2, + /** + * The output has the earliest timestamp of the input elements since + * the last output. + * + * @generated from protobuf enum value: EARLIEST_IN_PANE = 3; + */ + EARLIEST_IN_PANE = 3 +} +/** + * The different time domains in the Beam model. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.TimeDomain + */ +export interface TimeDomain { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.TimeDomain.Enum + */ +export enum TimeDomain_Enum { + /** + * @generated from protobuf enum value: UNSPECIFIED = 0; + */ + UNSPECIFIED = 0, + /** + * Event time is time from the perspective of the data + * + * @generated from protobuf enum value: EVENT_TIME = 1; + */ + EVENT_TIME = 1, + /** + * Processing time is time from the perspective of the + * execution of your pipeline + * + * @generated from protobuf enum value: PROCESSING_TIME = 2; + */ + PROCESSING_TIME = 2 +} +/** + * A small DSL for expressing when to emit new aggregations + * from a GroupByKey or CombinePerKey + * + * A trigger is described in terms of when it is _ready_ to permit output. 
+ * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Trigger + */ +export interface Trigger { + /** + * @generated from protobuf oneof: trigger + */ + trigger: { + oneofKind: "afterAll"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger.AfterAll after_all = 1; + */ + afterAll: Trigger_AfterAll; + } | { + oneofKind: "afterAny"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger.AfterAny after_any = 2; + */ + afterAny: Trigger_AfterAny; + } | { + oneofKind: "afterEach"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger.AfterEach after_each = 3; + */ + afterEach: Trigger_AfterEach; + } | { + oneofKind: "afterEndOfWindow"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger.AfterEndOfWindow after_end_of_window = 4; + */ + afterEndOfWindow: Trigger_AfterEndOfWindow; + } | { + oneofKind: "afterProcessingTime"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger.AfterProcessingTime after_processing_time = 5; + */ + afterProcessingTime: Trigger_AfterProcessingTime; + } | { + oneofKind: "afterSynchronizedProcessingTime"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger.AfterSynchronizedProcessingTime after_synchronized_processing_time = 6; + */ + afterSynchronizedProcessingTime: Trigger_AfterSynchronizedProcessingTime; + } | { + oneofKind: "always"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger.Always always = 12; + */ + always: Trigger_Always; + } | { + oneofKind: "default"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger.Default default = 7; + */ + default: Trigger_Default; + } | { + oneofKind: "elementCount"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger.ElementCount element_count = 8; + */ + elementCount: Trigger_ElementCount; + } | { + 
oneofKind: "never"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger.Never never = 9; + */ + never: Trigger_Never; + } | { + oneofKind: "orFinally"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger.OrFinally or_finally = 10; + */ + orFinally: Trigger_OrFinally; + } | { + oneofKind: "repeat"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger.Repeat repeat = 11; + */ + repeat: Trigger_Repeat; + } | { + oneofKind: undefined; + }; +} +/** + * Ready when all subtriggers are ready. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Trigger.AfterAll + */ +export interface Trigger_AfterAll { + /** + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.Trigger subtriggers = 1; + */ + subtriggers: Trigger[]; +} +/** + * Ready when any subtrigger is ready. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Trigger.AfterAny + */ +export interface Trigger_AfterAny { + /** + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.Trigger subtriggers = 1; + */ + subtriggers: Trigger[]; +} +/** + * Starting with the first subtrigger, ready when the _current_ subtrigger + * is ready. After output, advances the current trigger by one. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Trigger.AfterEach + */ +export interface Trigger_AfterEach { + /** + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.Trigger subtriggers = 1; + */ + subtriggers: Trigger[]; +} +/** + * Ready after the input watermark is past the end of the window. + * + * May have implicitly-repeated subtriggers for early and late firings. + * When the end of the window is reached, the trigger transitions between + * the subtriggers. 
+ * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Trigger.AfterEndOfWindow + */ +export interface Trigger_AfterEndOfWindow { + /** + * (Optional) A trigger governing output prior to the end of the window. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger early_firings = 1; + */ + earlyFirings?: Trigger; + /** + * (Optional) A trigger governing output after the end of the window. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger late_firings = 2; + */ + lateFirings?: Trigger; +} +/** + * After input arrives, ready when the specified delay has passed. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Trigger.AfterProcessingTime + */ +export interface Trigger_AfterProcessingTime { + /** + * (Required) The transforms to apply to an arriving element's timestamp, + * in order + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.TimestampTransform timestamp_transforms = 1; + */ + timestampTransforms: TimestampTransform[]; +} +/** + * Ready whenever upstream processing time has all caught up with + * the arrival time of an input element + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Trigger.AfterSynchronizedProcessingTime + */ +export interface Trigger_AfterSynchronizedProcessingTime { +} +/** + * The default trigger. Equivalent to Repeat { AfterEndOfWindow } but + * specially denoted to indicate the user did not alter the triggering. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Trigger.Default + */ +export interface Trigger_Default { +} +/** + * Ready whenever the requisite number of input elements have arrived + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Trigger.ElementCount + */ +export interface Trigger_ElementCount { + /** + * @generated from protobuf field: int32 element_count = 1; + */ + elementCount: number; +} +/** + * Never ready. 
There will only be an ON_TIME output and a final + * output at window expiration. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Trigger.Never + */ +export interface Trigger_Never { +} +/** + * Always ready. This can also be expressed as ElementCount(1) but + * is more explicit. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Trigger.Always + */ +export interface Trigger_Always { +} +/** + * Ready whenever either of its subtriggers are ready, but finishes output + * when the finally subtrigger fires. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Trigger.OrFinally + */ +export interface Trigger_OrFinally { + /** + * (Required) Trigger governing main output; may fire repeatedly. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger main = 1; + */ + main?: Trigger; + /** + * (Required) Trigger governing termination of output. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger finally = 2; + */ + finally?: Trigger; +} +/** + * Ready whenever the subtrigger is ready; resets state when the subtrigger + * completes. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Trigger.Repeat + */ +export interface Trigger_Repeat { + /** + * (Require) Trigger that is run repeatedly. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Trigger subtrigger = 1; + */ + subtrigger?: Trigger; +} +/** + * A specification for a transformation on a timestamp. + * + * Primarily used by AfterProcessingTime triggers to transform + * the arrival time of input to a target time for firing. 
+ * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.TimestampTransform + */ +export interface TimestampTransform { + /** + * @generated from protobuf oneof: timestamp_transform + */ + timestampTransform: { + oneofKind: "delay"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.TimestampTransform.Delay delay = 1; + */ + delay: TimestampTransform_Delay; + } | { + oneofKind: "alignTo"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.TimestampTransform.AlignTo align_to = 2; + */ + alignTo: TimestampTransform_AlignTo; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.TimestampTransform.Delay + */ +export interface TimestampTransform_Delay { + /** + * (Required) The delay, in milliseconds. + * + * @generated from protobuf field: int64 delay_millis = 1; + */ + delayMillis: bigint; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.TimestampTransform.AlignTo + */ +export interface TimestampTransform_AlignTo { + /** + * (Required) A duration to which delays should be quantized + * in milliseconds. + * + * @generated from protobuf field: int64 period = 3; + */ + period: bigint; + /** + * (Required) An offset from 0 for the quantization specified by + * alignment_size, in milliseconds + * + * @generated from protobuf field: int64 offset = 4; + */ + offset: bigint; +} +/** + * A specification for how to "side input" a PCollection. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.SideInput + */ +export interface SideInput { + /** + * (Required) URN of the access pattern required by the `view_fn` to present + * the desired SDK-specific interface to a UDF. + * + * This access pattern defines the SDK harness <-> Runner Harness RPC + * interface for accessing a side input. 
+ * + * The only access pattern intended for Beam, because of its superior + * performance possibilities, is "beam:sideinput:multimap" (or some such + * URN) + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec access_pattern = 1; + */ + accessPattern?: FunctionSpec; + /** + * (Required) The FunctionSpec of the UDF that adapts a particular + * access_pattern to a user-facing view type. + * + * For example, View.asSingleton() may include a `view_fn` that adapts a + * specially-designed multimap to a single value per window. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec view_fn = 2; + */ + viewFn?: FunctionSpec; + /** + * (Required) The FunctionSpec of the UDF that maps a main input window + * to a side input window. + * + * For example, when the main input is in fixed windows of one hour, this + * can specify that the side input should be accessed according to the day + * in which that hour falls. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec window_mapping_fn = 3; + */ + windowMappingFn?: FunctionSpec; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.StandardArtifacts + */ +export interface StandardArtifacts { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardArtifacts.Types + */ +export enum StandardArtifacts_Types { + /** + * A URN for locally-accessible artifact files. + * payload: ArtifactFilePayload + * + * @generated from protobuf enum value: FILE = 0; + */ + FILE = 0, + /** + * A URN for artifacts described by URLs. + * payload: ArtifactUrlPayload + * + * @generated from protobuf enum value: URL = 1; + */ + URL = 1, + /** + * A URN for artifacts embedded in ArtifactInformation proto. + * payload: EmbeddedFilePayload. + * + * @generated from protobuf enum value: EMBEDDED = 2; + */ + EMBEDDED = 2, + /** + * A URN for Python artifacts hosted on PYPI. 
+ * payload: PypiPayload + * + * @generated from protobuf enum value: PYPI = 3; + */ + PYPI = 3, + /** + * A URN for Java artifacts hosted on a Maven repository. + * payload: MavenPayload + * + * @generated from protobuf enum value: MAVEN = 4; + */ + MAVEN = 4, + /** + * A URN for deferred artifacts. + * payload: DeferredArtifactPayload + * + * @generated from protobuf enum value: DEFERRED = 5; + */ + DEFERRED = 5 +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardArtifacts.Roles + */ +export enum StandardArtifacts_Roles { + /** + * A URN for staging-to role. + * payload: ArtifactStagingToRolePayload + * + * @generated from protobuf enum value: STAGING_TO = 0; + */ + STAGING_TO = 0, + /** + * A URN for pip-requirements-file role. + * payload: None + * + * @generated from protobuf enum value: PIP_REQUIREMENTS_FILE = 1; + */ + PIP_REQUIREMENTS_FILE = 1 +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ArtifactFilePayload + */ +export interface ArtifactFilePayload { + /** + * a string for an artifact file path e.g. "/tmp/foo.jar" + * + * @generated from protobuf field: string path = 1; + */ + path: string; + /** + * The hex-encoded sha256 checksum of the artifact. + * + * @generated from protobuf field: string sha256 = 2; + */ + sha256: string; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ArtifactUrlPayload + */ +export interface ArtifactUrlPayload { + /** + * a string for an artifact URL e.g. "https://.../foo.jar" or "gs://tmp/foo.jar" + * + * @generated from protobuf field: string url = 1; + */ + url: string; + /** + * (Optional) The hex-encoded sha256 checksum of the artifact if available. 
+ * + * @generated from protobuf field: string sha256 = 2; + */ + sha256: string; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.EmbeddedFilePayload + */ +export interface EmbeddedFilePayload { + /** + * raw data bytes for an embedded artifact + * + * @generated from protobuf field: bytes data = 1; + */ + data: Uint8Array; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.PyPIPayload + */ +export interface PyPIPayload { + /** + * Pypi compatible artifact id e.g. "apache-beam" + * + * @generated from protobuf field: string artifact_id = 1; + */ + artifactId: string; + /** + * Pypi compatible version string. + * + * @generated from protobuf field: string version = 2; + */ + version: string; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.MavenPayload + */ +export interface MavenPayload { + /** + * A string specifying Maven artifact. + * The standard format is "groupId:artifactId:version[:packaging[:classifier]]" + * + * @generated from protobuf field: string artifact = 1; + */ + artifact: string; + /** + * (Optional) Repository URL. If not specified, Maven central is used by default. + * + * @generated from protobuf field: string repository_url = 2; + */ + repositoryUrl: string; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.DeferredArtifactPayload + */ +export interface DeferredArtifactPayload { + /** + * A unique string identifier assigned by the creator of this payload. The creator may use this key to confirm + * whether they can parse the data. + * + * @generated from protobuf field: string key = 1; + */ + key: string; + /** + * Data for deferred artifacts. Interpretation of bytes is delegated to the creator of this payload. 
+ * + * @generated from protobuf field: bytes data = 2; + */ + data: Uint8Array; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ArtifactStagingToRolePayload + */ +export interface ArtifactStagingToRolePayload { + /** + * A generated staged name (relative path under staging directory). + * + * @generated from protobuf field: string staged_name = 1; + */ + stagedName: string; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ArtifactInformation + */ +export interface ArtifactInformation { + /** + * A URN that describes the type of artifact + * + * @generated from protobuf field: string type_urn = 1; + */ + typeUrn: string; + /** + * @generated from protobuf field: bytes type_payload = 2; + */ + typePayload: Uint8Array; + /** + * A URN that describes the role of artifact + * + * @generated from protobuf field: string role_urn = 3; + */ + roleUrn: string; + /** + * @generated from protobuf field: bytes role_payload = 4; + */ + rolePayload: Uint8Array; +} +/** + * An environment for executing UDFs. By default, an SDK container URL, but + * can also be a process forked by a command, or an externally managed process. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Environment + */ +export interface Environment { + /** + * (Required) The URN of the payload + * + * @generated from protobuf field: string urn = 2; + */ + urn: string; + /** + * (Optional) The data specifying any parameters to the URN. If + * the URN does not require any arguments, this may be omitted. + * + * @generated from protobuf field: bytes payload = 3; + */ + payload: Uint8Array; + /** + * (Optional) Static display data for the environment. If there is none, + * it may be omitted. + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.DisplayData display_data = 4; + */ + displayData: DisplayData[]; + /** + * (Optional) A set of capabilities this environment supports. 
This is + * typically a list of common URNs designating coders, transforms, etc. that + * this environment understands (and a runner MAY use) despite not + * appearing in the pipeline proto. This may also be used to indicate + * support of optional protocols not tied to a concrete component. + * + * @generated from protobuf field: repeated string capabilities = 5; + */ + capabilities: string[]; + /** + * (Optional) artifact dependency information used for executing UDFs in this environment. + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.ArtifactInformation dependencies = 6; + */ + dependencies: ArtifactInformation[]; + /** + * (Optional) A mapping of resource URNs to requested values. The encoding + * of the values is specified by the URN. Resource hints are advisory; + * a runner is free to ignore resource hints that it does not understand. + * + * @generated from protobuf field: map resource_hints = 7; + */ + resourceHints: { + [key: string]: Uint8Array; + }; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.StandardEnvironments + */ +export interface StandardEnvironments { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardEnvironments.Environments + */ +export enum StandardEnvironments_Environments { + /** + * A managed docker container to run user code. + * + * @generated from protobuf enum value: DOCKER = 0; + */ + DOCKER = 0, + /** + * A managed native process to run user code. + * + * @generated from protobuf enum value: PROCESS = 1; + */ + PROCESS = 1, + /** + * An external non managed process to run user code. + * + * @generated from protobuf enum value: EXTERNAL = 2; + */ + EXTERNAL = 2, + /** + * Used as a stub when context is missing a runner-provided default environment. 
+ * + * @generated from protobuf enum value: DEFAULT = 3; + */ + DEFAULT = 3 +} +/** + * The payload of a Docker image + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.DockerPayload + */ +export interface DockerPayload { + /** + * @generated from protobuf field: string container_image = 1; + */ + containerImage: string; // implicitly linux_amd64. +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ProcessPayload + */ +export interface ProcessPayload { + /** + * @generated from protobuf field: string os = 1; + */ + os: string; // "linux", "darwin", .. + /** + * @generated from protobuf field: string arch = 2; + */ + arch: string; // "amd64", .. + /** + * @generated from protobuf field: string command = 3; + */ + command: string; // process to execute + /** + * @generated from protobuf field: map env = 4; + */ + env: { + [key: string]: string; + }; // Environment variables +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ExternalPayload + */ +export interface ExternalPayload { + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ApiServiceDescriptor endpoint = 1; + */ + endpoint?: ApiServiceDescriptor; + /** + * @generated from protobuf field: map params = 2; + */ + params: { + [key: string]: string; + }; // Arbitrary extra parameters to pass +} +/** + * These URNs are used to indicate capabilities of environments that cannot + * simply be expressed as a component (such as a Coder or PTransform) that this + * environment understands. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.StandardProtocols + */ +export interface StandardProtocols { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardProtocols.Enum + */ +export enum StandardProtocols_Enum { + /** + * Indicates suport for progress reporting via the legacy Metrics proto. 
+ *
+ * @generated from protobuf enum value: LEGACY_PROGRESS_REPORTING = 0;
+ */
+ LEGACY_PROGRESS_REPORTING = 0,
+ /**
+ * Indicates support for progress reporting via the new MonitoringInfo proto.
+ *
+ * @generated from protobuf enum value: PROGRESS_REPORTING = 1;
+ */
+ PROGRESS_REPORTING = 1,
+ /**
+ * Indicates support for worker status protocol defined at
+ * https://s.apache.org/beam-fn-api-harness-status.
+ *
+ * @generated from protobuf enum value: WORKER_STATUS = 2;
+ */
+ WORKER_STATUS = 2,
+ /**
+ * Indicates this SDK can take advantage of multiple cores when processing
+ * concurrent process bundle requests. (Note that all SDKs must process
+ * an unbounded number of concurrent process bundle requests; this capability
+ * simply indicates this SDK can actually parallelize the work across multiple
+ * cores.)
+ *
+ * @generated from protobuf enum value: MULTI_CORE_BUNDLE_PROCESSING = 3;
+ */
+ MULTI_CORE_BUNDLE_PROCESSING = 3,
+ /**
+ * Indicates this SDK can cheaply spawn sibling workers (e.g. within the
+ * same container) to work around the fact that it cannot take advantage
+ * of multiple cores (i.e. MULTI_CORE_BUNDLE_PROCESSING is not set).
+ *
+ * @generated from protobuf enum value: SIBLING_WORKERS = 5;
+ */
+ SIBLING_WORKERS = 5,
+ /**
+ * Indicates that this SDK handles the InstructionRequest of type
+ * HarnessMonitoringInfosRequest.
+ * A request to provide full MonitoringInfo data associated with
+ * the entire SDK harness process, not specific to a bundle.
+ *
+ * @generated from protobuf enum value: HARNESS_MONITORING_INFOS = 4;
+ */
+ HARNESS_MONITORING_INFOS = 4,
+ /**
+ * Indicates that this SDK can process elements embedded in the
+ * ProcessBundleRequest.
See more about the protocol at
+ * https://s.apache.org/beam-fn-api-control-data-embedding
+ *
+ * @generated from protobuf enum value: CONTROL_REQUEST_ELEMENTS_EMBEDDING = 6;
+ */
+ CONTROL_REQUEST_ELEMENTS_EMBEDDING = 6
+}
+/**
+ * These URNs are used to indicate capabilities of a runner that an environment
+ * may take advantage of when interacting with this runner.
+ *
+ * @generated from protobuf message org.apache.beam.model.pipeline.v1.StandardRunnerProtocols
+ */
+export interface StandardRunnerProtocols {
+}
+/**
+ * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardRunnerProtocols.Enum
+ */
+export enum StandardRunnerProtocols_Enum {
+ /**
+ * Indicates support for the MonitoringInfo short id protocol.
+ *
+ * @generated from protobuf enum value: MONITORING_INFO_SHORT_IDS = 0;
+ */
+ MONITORING_INFO_SHORT_IDS = 0,
+ /**
+ * Indicates that this runner can process elements embedded in the
+ * ProcessBundleResponse. See more about the protocol at
+ * https://s.apache.org/beam-fn-api-control-data-embedding
+ *
+ * @generated from protobuf enum value: CONTROL_RESPONSE_ELEMENTS_EMBEDDING = 6;
+ */
+ CONTROL_RESPONSE_ELEMENTS_EMBEDDING = 6
+}
+/**
+ * These URNs are used to indicate requirements of a pipeline that cannot
+ * simply be expressed as a component (such as a Coder or PTransform) that the
+ * runner must understand. In many cases, this indicates a particular field
+ * of a transform must be inspected and respected (which allows new fields
+ * to be added in a forwards-compatible way).
+ *
+ * @generated from protobuf message org.apache.beam.model.pipeline.v1.StandardRequirements
+ */
+export interface StandardRequirements {
+}
+/**
+ * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardRequirements.Enum
+ */
+export enum StandardRequirements_Enum {
+ /**
+ * This requirement indicates the state_spec and time_spec fields of ParDo
+ * transform payloads must be inspected.
+ * + * @generated from protobuf enum value: REQUIRES_STATEFUL_PROCESSING = 0; + */ + REQUIRES_STATEFUL_PROCESSING = 0, + /** + * This requirement indicates the requests_finalization field of ParDo + * transform payloads must be inspected. + * + * @generated from protobuf enum value: REQUIRES_BUNDLE_FINALIZATION = 1; + */ + REQUIRES_BUNDLE_FINALIZATION = 1, + /** + * This requirement indicates the requires_stable_input field of ParDo + * transform payloads must be inspected. + * + * @generated from protobuf enum value: REQUIRES_STABLE_INPUT = 2; + */ + REQUIRES_STABLE_INPUT = 2, + /** + * This requirement indicates the requires_time_sorted_input field of ParDo + * transform payloads must be inspected. + * + * @generated from protobuf enum value: REQUIRES_TIME_SORTED_INPUT = 3; + */ + REQUIRES_TIME_SORTED_INPUT = 3, + /** + * This requirement indicates the restriction_coder_id field of ParDo + * transform payloads must be inspected. + * + * @generated from protobuf enum value: REQUIRES_SPLITTABLE_DOFN = 4; + */ + REQUIRES_SPLITTABLE_DOFN = 4 +} +/** + * A URN along with a parameter object whose schema is determined by the + * URN. + * + * This structure is reused in two distinct, but compatible, ways: + * + * 1. This can be a specification of the function over PCollections + * that a PTransform computes. + * 2. This can be a specification of a user-defined function, possibly + * SDK-specific. (external to this message must be adequate context + * to indicate the environment in which the UDF can be understood). + * + * Though not explicit in this proto, there are two possibilities + * for the relationship of a runner to this specification that + * one should bear in mind: + * + * 1. The runner understands the URN. For example, it might be + * a well-known URN like "beam:transform:Top" or + * "beam:window_fn:FixedWindows" with + * an agreed-upon payload (e.g. a number or duration, + * respectively). + * 2. The runner does not understand the URN. 
It might be an + * SDK specific URN such as "beam:dofn:javasdk:1.0" + * that indicates to the SDK what the payload is, + * such as a serialized Java DoFn from a particular + * version of the Beam Java SDK. The payload will often + * then be an opaque message such as bytes in a + * language-specific serialization format. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.FunctionSpec + */ +export interface FunctionSpec { + /** + * (Required) A URN that describes the accompanying payload. + * For any URN that is not recognized (by whomever is inspecting + * it) the parameter payload should be treated as opaque and + * passed as-is. + * + * @generated from protobuf field: string urn = 1; + */ + urn: string; + /** + * (Optional) The data specifying any parameters to the URN. If + * the URN does not require any arguments, this may be omitted. + * + * @generated from protobuf field: bytes payload = 3; + */ + payload: Uint8Array; +} +/** + * A set of well known URNs describing display data. + * + * All descriptions must contain how the value should be classified and how it + * is encoded. Note that some types are logical types which convey contextual + * information about the pipeline in addition to an encoding while others only + * specify the encoding itself. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.StandardDisplayData + */ +export interface StandardDisplayData { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardDisplayData.DisplayData + */ +export enum StandardDisplayData_DisplayData { + /** + * A string label and value. Has a payload containing an encoded + * LabelledPayload. + * + * @generated from protobuf enum value: LABELLED = 0; + */ + LABELLED = 0 +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.LabelledPayload + */ +export interface LabelledPayload { + /** + * (Required) A human readable label for the value. 
+ * + * @generated from protobuf field: string label = 1; + */ + label: string; + /** + * @generated from protobuf oneof: value + */ + value: { + oneofKind: "stringValue"; + /** + * @generated from protobuf field: string string_value = 2; + */ + stringValue: string; + } | { + oneofKind: "boolValue"; + /** + * @generated from protobuf field: bool bool_value = 3; + */ + boolValue: boolean; + } | { + oneofKind: "doubleValue"; + /** + * @generated from protobuf field: double double_value = 4; + */ + doubleValue: number; + } | { + oneofKind: "intValue"; + /** + * @generated from protobuf field: int64 int_value = 5; + */ + intValue: bigint; + } | { + oneofKind: undefined; + }; + /** + * (Required) The key identifies the actual content of the metadata. + * + * @generated from protobuf field: string key = 6; + */ + key: string; + /** + * (Required) The namespace describes the context that specified the key. + * + * @generated from protobuf field: string namespace = 7; + */ + namespace: string; +} +/** + * Static display data associated with a pipeline component. Display data is + * useful for pipeline runners IOs and diagnostic dashboards to display details + * about annotated components. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.DisplayData + */ +export interface DisplayData { + /** + * A key used to describe the type of display data. See StandardDisplayData + * for the set of well known urns describing how the payload is meant to be + * interpreted. + * + * @generated from protobuf field: string urn = 1; + */ + urn: string; + /** + * (Optional) The data specifying any parameters to the URN. If + * the URN does not require any arguments, this may be omitted. + * + * @generated from protobuf field: bytes payload = 2; + */ + payload: Uint8Array; +} +// The following transforms are not part of the RunnerApi specification, +// but may be useful for graph construction and manipulation. 
+ +/** + * A disjoint union of all the things that may contain references + * that require Components to resolve. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.MessageWithComponents + */ +export interface MessageWithComponents { + /** + * (Optional) The by-reference components of the root message, + * enabling a standalone message. + * + * If this is absent, it is expected that there are no + * references. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Components components = 1; + */ + components?: Components; + /** + * @generated from protobuf oneof: root + */ + root: { + oneofKind: "coder"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Coder coder = 2; + */ + coder: Coder; + } | { + oneofKind: "combinePayload"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.CombinePayload combine_payload = 3; + */ + combinePayload: CombinePayload; + } | { + oneofKind: "functionSpec"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FunctionSpec function_spec = 4; + */ + functionSpec: FunctionSpec; + } | { + oneofKind: "parDoPayload"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ParDoPayload par_do_payload = 6; + */ + parDoPayload: ParDoPayload; + } | { + oneofKind: "ptransform"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.PTransform ptransform = 7; + */ + ptransform: PTransform; + } | { + oneofKind: "pcollection"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.PCollection pcollection = 8; + */ + pcollection: PCollection; + } | { + oneofKind: "readPayload"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ReadPayload read_payload = 9; + */ + readPayload: ReadPayload; + } | { + oneofKind: "sideInput"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.SideInput side_input = 11; + */ + sideInput: 
SideInput; + } | { + oneofKind: "windowIntoPayload"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.WindowIntoPayload window_into_payload = 12; + */ + windowIntoPayload: WindowIntoPayload; + } | { + oneofKind: "windowingStrategy"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.WindowingStrategy windowing_strategy = 13; + */ + windowingStrategy: WindowingStrategy; + } | { + oneofKind: undefined; + }; +} +/** + * The payload for an executable stage. This will eventually be passed to an SDK in the form of a + * ProcessBundleDescriptor. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ExecutableStagePayload + */ +export interface ExecutableStagePayload { + /** + * (Required) Environment in which this stage executes. + * + * We use an environment rather than environment id + * because ExecutableStages use environments directly. This may change in the future. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Environment environment = 1; + */ + environment?: Environment; + /** + * The wire coder settings of this executable stage + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.WireCoderSetting wire_coder_settings = 9; + */ + wireCoderSettings: ExecutableStagePayload_WireCoderSetting[]; + /** + * (Required) Input PCollection id. This must be present as a value in the inputs of any + * PTransform the ExecutableStagePayload is the payload of. + * + * @generated from protobuf field: string input = 2; + */ + input: string; + /** + * The side inputs required for this executable stage. Each side input of each PTransform within + * this ExecutableStagePayload must be represented within this field. 
+ *
+ * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.SideInputId side_inputs = 3;
+ */
+ sideInputs: ExecutableStagePayload_SideInputId[];
+ /**
+ * PTransform ids contained within this executable stage. This must contain at least one
+ * PTransform id.
+ *
+ * @generated from protobuf field: repeated string transforms = 4;
+ */
+ transforms: string[];
+ /**
+ * Output PCollection ids. This must be equal to the values of the outputs of any
+ * PTransform the ExecutableStagePayload is the payload of.
+ *
+ * @generated from protobuf field: repeated string outputs = 5;
+ */
+ outputs: string[];
+ /**
+ * (Required) The components for the Executable Stage. This must contain all of the Transforms
+ * in transforms, and the closure of all of the components they recognize.
+ *
+ * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Components components = 6;
+ */
+ components?: Components;
+ /**
+ * The user states required for this executable stage. Each user state of each PTransform within
+ * this ExecutableStagePayload must be represented within this field.
+ *
+ * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.UserStateId user_states = 7;
+ */
+ userStates: ExecutableStagePayload_UserStateId[];
+ /**
+ * The timers required for this executable stage. Each timer of each PTransform within
+ * this ExecutableStagePayload must be represented within this field.
+ *
+ * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerId timers = 8;
+ */
+ timers: ExecutableStagePayload_TimerId[];
+ /**
+ * The timer families required for this executable stage. Each timer family of each PTransform within
+ * this ExecutableStagePayload must be represented within this field.
+ * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerFamilyId timerFamilies = 10; + */ + timerFamilies: ExecutableStagePayload_TimerFamilyId[]; +} +/** + * A reference to a side input. Side inputs are uniquely identified by PTransform id and + * local name. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ExecutableStagePayload.SideInputId + */ +export interface ExecutableStagePayload_SideInputId { + /** + * (Required) The id of the PTransform that references this side input. + * + * @generated from protobuf field: string transform_id = 1; + */ + transformId: string; + /** + * (Required) The local name of this side input from the PTransform that references it. + * + * @generated from protobuf field: string local_name = 2; + */ + localName: string; +} +/** + * A reference to user state. User states are uniquely identified by PTransform id and + * local name. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ExecutableStagePayload.UserStateId + */ +export interface ExecutableStagePayload_UserStateId { + /** + * (Required) The id of the PTransform that references this user state. + * + * @generated from protobuf field: string transform_id = 1; + */ + transformId: string; + /** + * (Required) The local name of this user state for the PTransform that references it. + * + * @generated from protobuf field: string local_name = 2; + */ + localName: string; +} +/** + * A reference to a timer. Timers are uniquely identified by PTransform id and + * local name. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerId + */ +export interface ExecutableStagePayload_TimerId { + /** + * (Required) The id of the PTransform that references this timer. 
+ * + * @generated from protobuf field: string transform_id = 1; + */ + transformId: string; + /** + * (Required) The local name of this timer for the PTransform that references it. + * + * @generated from protobuf field: string local_name = 2; + */ + localName: string; +} +/** + * A reference to a timer. Timers are uniquely identified by PTransform id and + * local name. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerFamilyId + */ +export interface ExecutableStagePayload_TimerFamilyId { + /** + * (Required) The id of the PTransform that references this timer family. + * + * @generated from protobuf field: string transform_id = 1; + */ + transformId: string; + /** + * (Required) The local name of this timer family for the PTransform that references it. + * + * @generated from protobuf field: string local_name = 2; + */ + localName: string; +} +/** + * Settings that decide the coder type of wire coder. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ExecutableStagePayload.WireCoderSetting + */ +export interface ExecutableStagePayload_WireCoderSetting { + /** + * (Required) The URN of the wire coder. + * Note that only windowed value coder or parameterized windowed value coder are supported. + * + * @generated from protobuf field: string urn = 1; + */ + urn: string; + /** + * (Optional) The data specifying any parameters to the URN. If + * the URN is beam:coder:windowed_value:v1, this may be omitted. If the URN is + * beam:coder:param_windowed_value:v1, the payload is an encoded windowed + * value using the beam:coder:windowed_value:v1 coder parameterized by + * a beam:coder:bytes:v1 element coder and the window coder that this + * param_windowed_value coder uses. 
+ * + * @generated from protobuf field: bytes payload = 2; + */ + payload: Uint8Array; + /** + * @generated from protobuf oneof: target + */ + target: { + oneofKind: "inputOrOutputId"; + /** + * The input or output PCollection id this setting applies to. + * + * @generated from protobuf field: string input_or_output_id = 3; + */ + inputOrOutputId: string; + } | { + oneofKind: "timer"; + /** + * The timer id this setting applies to. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerId timer = 4; + */ + timer: ExecutableStagePayload_TimerId; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.StandardResourceHints + */ +export interface StandardResourceHints { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.StandardResourceHints.Enum + */ +export enum StandardResourceHints_Enum { + /** + * Describes hardware accelerators that are desired to have in the execution environment. + * + * @generated from protobuf enum value: ACCELERATOR = 0; + */ + ACCELERATOR = 0, + /** + * Describes desired minimal available RAM size in transform's execution environment. + * SDKs should convert the size to bytes, but can allow users to specify human-friendly units (e.g. GiB). 
+ * + * @generated from protobuf enum value: MIN_RAM_BYTES = 1; + */ + MIN_RAM_BYTES = 1 +} +// @generated message type with reflection information, may provide speed optimized methods +class BeamConstants$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.BeamConstants", []); + } + create(value?: PartialMessage): BeamConstants { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BeamConstants): BeamConstants { + return target ?? this.create(); + } + internalBinaryWrite(message: BeamConstants, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.BeamConstants + */ +export const BeamConstants = new BeamConstants$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Components$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Components", [ + { no: 1, name: "transforms", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => PTransform } }, + { no: 2, name: "pcollections", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => PCollection } }, + { no: 3, name: "windowing_strategies", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => WindowingStrategy } }, + { no: 4, name: "coders", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => Coder } }, + { no: 5, name: "environments", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => 
Environment } } + ]); + } + create(value?: PartialMessage): Components { + const message = { transforms: {}, pcollections: {}, windowingStrategies: {}, coders: {}, environments: {} }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Components): Components { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* map transforms */ 1: + this.binaryReadMap1(message.transforms, reader, options); + break; + case /* map pcollections */ 2: + this.binaryReadMap2(message.pcollections, reader, options); + break; + case /* map windowing_strategies */ 3: + this.binaryReadMap3(message.windowingStrategies, reader, options); + break; + case /* map coders */ 4: + this.binaryReadMap4(message.coders, reader, options); + break; + case /* map environments */ 5: + this.binaryReadMap5(message.environments, reader, options); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap1(map: Components["transforms"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof Components["transforms"] | undefined, val: Components["transforms"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = PTransform.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.Components.transforms"); + } + } + map[key ?? ""] = val ?? PTransform.create(); + } + private binaryReadMap2(map: Components["pcollections"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof Components["pcollections"] | undefined, val: Components["pcollections"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = PCollection.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.Components.pcollections"); + } + } + map[key ?? ""] = val ?? 
PCollection.create(); + } + private binaryReadMap3(map: Components["windowingStrategies"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof Components["windowingStrategies"] | undefined, val: Components["windowingStrategies"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = WindowingStrategy.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.Components.windowing_strategies"); + } + } + map[key ?? ""] = val ?? WindowingStrategy.create(); + } + private binaryReadMap4(map: Components["coders"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof Components["coders"] | undefined, val: Components["coders"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = Coder.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.Components.coders"); + } + } + map[key ?? ""] = val ?? 
Coder.create(); + } + private binaryReadMap5(map: Components["environments"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof Components["environments"] | undefined, val: Components["environments"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = Environment.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.Components.environments"); + } + } + map[key ?? ""] = val ?? Environment.create(); + } + internalBinaryWrite(message: Components, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* map transforms = 1; */ + for (let k of Object.keys(message.transforms)) { + writer.tag(1, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + PTransform.internalBinaryWrite(message.transforms[k], writer, options); + writer.join().join(); + } + /* map pcollections = 2; */ + for (let k of Object.keys(message.pcollections)) { + writer.tag(2, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + PCollection.internalBinaryWrite(message.pcollections[k], writer, options); + writer.join().join(); + } + /* map windowing_strategies = 3; */ + for (let k of Object.keys(message.windowingStrategies)) { + writer.tag(3, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + WindowingStrategy.internalBinaryWrite(message.windowingStrategies[k], writer, options); + writer.join().join(); + } + /* map coders = 4; */ + for (let k of Object.keys(message.coders)) { + writer.tag(4, WireType.LengthDelimited).fork().tag(1, 
WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + Coder.internalBinaryWrite(message.coders[k], writer, options); + writer.join().join(); + } + /* map environments = 5; */ + for (let k of Object.keys(message.environments)) { + writer.tag(5, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + Environment.internalBinaryWrite(message.environments[k], writer, options); + writer.join().join(); + } + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Components + */ +export const Components = new Components$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Pipeline$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Pipeline", [ + { no: 1, name: "components", kind: "message", T: () => Components }, + { no: 2, name: "root_transform_ids", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "display_data", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => DisplayData }, + { no: 4, name: "requirements", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): Pipeline { + const message = { rootTransformIds: [], displayData: [], requirements: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Pipeline): Pipeline { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.Components components */ 1: + message.components = Components.internalBinaryRead(reader, reader.uint32(), options, message.components); + break; + case /* repeated string root_transform_ids */ 2: + message.rootTransformIds.push(reader.string()); + break; + case /* repeated org.apache.beam.model.pipeline.v1.DisplayData display_data */ 3: + message.displayData.push(DisplayData.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated string requirements */ 4: + message.requirements.push(reader.string()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Pipeline, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.Components components = 1; */ + if (message.components) + Components.internalBinaryWrite(message.components, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* repeated string root_transform_ids = 2; */ + for (let i = 0; i < message.rootTransformIds.length; i++) + writer.tag(2, WireType.LengthDelimited).string(message.rootTransformIds[i]); + /* repeated org.apache.beam.model.pipeline.v1.DisplayData display_data = 3; */ + for (let i = 0; i < message.displayData.length; i++) + DisplayData.internalBinaryWrite(message.displayData[i], writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* repeated string requirements = 4; */ + for (let i = 0; i < message.requirements.length; i++) + writer.tag(4, 
WireType.LengthDelimited).string(message.requirements[i]); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Pipeline + */ +export const Pipeline = new Pipeline$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class PTransform$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.PTransform", [ + { no: 5, name: "unique_name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 1, name: "spec", kind: "message", T: () => FunctionSpec }, + { no: 2, name: "subtransforms", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "inputs", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "scalar", T: 9 /*ScalarType.STRING*/ } }, + { no: 4, name: "outputs", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "scalar", T: 9 /*ScalarType.STRING*/ } }, + { no: 6, name: "display_data", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => DisplayData }, + { no: 7, name: "environment_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 8, name: "annotations", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "scalar", T: 12 /*ScalarType.BYTES*/ } } + ]); + } + create(value?: PartialMessage): PTransform { + const message = { uniqueName: "", subtransforms: [], inputs: {}, outputs: {}, displayData: [], environmentId: "", annotations: {} }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: PTransform): PTransform { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string unique_name */ 5: + message.uniqueName = reader.string(); + break; + case /* org.apache.beam.model.pipeline.v1.FunctionSpec spec */ 1: + message.spec = FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, message.spec); + break; + case /* repeated string subtransforms */ 2: + message.subtransforms.push(reader.string()); + break; + case /* map inputs */ 3: + this.binaryReadMap3(message.inputs, reader, options); + break; + case /* map outputs */ 4: + this.binaryReadMap4(message.outputs, reader, options); + break; + case /* repeated org.apache.beam.model.pipeline.v1.DisplayData display_data */ 6: + message.displayData.push(DisplayData.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* string environment_id */ 7: + message.environmentId = reader.string(); + break; + case /* map annotations */ 8: + this.binaryReadMap8(message.annotations, reader, options); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap3(map: PTransform["inputs"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof PTransform["inputs"] | undefined, val: PTransform["inputs"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = reader.string(); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.PTransform.inputs"); + } + } + map[key ?? 
""] = val ?? ""; + } + private binaryReadMap4(map: PTransform["outputs"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof PTransform["outputs"] | undefined, val: PTransform["outputs"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = reader.string(); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.PTransform.outputs"); + } + } + map[key ?? ""] = val ?? ""; + } + private binaryReadMap8(map: PTransform["annotations"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof PTransform["annotations"] | undefined, val: PTransform["annotations"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = reader.bytes(); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.PTransform.annotations"); + } + } + map[key ?? ""] = val ?? 
new Uint8Array(0); + } + internalBinaryWrite(message: PTransform, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string unique_name = 5; */ + if (message.uniqueName !== "") + writer.tag(5, WireType.LengthDelimited).string(message.uniqueName); + /* org.apache.beam.model.pipeline.v1.FunctionSpec spec = 1; */ + if (message.spec) + FunctionSpec.internalBinaryWrite(message.spec, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* repeated string subtransforms = 2; */ + for (let i = 0; i < message.subtransforms.length; i++) + writer.tag(2, WireType.LengthDelimited).string(message.subtransforms[i]); + /* map inputs = 3; */ + for (let k of Object.keys(message.inputs)) + writer.tag(3, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k).tag(2, WireType.LengthDelimited).string(message.inputs[k]).join(); + /* map outputs = 4; */ + for (let k of Object.keys(message.outputs)) + writer.tag(4, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k).tag(2, WireType.LengthDelimited).string(message.outputs[k]).join(); + /* repeated org.apache.beam.model.pipeline.v1.DisplayData display_data = 6; */ + for (let i = 0; i < message.displayData.length; i++) + DisplayData.internalBinaryWrite(message.displayData[i], writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + /* string environment_id = 7; */ + if (message.environmentId !== "") + writer.tag(7, WireType.LengthDelimited).string(message.environmentId); + /* map annotations = 8; */ + for (let k of Object.keys(message.annotations)) + writer.tag(8, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k).tag(2, WireType.LengthDelimited).bytes(message.annotations[k]).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.PTransform + */ +export const PTransform = new PTransform$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StandardPTransforms$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.StandardPTransforms", []); + } + create(value?: PartialMessage): StandardPTransforms { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StandardPTransforms): StandardPTransforms { + return target ?? this.create(); + } + internalBinaryWrite(message: StandardPTransforms, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.StandardPTransforms + */ +export const StandardPTransforms = new StandardPTransforms$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StandardSideInputTypes$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.StandardSideInputTypes", []); + } + create(value?: PartialMessage): StandardSideInputTypes { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StandardSideInputTypes): StandardSideInputTypes { + return target ?? this.create(); + } + internalBinaryWrite(message: StandardSideInputTypes, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.StandardSideInputTypes + */ +export const StandardSideInputTypes = new StandardSideInputTypes$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class PCollection$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.PCollection", [ + { no: 1, name: "unique_name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "is_bounded", kind: "enum", T: () => ["org.apache.beam.model.pipeline.v1.IsBounded.Enum", IsBounded_Enum] }, + { no: 4, name: "windowing_strategy_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 5, name: "display_data", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => DisplayData } + ]); + } + create(value?: PartialMessage): PCollection { + const message = { uniqueName: "", coderId: "", isBounded: 0, windowingStrategyId: "", displayData: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: PCollection): PCollection { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string unique_name */ 1: + message.uniqueName = reader.string(); + break; + case /* string coder_id */ 2: + message.coderId = reader.string(); + break; + case /* org.apache.beam.model.pipeline.v1.IsBounded.Enum is_bounded */ 3: + message.isBounded = reader.int32(); + break; + case /* string windowing_strategy_id */ 4: + message.windowingStrategyId = reader.string(); + break; + case /* repeated org.apache.beam.model.pipeline.v1.DisplayData display_data */ 5: + message.displayData.push(DisplayData.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: PCollection, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string unique_name = 1; */ + if (message.uniqueName !== "") + writer.tag(1, WireType.LengthDelimited).string(message.uniqueName); + /* string coder_id = 2; */ + if (message.coderId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.coderId); + /* org.apache.beam.model.pipeline.v1.IsBounded.Enum is_bounded = 3; */ + if (message.isBounded !== 0) + writer.tag(3, WireType.Varint).int32(message.isBounded); + /* string windowing_strategy_id = 4; */ + if (message.windowingStrategyId !== "") + writer.tag(4, WireType.LengthDelimited).string(message.windowingStrategyId); + /* repeated org.apache.beam.model.pipeline.v1.DisplayData display_data = 5; */ + for (let i = 0; i < message.displayData.length; i++) + DisplayData.internalBinaryWrite(message.displayData[i], writer.tag(5, WireType.LengthDelimited).fork(), 
options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.PCollection + */ +export const PCollection = new PCollection$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ParDoPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ParDoPayload", [ + { no: 1, name: "do_fn", kind: "message", T: () => FunctionSpec }, + { no: 3, name: "side_inputs", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => SideInput } }, + { no: 4, name: "state_specs", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => StateSpec } }, + { no: 9, name: "timer_family_specs", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => TimerFamilySpec } }, + { no: 7, name: "restriction_coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 8, name: "requests_finalization", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, + { no: 10, name: "requires_time_sorted_input", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, + { no: 11, name: "requires_stable_input", kind: "scalar", T: 8 /*ScalarType.BOOL*/ } + ]); + } + create(value?: PartialMessage): ParDoPayload { + const message = { sideInputs: {}, stateSpecs: {}, timerFamilySpecs: {}, restrictionCoderId: "", requestsFinalization: false, requiresTimeSortedInput: false, requiresStableInput: false }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ParDoPayload): ParDoPayload { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.FunctionSpec do_fn */ 1: + message.doFn = FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, message.doFn); + break; + case /* map side_inputs */ 3: + this.binaryReadMap3(message.sideInputs, reader, options); + break; + case /* map state_specs */ 4: + this.binaryReadMap4(message.stateSpecs, reader, options); + break; + case /* map timer_family_specs */ 9: + this.binaryReadMap9(message.timerFamilySpecs, reader, options); + break; + case /* string restriction_coder_id */ 7: + message.restrictionCoderId = reader.string(); + break; + case /* bool requests_finalization */ 8: + message.requestsFinalization = reader.bool(); + break; + case /* bool requires_time_sorted_input */ 10: + message.requiresTimeSortedInput = reader.bool(); + break; + case /* bool requires_stable_input */ 11: + message.requiresStableInput = reader.bool(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap3(map: ParDoPayload["sideInputs"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ParDoPayload["sideInputs"] | undefined, val: ParDoPayload["sideInputs"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = SideInput.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.ParDoPayload.side_inputs"); + } + } + map[key ?? ""] = val ?? SideInput.create(); + } + private binaryReadMap4(map: ParDoPayload["stateSpecs"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ParDoPayload["stateSpecs"] | undefined, val: ParDoPayload["stateSpecs"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = StateSpec.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.ParDoPayload.state_specs"); + } + } + map[key ?? ""] = val ?? 
StateSpec.create(); + } + private binaryReadMap9(map: ParDoPayload["timerFamilySpecs"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ParDoPayload["timerFamilySpecs"] | undefined, val: ParDoPayload["timerFamilySpecs"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = TimerFamilySpec.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.ParDoPayload.timer_family_specs"); + } + } + map[key ?? ""] = val ?? TimerFamilySpec.create(); + } + internalBinaryWrite(message: ParDoPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.FunctionSpec do_fn = 1; */ + if (message.doFn) + FunctionSpec.internalBinaryWrite(message.doFn, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* map side_inputs = 3; */ + for (let k of Object.keys(message.sideInputs)) { + writer.tag(3, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + SideInput.internalBinaryWrite(message.sideInputs[k], writer, options); + writer.join().join(); + } + /* map state_specs = 4; */ + for (let k of Object.keys(message.stateSpecs)) { + writer.tag(4, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + StateSpec.internalBinaryWrite(message.stateSpecs[k], writer, options); + writer.join().join(); + } + /* map timer_family_specs = 9; */ + for (let k of Object.keys(message.timerFamilySpecs)) { + writer.tag(9, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + 
TimerFamilySpec.internalBinaryWrite(message.timerFamilySpecs[k], writer, options); + writer.join().join(); + } + /* string restriction_coder_id = 7; */ + if (message.restrictionCoderId !== "") + writer.tag(7, WireType.LengthDelimited).string(message.restrictionCoderId); + /* bool requests_finalization = 8; */ + if (message.requestsFinalization !== false) + writer.tag(8, WireType.Varint).bool(message.requestsFinalization); + /* bool requires_time_sorted_input = 10; */ + if (message.requiresTimeSortedInput !== false) + writer.tag(10, WireType.Varint).bool(message.requiresTimeSortedInput); + /* bool requires_stable_input = 11; */ + if (message.requiresStableInput !== false) + writer.tag(11, WireType.Varint).bool(message.requiresStableInput); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ParDoPayload + */ +export const ParDoPayload = new ParDoPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StateSpec$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.StateSpec", [ + { no: 1, name: "read_modify_write_spec", kind: "message", oneof: "spec", T: () => ReadModifyWriteStateSpec }, + { no: 2, name: "bag_spec", kind: "message", oneof: "spec", T: () => BagStateSpec }, + { no: 3, name: "combining_spec", kind: "message", oneof: "spec", T: () => CombiningStateSpec }, + { no: 4, name: "map_spec", kind: "message", oneof: "spec", T: () => MapStateSpec }, + { no: 5, name: "set_spec", kind: "message", oneof: "spec", T: () => SetStateSpec }, + { no: 6, name: "ordered_list_spec", kind: "message", oneof: "spec", T: () => OrderedListStateSpec } + ]); + } + create(value?: PartialMessage): StateSpec { + const message = { spec: { oneofKind: undefined } }; + 
globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StateSpec): StateSpec { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.ReadModifyWriteStateSpec read_modify_write_spec */ 1: + message.spec = { + oneofKind: "readModifyWriteSpec", + readModifyWriteSpec: ReadModifyWriteStateSpec.internalBinaryRead(reader, reader.uint32(), options, (message.spec as any).readModifyWriteSpec) + }; + break; + case /* org.apache.beam.model.pipeline.v1.BagStateSpec bag_spec */ 2: + message.spec = { + oneofKind: "bagSpec", + bagSpec: BagStateSpec.internalBinaryRead(reader, reader.uint32(), options, (message.spec as any).bagSpec) + }; + break; + case /* org.apache.beam.model.pipeline.v1.CombiningStateSpec combining_spec */ 3: + message.spec = { + oneofKind: "combiningSpec", + combiningSpec: CombiningStateSpec.internalBinaryRead(reader, reader.uint32(), options, (message.spec as any).combiningSpec) + }; + break; + case /* org.apache.beam.model.pipeline.v1.MapStateSpec map_spec */ 4: + message.spec = { + oneofKind: "mapSpec", + mapSpec: MapStateSpec.internalBinaryRead(reader, reader.uint32(), options, (message.spec as any).mapSpec) + }; + break; + case /* org.apache.beam.model.pipeline.v1.SetStateSpec set_spec */ 5: + message.spec = { + oneofKind: "setSpec", + setSpec: SetStateSpec.internalBinaryRead(reader, reader.uint32(), options, (message.spec as any).setSpec) + }; + break; + case /* org.apache.beam.model.pipeline.v1.OrderedListStateSpec ordered_list_spec */ 6: + message.spec = { + oneofKind: "orderedListSpec", + orderedListSpec: OrderedListStateSpec.internalBinaryRead(reader, reader.uint32(), options, 
(message.spec as any).orderedListSpec) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: StateSpec, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.ReadModifyWriteStateSpec read_modify_write_spec = 1; */ + if (message.spec.oneofKind === "readModifyWriteSpec") + ReadModifyWriteStateSpec.internalBinaryWrite(message.spec.readModifyWriteSpec, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.BagStateSpec bag_spec = 2; */ + if (message.spec.oneofKind === "bagSpec") + BagStateSpec.internalBinaryWrite(message.spec.bagSpec, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.CombiningStateSpec combining_spec = 3; */ + if (message.spec.oneofKind === "combiningSpec") + CombiningStateSpec.internalBinaryWrite(message.spec.combiningSpec, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.MapStateSpec map_spec = 4; */ + if (message.spec.oneofKind === "mapSpec") + MapStateSpec.internalBinaryWrite(message.spec.mapSpec, writer.tag(4, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.SetStateSpec set_spec = 5; */ + if (message.spec.oneofKind === "setSpec") + SetStateSpec.internalBinaryWrite(message.spec.setSpec, writer.tag(5, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.OrderedListStateSpec ordered_list_spec = 6; */ + if (message.spec.oneofKind === "orderedListSpec") + OrderedListStateSpec.internalBinaryWrite(message.spec.orderedListSpec, 
writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.StateSpec + */ +export const StateSpec = new StateSpec$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ReadModifyWriteStateSpec$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ReadModifyWriteStateSpec", [ + { no: 1, name: "coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ReadModifyWriteStateSpec { + const message = { coderId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ReadModifyWriteStateSpec): ReadModifyWriteStateSpec { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string coder_id */ 1: + message.coderId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ReadModifyWriteStateSpec, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string coder_id = 1; */ + if (message.coderId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.coderId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ReadModifyWriteStateSpec + */ +export const ReadModifyWriteStateSpec = new ReadModifyWriteStateSpec$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class BagStateSpec$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.BagStateSpec", [ + { no: 1, name: "element_coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): BagStateSpec { + const message = { elementCoderId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BagStateSpec): BagStateSpec { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string element_coder_id */ 1: + message.elementCoderId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: BagStateSpec, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string element_coder_id = 1; */ + if (message.elementCoderId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.elementCoderId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.BagStateSpec + */ +export const BagStateSpec = new BagStateSpec$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class OrderedListStateSpec$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.OrderedListStateSpec", [ + { no: 1, name: "element_coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): OrderedListStateSpec { + const message = { elementCoderId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: OrderedListStateSpec): OrderedListStateSpec { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string element_coder_id */ 1: + message.elementCoderId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: OrderedListStateSpec, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string element_coder_id = 1; */ + if (message.elementCoderId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.elementCoderId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.OrderedListStateSpec + */ +export const OrderedListStateSpec = new OrderedListStateSpec$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class CombiningStateSpec$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.CombiningStateSpec", [ + { no: 1, name: "accumulator_coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "combine_fn", kind: "message", T: () => FunctionSpec } + ]); + } + create(value?: PartialMessage): CombiningStateSpec { + const message = { accumulatorCoderId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CombiningStateSpec): CombiningStateSpec { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string accumulator_coder_id */ 1: + message.accumulatorCoderId = reader.string(); + break; + case /* org.apache.beam.model.pipeline.v1.FunctionSpec combine_fn */ 2: + message.combineFn = FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, message.combineFn); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: CombiningStateSpec, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string accumulator_coder_id = 1; */ + if (message.accumulatorCoderId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.accumulatorCoderId); + /* org.apache.beam.model.pipeline.v1.FunctionSpec combine_fn = 2; */ + if (message.combineFn) + FunctionSpec.internalBinaryWrite(message.combineFn, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.CombiningStateSpec + */ +export const CombiningStateSpec = new CombiningStateSpec$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class MapStateSpec$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.MapStateSpec", [ + { no: 1, name: "key_coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "value_coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): MapStateSpec { + const message = { keyCoderId: "", valueCoderId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MapStateSpec): MapStateSpec { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string key_coder_id */ 1: + message.keyCoderId = reader.string(); + break; + case /* string value_coder_id */ 2: + message.valueCoderId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: MapStateSpec, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string key_coder_id = 1; */ + if (message.keyCoderId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.keyCoderId); + /* string value_coder_id = 2; */ + if (message.valueCoderId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.valueCoderId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.MapStateSpec + */ +export const MapStateSpec = new MapStateSpec$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class SetStateSpec$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.SetStateSpec", [ + { no: 1, name: "element_coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): SetStateSpec { + const message = { elementCoderId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SetStateSpec): SetStateSpec { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string element_coder_id */ 1: + message.elementCoderId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: SetStateSpec, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string element_coder_id = 1; */ + if (message.elementCoderId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.elementCoderId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.SetStateSpec + */ +export const SetStateSpec = new SetStateSpec$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class TimerFamilySpec$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.TimerFamilySpec", [ + { no: 1, name: "time_domain", kind: "enum", T: () => ["org.apache.beam.model.pipeline.v1.TimeDomain.Enum", TimeDomain_Enum] }, + { no: 2, name: "timer_family_coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): TimerFamilySpec { + const message = { timeDomain: 0, timerFamilyCoderId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: 
BinaryReadOptions, target?: TimerFamilySpec): TimerFamilySpec { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.TimeDomain.Enum time_domain */ 1: + message.timeDomain = reader.int32(); + break; + case /* string timer_family_coder_id */ 2: + message.timerFamilyCoderId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: TimerFamilySpec, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.TimeDomain.Enum time_domain = 1; */ + if (message.timeDomain !== 0) + writer.tag(1, WireType.Varint).int32(message.timeDomain); + /* string timer_family_coder_id = 2; */ + if (message.timerFamilyCoderId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.timerFamilyCoderId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.TimerFamilySpec + */ +export const TimerFamilySpec = new TimerFamilySpec$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class IsBounded$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.IsBounded", []); + } + create(value?: PartialMessage): IsBounded { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: IsBounded): IsBounded { + return target ?? this.create(); + } + internalBinaryWrite(message: IsBounded, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.IsBounded + */ +export const IsBounded = new IsBounded$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ReadPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ReadPayload", [ + { no: 1, name: "source", kind: "message", T: () => FunctionSpec }, + { no: 2, name: "is_bounded", kind: "enum", T: () => ["org.apache.beam.model.pipeline.v1.IsBounded.Enum", IsBounded_Enum] } + ]); + } + create(value?: PartialMessage): ReadPayload { + const message = { isBounded: 0 }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ReadPayload): ReadPayload { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.FunctionSpec source */ 1: + message.source = FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, message.source); + break; + case /* org.apache.beam.model.pipeline.v1.IsBounded.Enum is_bounded */ 2: + message.isBounded = reader.int32(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ReadPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.FunctionSpec source = 1; */ + if (message.source) + FunctionSpec.internalBinaryWrite(message.source, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.IsBounded.Enum is_bounded = 2; */ + if (message.isBounded !== 0) + writer.tag(2, WireType.Varint).int32(message.isBounded); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ReadPayload + */ +export const ReadPayload = new ReadPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class WindowIntoPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.WindowIntoPayload", [ + { no: 1, name: "window_fn", kind: "message", T: () => FunctionSpec } + ]); + } + create(value?: PartialMessage): WindowIntoPayload { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: WindowIntoPayload): WindowIntoPayload { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.FunctionSpec window_fn */ 1: + message.windowFn = FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, message.windowFn); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: WindowIntoPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.FunctionSpec window_fn = 1; */ + if (message.windowFn) + FunctionSpec.internalBinaryWrite(message.windowFn, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.WindowIntoPayload + */ +export const WindowIntoPayload = new WindowIntoPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class CombinePayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.CombinePayload", [ + { no: 1, name: "combine_fn", kind: "message", T: () => FunctionSpec }, + { no: 2, name: "accumulator_coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): CombinePayload { + const message = { accumulatorCoderId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: CombinePayload): CombinePayload { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.FunctionSpec combine_fn */ 1: + message.combineFn = FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, message.combineFn); + break; + case /* string accumulator_coder_id */ 2: + message.accumulatorCoderId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: CombinePayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.FunctionSpec combine_fn = 1; */ + if (message.combineFn) + FunctionSpec.internalBinaryWrite(message.combineFn, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* string accumulator_coder_id = 2; */ + if (message.accumulatorCoderId !== "") + writer.tag(2, WireType.LengthDelimited).string(message.accumulatorCoderId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.CombinePayload + */ +export const CombinePayload = new CombinePayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class TestStreamPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.TestStreamPayload", [ + { no: 1, name: "coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "events", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => TestStreamPayload_Event }, + { no: 3, name: "endpoint", kind: "message", T: () => ApiServiceDescriptor } + ]); + } + create(value?: PartialMessage): TestStreamPayload { + const message = { coderId: "", events: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TestStreamPayload): TestStreamPayload { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string coder_id */ 1: + message.coderId = reader.string(); + break; + case /* repeated org.apache.beam.model.pipeline.v1.TestStreamPayload.Event events */ 2: + message.events.push(TestStreamPayload_Event.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor endpoint */ 3: + message.endpoint = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.endpoint); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: TestStreamPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string coder_id = 1; */ + if (message.coderId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.coderId); + /* repeated org.apache.beam.model.pipeline.v1.TestStreamPayload.Event events = 2; */ + for (let i = 0; i < message.events.length; i++) + TestStreamPayload_Event.internalBinaryWrite(message.events[i], writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor endpoint = 3; */ + if (message.endpoint) + ApiServiceDescriptor.internalBinaryWrite(message.endpoint, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.TestStreamPayload + */ +export const TestStreamPayload = new TestStreamPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class TestStreamPayload_Event$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.TestStreamPayload.Event", [ + { no: 1, name: "watermark_event", kind: "message", oneof: "event", T: () => TestStreamPayload_Event_AdvanceWatermark }, + { no: 2, name: "processing_time_event", kind: "message", oneof: "event", T: () => TestStreamPayload_Event_AdvanceProcessingTime }, + { no: 3, name: "element_event", kind: "message", oneof: "event", T: () => TestStreamPayload_Event_AddElements } + ]); + } + create(value?: PartialMessage): TestStreamPayload_Event { + const message = { event: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TestStreamPayload_Event): TestStreamPayload_Event { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AdvanceWatermark watermark_event */ 1: + message.event = { + oneofKind: "watermarkEvent", + watermarkEvent: TestStreamPayload_Event_AdvanceWatermark.internalBinaryRead(reader, reader.uint32(), options, (message.event as any).watermarkEvent) + }; + break; + case /* org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AdvanceProcessingTime processing_time_event */ 2: + message.event = { + oneofKind: "processingTimeEvent", + processingTimeEvent: TestStreamPayload_Event_AdvanceProcessingTime.internalBinaryRead(reader, reader.uint32(), options, (message.event as any).processingTimeEvent) + }; + break; + case /* org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AddElements element_event */ 3: + message.event = { + oneofKind: "elementEvent", + elementEvent: TestStreamPayload_Event_AddElements.internalBinaryRead(reader, reader.uint32(), options, (message.event as any).elementEvent) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: TestStreamPayload_Event, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AdvanceWatermark watermark_event = 1; */ + if (message.event.oneofKind === "watermarkEvent") + TestStreamPayload_Event_AdvanceWatermark.internalBinaryWrite(message.event.watermarkEvent, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AdvanceProcessingTime processing_time_event = 2; */ + if (message.event.oneofKind === "processingTimeEvent") + TestStreamPayload_Event_AdvanceProcessingTime.internalBinaryWrite(message.event.processingTimeEvent, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AddElements element_event = 3; */ + if (message.event.oneofKind === "elementEvent") + TestStreamPayload_Event_AddElements.internalBinaryWrite(message.event.elementEvent, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.TestStreamPayload.Event + */ +export const TestStreamPayload_Event = new TestStreamPayload_Event$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class TestStreamPayload_Event_AdvanceWatermark$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AdvanceWatermark", [ + { no: 1, name: "new_watermark", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "tag", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): TestStreamPayload_Event_AdvanceWatermark { + const message = { newWatermark: 0n, tag: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TestStreamPayload_Event_AdvanceWatermark): TestStreamPayload_Event_AdvanceWatermark { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 new_watermark */ 1: + message.newWatermark = reader.int64().toBigInt(); + break; + case /* string tag */ 2: + message.tag = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: TestStreamPayload_Event_AdvanceWatermark, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 new_watermark = 1; */ + if (message.newWatermark !== 0n) + writer.tag(1, WireType.Varint).int64(message.newWatermark); + /* string tag = 2; */ + if (message.tag !== "") + writer.tag(2, WireType.LengthDelimited).string(message.tag); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AdvanceWatermark + */ +export const TestStreamPayload_Event_AdvanceWatermark = new TestStreamPayload_Event_AdvanceWatermark$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class TestStreamPayload_Event_AdvanceProcessingTime$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AdvanceProcessingTime", [ + { no: 1, name: "advance_duration", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): TestStreamPayload_Event_AdvanceProcessingTime { + const message = { advanceDuration: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TestStreamPayload_Event_AdvanceProcessingTime): TestStreamPayload_Event_AdvanceProcessingTime { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 advance_duration */ 1: + message.advanceDuration = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: TestStreamPayload_Event_AdvanceProcessingTime, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 advance_duration = 1; */ + if (message.advanceDuration !== 0n) + writer.tag(1, WireType.Varint).int64(message.advanceDuration); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AdvanceProcessingTime + */ +export const TestStreamPayload_Event_AdvanceProcessingTime = new TestStreamPayload_Event_AdvanceProcessingTime$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class TestStreamPayload_Event_AddElements$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AddElements", [ + { no: 1, name: "elements", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => TestStreamPayload_TimestampedElement }, + { no: 3, name: "tag", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): TestStreamPayload_Event_AddElements { + const message = { elements: [], tag: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== 
undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TestStreamPayload_Event_AddElements): TestStreamPayload_Event_AddElements { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.pipeline.v1.TestStreamPayload.TimestampedElement elements */ 1: + message.elements.push(TestStreamPayload_TimestampedElement.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* string tag */ 3: + message.tag = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: TestStreamPayload_Event_AddElements, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.pipeline.v1.TestStreamPayload.TimestampedElement elements = 1; */ + for (let i = 0; i < message.elements.length; i++) + TestStreamPayload_TimestampedElement.internalBinaryWrite(message.elements[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* string tag = 3; */ + if (message.tag !== "") + writer.tag(3, WireType.LengthDelimited).string(message.tag); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.TestStreamPayload.Event.AddElements + */ +export const TestStreamPayload_Event_AddElements = new TestStreamPayload_Event_AddElements$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class TestStreamPayload_TimestampedElement$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.TestStreamPayload.TimestampedElement", [ + { no: 1, name: "encoded_element", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 2, name: "timestamp", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): TestStreamPayload_TimestampedElement { + const message = { encodedElement: new Uint8Array(0), timestamp: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TestStreamPayload_TimestampedElement): TestStreamPayload_TimestampedElement { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bytes encoded_element */ 1: + message.encodedElement = reader.bytes(); + break; + case /* int64 timestamp */ 2: + message.timestamp = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: TestStreamPayload_TimestampedElement, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bytes encoded_element = 1; */ + if (message.encodedElement.length) + writer.tag(1, WireType.LengthDelimited).bytes(message.encodedElement); + /* int64 timestamp = 2; */ + if (message.timestamp !== 0n) + writer.tag(2, WireType.Varint).int64(message.timestamp); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.TestStreamPayload.TimestampedElement + */ +export const TestStreamPayload_TimestampedElement = new TestStreamPayload_TimestampedElement$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class EventsRequest$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.EventsRequest", [ + { no: 1, name: "output_ids", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): EventsRequest { + const message = { outputIds: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: EventsRequest): EventsRequest { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated string output_ids */ 1: + message.outputIds.push(reader.string()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: EventsRequest, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated string output_ids = 1; */ + for (let i = 0; i < message.outputIds.length; i++) + writer.tag(1, WireType.LengthDelimited).string(message.outputIds[i]); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.EventsRequest + */ +export const EventsRequest = new EventsRequest$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class WriteFilesPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.WriteFilesPayload", [ + { no: 1, name: "sink", kind: "message", T: () => FunctionSpec }, + { no: 2, name: "format_function", kind: "message", T: () => FunctionSpec }, + { no: 3, name: "windowed_writes", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, + { no: 4, name: "runner_determined_sharding", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, + { no: 5, name: "side_inputs", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "message", T: () => SideInput } } + ]); + } + create(value?: PartialMessage): WriteFilesPayload { + const message = { windowedWrites: false, runnerDeterminedSharding: false, sideInputs: {} 
}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: WriteFilesPayload): WriteFilesPayload { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.FunctionSpec sink */ 1: + message.sink = FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, message.sink); + break; + case /* org.apache.beam.model.pipeline.v1.FunctionSpec format_function */ 2: + message.formatFunction = FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, message.formatFunction); + break; + case /* bool windowed_writes */ 3: + message.windowedWrites = reader.bool(); + break; + case /* bool runner_determined_sharding */ 4: + message.runnerDeterminedSharding = reader.bool(); + break; + case /* map side_inputs */ 5: + this.binaryReadMap5(message.sideInputs, reader, options); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap5(map: WriteFilesPayload["sideInputs"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof WriteFilesPayload["sideInputs"] | undefined, val: WriteFilesPayload["sideInputs"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = SideInput.internalBinaryRead(reader, reader.uint32(), options); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.WriteFilesPayload.side_inputs"); + } + } + map[key ?? ""] = val ?? SideInput.create(); + } + internalBinaryWrite(message: WriteFilesPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.FunctionSpec sink = 1; */ + if (message.sink) + FunctionSpec.internalBinaryWrite(message.sink, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.FunctionSpec format_function = 2; */ + if (message.formatFunction) + FunctionSpec.internalBinaryWrite(message.formatFunction, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* bool windowed_writes = 3; */ + if (message.windowedWrites !== false) + writer.tag(3, WireType.Varint).bool(message.windowedWrites); + /* bool runner_determined_sharding = 4; */ + if (message.runnerDeterminedSharding !== false) + writer.tag(4, WireType.Varint).bool(message.runnerDeterminedSharding); + /* map side_inputs = 5; */ + for (let k of Object.keys(message.sideInputs)) { + writer.tag(5, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k); + writer.tag(2, WireType.LengthDelimited).fork(); + SideInput.internalBinaryWrite(message.sideInputs[k], writer, options); + writer.join().join(); + } + 
let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.WriteFilesPayload + */ +export const WriteFilesPayload = new WriteFilesPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class PubSubReadPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.PubSubReadPayload", [ + { no: 1, name: "topic", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "subscription", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "timestamp_attribute", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "id_attribute", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 5, name: "with_attributes", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, + { no: 6, name: "topic_runtime_overridden", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 7, name: "subscription_runtime_overridden", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): PubSubReadPayload { + const message = { topic: "", subscription: "", timestampAttribute: "", idAttribute: "", withAttributes: false, topicRuntimeOverridden: "", subscriptionRuntimeOverridden: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: PubSubReadPayload): PubSubReadPayload { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string topic */ 1: + message.topic = reader.string(); + break; + case /* string subscription */ 2: + message.subscription = reader.string(); + break; + case /* string timestamp_attribute */ 3: + message.timestampAttribute = reader.string(); + break; + case /* string id_attribute */ 4: + message.idAttribute = reader.string(); + break; + case /* bool with_attributes */ 5: + message.withAttributes = reader.bool(); + break; + case /* string topic_runtime_overridden */ 6: + message.topicRuntimeOverridden = reader.string(); + break; + case /* string subscription_runtime_overridden */ 7: + message.subscriptionRuntimeOverridden = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: PubSubReadPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string topic = 1; */ + if (message.topic !== "") + writer.tag(1, WireType.LengthDelimited).string(message.topic); + /* string subscription = 2; */ + if (message.subscription !== "") + writer.tag(2, WireType.LengthDelimited).string(message.subscription); + /* string timestamp_attribute = 3; */ + if (message.timestampAttribute !== "") + writer.tag(3, WireType.LengthDelimited).string(message.timestampAttribute); + /* string id_attribute = 4; */ + if (message.idAttribute !== "") + writer.tag(4, WireType.LengthDelimited).string(message.idAttribute); + /* bool with_attributes = 5; */ + if (message.withAttributes !== false) + writer.tag(5, WireType.Varint).bool(message.withAttributes); + /* string topic_runtime_overridden = 6; */ + if (message.topicRuntimeOverridden !== "") + writer.tag(6, WireType.LengthDelimited).string(message.topicRuntimeOverridden); + /* string subscription_runtime_overridden = 7; */ + if (message.subscriptionRuntimeOverridden !== "") + writer.tag(7, WireType.LengthDelimited).string(message.subscriptionRuntimeOverridden); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.PubSubReadPayload + */ +export const PubSubReadPayload = new PubSubReadPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class PubSubWritePayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.PubSubWritePayload", [ + { no: 1, name: "topic", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "timestamp_attribute", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "id_attribute", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "topic_runtime_overridden", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): PubSubWritePayload { + const message = { topic: "", timestampAttribute: "", idAttribute: "", topicRuntimeOverridden: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: PubSubWritePayload): PubSubWritePayload { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string topic */ 1: + message.topic = reader.string(); + break; + case /* string timestamp_attribute */ 2: + message.timestampAttribute = reader.string(); + break; + case /* string id_attribute */ 3: + message.idAttribute = reader.string(); + break; + case /* string topic_runtime_overridden */ 4: + message.topicRuntimeOverridden = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: PubSubWritePayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string topic = 1; */ + if (message.topic !== "") + writer.tag(1, WireType.LengthDelimited).string(message.topic); + /* string timestamp_attribute = 2; */ + if (message.timestampAttribute !== "") + writer.tag(2, WireType.LengthDelimited).string(message.timestampAttribute); + /* string id_attribute = 3; */ + if (message.idAttribute !== "") + writer.tag(3, WireType.LengthDelimited).string(message.idAttribute); + /* string topic_runtime_overridden = 4; */ + if (message.topicRuntimeOverridden !== "") + writer.tag(4, WireType.LengthDelimited).string(message.topicRuntimeOverridden); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.PubSubWritePayload + */ +export const PubSubWritePayload = new PubSubWritePayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class GroupIntoBatchesPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.GroupIntoBatchesPayload", [ + { no: 1, name: "batch_size", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 3, name: "batch_size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 2, name: "max_buffering_duration_millis", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): GroupIntoBatchesPayload { + const message = { batchSize: 0n, batchSizeBytes: 0n, maxBufferingDurationMillis: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: GroupIntoBatchesPayload): GroupIntoBatchesPayload { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 batch_size */ 1: + message.batchSize = reader.int64().toBigInt(); + break; + case /* int64 batch_size_bytes */ 3: + message.batchSizeBytes = reader.int64().toBigInt(); + break; + case /* int64 max_buffering_duration_millis */ 2: + message.maxBufferingDurationMillis = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: GroupIntoBatchesPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 batch_size = 1; */ + if (message.batchSize !== 0n) + writer.tag(1, WireType.Varint).int64(message.batchSize); + /* int64 batch_size_bytes = 3; */ + if (message.batchSizeBytes !== 0n) + writer.tag(3, WireType.Varint).int64(message.batchSizeBytes); + /* int64 max_buffering_duration_millis = 2; */ + if (message.maxBufferingDurationMillis !== 0n) + writer.tag(2, WireType.Varint).int64(message.maxBufferingDurationMillis); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.GroupIntoBatchesPayload + */ +export const GroupIntoBatchesPayload = new GroupIntoBatchesPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Coder$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Coder", [ + { no: 1, name: "spec", kind: "message", T: () => FunctionSpec }, + { no: 2, name: "component_coder_ids", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): Coder { + const message = { componentCoderIds: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Coder): Coder { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.FunctionSpec spec */ 1: + message.spec = FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, message.spec); + break; + case /* repeated string component_coder_ids */ 2: + message.componentCoderIds.push(reader.string()); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Coder, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.FunctionSpec spec = 1; */ + if (message.spec) + FunctionSpec.internalBinaryWrite(message.spec, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* repeated string component_coder_ids = 2; */ + for (let i = 0; i < message.componentCoderIds.length; i++) + writer.tag(2, WireType.LengthDelimited).string(message.componentCoderIds[i]); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Coder + */ +export const Coder = new Coder$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StandardCoders$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.StandardCoders", []); + } + create(value?: PartialMessage): StandardCoders { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StandardCoders): StandardCoders { + return target ?? this.create(); + } + internalBinaryWrite(message: StandardCoders, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.StandardCoders + */ +export const StandardCoders = new StandardCoders$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class WindowingStrategy$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.WindowingStrategy", [ + { no: 1, name: "window_fn", kind: "message", T: () => FunctionSpec }, + { no: 2, name: "merge_status", kind: "enum", T: () => ["org.apache.beam.model.pipeline.v1.MergeStatus.Enum", MergeStatus_Enum] }, + { no: 3, name: "window_coder_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "trigger", kind: "message", T: () => Trigger }, + { no: 5, name: "accumulation_mode", kind: "enum", T: () => ["org.apache.beam.model.pipeline.v1.AccumulationMode.Enum", AccumulationMode_Enum] }, + { no: 6, name: "output_time", kind: "enum", T: () => ["org.apache.beam.model.pipeline.v1.OutputTime.Enum", OutputTime_Enum] }, + { no: 7, name: "closing_behavior", kind: "enum", T: () => ["org.apache.beam.model.pipeline.v1.ClosingBehavior.Enum", ClosingBehavior_Enum] }, + { no: 8, name: "allowed_lateness", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 9, name: "on_time_behavior", kind: "enum", T: () => ["org.apache.beam.model.pipeline.v1.OnTimeBehavior.Enum", OnTimeBehavior_Enum] }, + { no: 10, name: "assigns_to_one_window", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, + { no: 11, name: "environment_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): WindowingStrategy { + const message = { mergeStatus: 0, windowCoderId: "", accumulationMode: 0, outputTime: 0, closingBehavior: 0, allowedLateness: 0n, onTimeBehavior: 0, assignsToOneWindow: false, environmentId: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { 
enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: WindowingStrategy): WindowingStrategy { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.FunctionSpec window_fn */ 1: + message.windowFn = FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, message.windowFn); + break; + case /* org.apache.beam.model.pipeline.v1.MergeStatus.Enum merge_status */ 2: + message.mergeStatus = reader.int32(); + break; + case /* string window_coder_id */ 3: + message.windowCoderId = reader.string(); + break; + case /* org.apache.beam.model.pipeline.v1.Trigger trigger */ 4: + message.trigger = Trigger.internalBinaryRead(reader, reader.uint32(), options, message.trigger); + break; + case /* org.apache.beam.model.pipeline.v1.AccumulationMode.Enum accumulation_mode */ 5: + message.accumulationMode = reader.int32(); + break; + case /* org.apache.beam.model.pipeline.v1.OutputTime.Enum output_time */ 6: + message.outputTime = reader.int32(); + break; + case /* org.apache.beam.model.pipeline.v1.ClosingBehavior.Enum closing_behavior */ 7: + message.closingBehavior = reader.int32(); + break; + case /* int64 allowed_lateness */ 8: + message.allowedLateness = reader.int64().toBigInt(); + break; + case /* org.apache.beam.model.pipeline.v1.OnTimeBehavior.Enum on_time_behavior */ 9: + message.onTimeBehavior = reader.int32(); + break; + case /* bool assigns_to_one_window */ 10: + message.assignsToOneWindow = reader.bool(); + break; + case /* string environment_id */ 11: + message.environmentId = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type 
${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: WindowingStrategy, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.FunctionSpec window_fn = 1; */ + if (message.windowFn) + FunctionSpec.internalBinaryWrite(message.windowFn, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.MergeStatus.Enum merge_status = 2; */ + if (message.mergeStatus !== 0) + writer.tag(2, WireType.Varint).int32(message.mergeStatus); + /* string window_coder_id = 3; */ + if (message.windowCoderId !== "") + writer.tag(3, WireType.LengthDelimited).string(message.windowCoderId); + /* org.apache.beam.model.pipeline.v1.Trigger trigger = 4; */ + if (message.trigger) + Trigger.internalBinaryWrite(message.trigger, writer.tag(4, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.AccumulationMode.Enum accumulation_mode = 5; */ + if (message.accumulationMode !== 0) + writer.tag(5, WireType.Varint).int32(message.accumulationMode); + /* org.apache.beam.model.pipeline.v1.OutputTime.Enum output_time = 6; */ + if (message.outputTime !== 0) + writer.tag(6, WireType.Varint).int32(message.outputTime); + /* org.apache.beam.model.pipeline.v1.ClosingBehavior.Enum closing_behavior = 7; */ + if (message.closingBehavior !== 0) + writer.tag(7, WireType.Varint).int32(message.closingBehavior); + /* int64 allowed_lateness = 8; */ + if (message.allowedLateness !== 0n) + writer.tag(8, WireType.Varint).int64(message.allowedLateness); + /* org.apache.beam.model.pipeline.v1.OnTimeBehavior.Enum on_time_behavior = 9; */ + if (message.onTimeBehavior !== 0) + writer.tag(9, WireType.Varint).int32(message.onTimeBehavior); + /* bool assigns_to_one_window = 10; */ + if 
(message.assignsToOneWindow !== false) + writer.tag(10, WireType.Varint).bool(message.assignsToOneWindow); + /* string environment_id = 11; */ + if (message.environmentId !== "") + writer.tag(11, WireType.LengthDelimited).string(message.environmentId); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.WindowingStrategy + */ +export const WindowingStrategy = new WindowingStrategy$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class MergeStatus$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.MergeStatus", []); + } + create(value?: PartialMessage): MergeStatus { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MergeStatus): MergeStatus { + return target ?? this.create(); + } + internalBinaryWrite(message: MergeStatus, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.MergeStatus + */ +export const MergeStatus = new MergeStatus$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class AccumulationMode$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.AccumulationMode", []); + } + create(value?: PartialMessage): AccumulationMode { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: AccumulationMode): AccumulationMode { + return target ?? this.create(); + } + internalBinaryWrite(message: AccumulationMode, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.AccumulationMode + */ +export const AccumulationMode = new AccumulationMode$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ClosingBehavior$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ClosingBehavior", []); + } + create(value?: PartialMessage): ClosingBehavior { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ClosingBehavior): ClosingBehavior { + return target ?? this.create(); + } + internalBinaryWrite(message: ClosingBehavior, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ClosingBehavior + */ +export const ClosingBehavior = new ClosingBehavior$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class OnTimeBehavior$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.OnTimeBehavior", []); + } + create(value?: PartialMessage): OnTimeBehavior { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: OnTimeBehavior): OnTimeBehavior { + return target ?? this.create(); + } + internalBinaryWrite(message: OnTimeBehavior, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.OnTimeBehavior + */ +export const OnTimeBehavior = new OnTimeBehavior$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class OutputTime$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.OutputTime", []); + } + create(value?: PartialMessage): OutputTime { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: OutputTime): OutputTime { + return target ?? 
this.create(); + } + internalBinaryWrite(message: OutputTime, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.OutputTime + */ +export const OutputTime = new OutputTime$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class TimeDomain$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.TimeDomain", []); + } + create(value?: PartialMessage): TimeDomain { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TimeDomain): TimeDomain { + return target ?? this.create(); + } + internalBinaryWrite(message: TimeDomain, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.TimeDomain + */ +export const TimeDomain = new TimeDomain$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Trigger$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Trigger", [ + { no: 1, name: "after_all", kind: "message", oneof: "trigger", T: () => Trigger_AfterAll }, + { no: 2, name: "after_any", kind: "message", oneof: "trigger", T: () => Trigger_AfterAny }, + { no: 3, name: "after_each", kind: "message", oneof: "trigger", T: () => Trigger_AfterEach }, + { no: 4, name: "after_end_of_window", kind: "message", oneof: "trigger", T: () => Trigger_AfterEndOfWindow }, + { no: 5, name: "after_processing_time", kind: "message", oneof: "trigger", T: () => Trigger_AfterProcessingTime }, + { no: 6, name: "after_synchronized_processing_time", kind: "message", oneof: "trigger", T: () => Trigger_AfterSynchronizedProcessingTime }, + { no: 12, name: "always", kind: "message", oneof: "trigger", T: () => Trigger_Always }, + { no: 7, name: "default", kind: "message", oneof: "trigger", T: () => Trigger_Default }, + { no: 8, name: "element_count", kind: "message", oneof: "trigger", T: () => Trigger_ElementCount }, + { no: 9, name: "never", kind: "message", oneof: "trigger", T: () => Trigger_Never }, + { no: 10, name: "or_finally", kind: "message", oneof: "trigger", T: () => Trigger_OrFinally }, + { no: 11, name: "repeat", kind: "message", oneof: "trigger", T: () => Trigger_Repeat } + ]); + } + create(value?: PartialMessage): Trigger { + const message = { trigger: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: 
IBinaryReader, length: number, options: BinaryReadOptions, target?: Trigger): Trigger { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.Trigger.AfterAll after_all */ 1: + message.trigger = { + oneofKind: "afterAll", + afterAll: Trigger_AfterAll.internalBinaryRead(reader, reader.uint32(), options, (message.trigger as any).afterAll) + }; + break; + case /* org.apache.beam.model.pipeline.v1.Trigger.AfterAny after_any */ 2: + message.trigger = { + oneofKind: "afterAny", + afterAny: Trigger_AfterAny.internalBinaryRead(reader, reader.uint32(), options, (message.trigger as any).afterAny) + }; + break; + case /* org.apache.beam.model.pipeline.v1.Trigger.AfterEach after_each */ 3: + message.trigger = { + oneofKind: "afterEach", + afterEach: Trigger_AfterEach.internalBinaryRead(reader, reader.uint32(), options, (message.trigger as any).afterEach) + }; + break; + case /* org.apache.beam.model.pipeline.v1.Trigger.AfterEndOfWindow after_end_of_window */ 4: + message.trigger = { + oneofKind: "afterEndOfWindow", + afterEndOfWindow: Trigger_AfterEndOfWindow.internalBinaryRead(reader, reader.uint32(), options, (message.trigger as any).afterEndOfWindow) + }; + break; + case /* org.apache.beam.model.pipeline.v1.Trigger.AfterProcessingTime after_processing_time */ 5: + message.trigger = { + oneofKind: "afterProcessingTime", + afterProcessingTime: Trigger_AfterProcessingTime.internalBinaryRead(reader, reader.uint32(), options, (message.trigger as any).afterProcessingTime) + }; + break; + case /* org.apache.beam.model.pipeline.v1.Trigger.AfterSynchronizedProcessingTime after_synchronized_processing_time */ 6: + message.trigger = { + oneofKind: "afterSynchronizedProcessingTime", + afterSynchronizedProcessingTime: Trigger_AfterSynchronizedProcessingTime.internalBinaryRead(reader, reader.uint32(), options, (message.trigger as 
any).afterSynchronizedProcessingTime) + }; + break; + case /* org.apache.beam.model.pipeline.v1.Trigger.Always always */ 12: + message.trigger = { + oneofKind: "always", + always: Trigger_Always.internalBinaryRead(reader, reader.uint32(), options, (message.trigger as any).always) + }; + break; + case /* org.apache.beam.model.pipeline.v1.Trigger.Default default */ 7: + message.trigger = { + oneofKind: "default", + default: Trigger_Default.internalBinaryRead(reader, reader.uint32(), options, (message.trigger as any).default) + }; + break; + case /* org.apache.beam.model.pipeline.v1.Trigger.ElementCount element_count */ 8: + message.trigger = { + oneofKind: "elementCount", + elementCount: Trigger_ElementCount.internalBinaryRead(reader, reader.uint32(), options, (message.trigger as any).elementCount) + }; + break; + case /* org.apache.beam.model.pipeline.v1.Trigger.Never never */ 9: + message.trigger = { + oneofKind: "never", + never: Trigger_Never.internalBinaryRead(reader, reader.uint32(), options, (message.trigger as any).never) + }; + break; + case /* org.apache.beam.model.pipeline.v1.Trigger.OrFinally or_finally */ 10: + message.trigger = { + oneofKind: "orFinally", + orFinally: Trigger_OrFinally.internalBinaryRead(reader, reader.uint32(), options, (message.trigger as any).orFinally) + }; + break; + case /* org.apache.beam.model.pipeline.v1.Trigger.Repeat repeat */ 11: + message.trigger = { + oneofKind: "repeat", + repeat: Trigger_Repeat.internalBinaryRead(reader, reader.uint32(), options, (message.trigger as any).repeat) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Trigger, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.Trigger.AfterAll after_all = 1; */ + if (message.trigger.oneofKind === "afterAll") + Trigger_AfterAll.internalBinaryWrite(message.trigger.afterAll, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.Trigger.AfterAny after_any = 2; */ + if (message.trigger.oneofKind === "afterAny") + Trigger_AfterAny.internalBinaryWrite(message.trigger.afterAny, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.Trigger.AfterEach after_each = 3; */ + if (message.trigger.oneofKind === "afterEach") + Trigger_AfterEach.internalBinaryWrite(message.trigger.afterEach, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.Trigger.AfterEndOfWindow after_end_of_window = 4; */ + if (message.trigger.oneofKind === "afterEndOfWindow") + Trigger_AfterEndOfWindow.internalBinaryWrite(message.trigger.afterEndOfWindow, writer.tag(4, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.Trigger.AfterProcessingTime after_processing_time = 5; */ + if (message.trigger.oneofKind === "afterProcessingTime") + Trigger_AfterProcessingTime.internalBinaryWrite(message.trigger.afterProcessingTime, writer.tag(5, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.Trigger.AfterSynchronizedProcessingTime after_synchronized_processing_time = 6; */ + if (message.trigger.oneofKind === "afterSynchronizedProcessingTime") + Trigger_AfterSynchronizedProcessingTime.internalBinaryWrite(message.trigger.afterSynchronizedProcessingTime, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + /* 
org.apache.beam.model.pipeline.v1.Trigger.Always always = 12; */ + if (message.trigger.oneofKind === "always") + Trigger_Always.internalBinaryWrite(message.trigger.always, writer.tag(12, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.Trigger.Default default = 7; */ + if (message.trigger.oneofKind === "default") + Trigger_Default.internalBinaryWrite(message.trigger.default, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.Trigger.ElementCount element_count = 8; */ + if (message.trigger.oneofKind === "elementCount") + Trigger_ElementCount.internalBinaryWrite(message.trigger.elementCount, writer.tag(8, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.Trigger.Never never = 9; */ + if (message.trigger.oneofKind === "never") + Trigger_Never.internalBinaryWrite(message.trigger.never, writer.tag(9, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.Trigger.OrFinally or_finally = 10; */ + if (message.trigger.oneofKind === "orFinally") + Trigger_OrFinally.internalBinaryWrite(message.trigger.orFinally, writer.tag(10, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.Trigger.Repeat repeat = 11; */ + if (message.trigger.oneofKind === "repeat") + Trigger_Repeat.internalBinaryWrite(message.trigger.repeat, writer.tag(11, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Trigger + */ +export const Trigger = new Trigger$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Trigger_AfterAll$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Trigger.AfterAll", [ + { no: 1, name: "subtriggers", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Trigger } + ]); + } + create(value?: PartialMessage): Trigger_AfterAll { + const message = { subtriggers: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Trigger_AfterAll): Trigger_AfterAll { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.pipeline.v1.Trigger subtriggers */ 1: + message.subtriggers.push(Trigger.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Trigger_AfterAll, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.pipeline.v1.Trigger subtriggers = 1; */ + for (let i = 0; i < message.subtriggers.length; i++) + Trigger.internalBinaryWrite(message.subtriggers[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Trigger.AfterAll + */ +export const Trigger_AfterAll = new Trigger_AfterAll$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Trigger_AfterAny$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Trigger.AfterAny", [ + { no: 1, name: "subtriggers", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Trigger } + ]); + } + create(value?: PartialMessage): Trigger_AfterAny { + const message = { subtriggers: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Trigger_AfterAny): Trigger_AfterAny { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.pipeline.v1.Trigger subtriggers */ 1: + message.subtriggers.push(Trigger.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Trigger_AfterAny, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.pipeline.v1.Trigger subtriggers = 1; */ + for (let i = 0; i < message.subtriggers.length; i++) + Trigger.internalBinaryWrite(message.subtriggers[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Trigger.AfterAny + */ +export const Trigger_AfterAny = new Trigger_AfterAny$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Trigger_AfterEach$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Trigger.AfterEach", [ + { no: 1, name: "subtriggers", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Trigger } + ]); + } + create(value?: PartialMessage): Trigger_AfterEach { + const message = { subtriggers: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Trigger_AfterEach): Trigger_AfterEach { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.pipeline.v1.Trigger subtriggers */ 1: + message.subtriggers.push(Trigger.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Trigger_AfterEach, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.pipeline.v1.Trigger subtriggers = 1; */ + for (let i = 0; i < message.subtriggers.length; i++) + Trigger.internalBinaryWrite(message.subtriggers[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Trigger.AfterEach + */ +export const Trigger_AfterEach = new Trigger_AfterEach$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Trigger_AfterEndOfWindow$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Trigger.AfterEndOfWindow", [ + { no: 1, name: "early_firings", kind: "message", T: () => Trigger }, + { no: 2, name: "late_firings", kind: "message", T: () => Trigger } + ]); + } + create(value?: PartialMessage): Trigger_AfterEndOfWindow { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Trigger_AfterEndOfWindow): Trigger_AfterEndOfWindow { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.Trigger early_firings */ 1: + message.earlyFirings = Trigger.internalBinaryRead(reader, reader.uint32(), options, message.earlyFirings); + break; + case /* org.apache.beam.model.pipeline.v1.Trigger late_firings */ 2: + message.lateFirings = Trigger.internalBinaryRead(reader, reader.uint32(), options, message.lateFirings); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Trigger_AfterEndOfWindow, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.Trigger early_firings = 1; */ + if (message.earlyFirings) + Trigger.internalBinaryWrite(message.earlyFirings, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.Trigger late_firings = 2; */ + if (message.lateFirings) + Trigger.internalBinaryWrite(message.lateFirings, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Trigger.AfterEndOfWindow + */ +export const Trigger_AfterEndOfWindow = new Trigger_AfterEndOfWindow$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Trigger_AfterProcessingTime$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Trigger.AfterProcessingTime", [ + { no: 1, name: "timestamp_transforms", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => TimestampTransform } + ]); + } + create(value?: PartialMessage): Trigger_AfterProcessingTime { + const message = { timestampTransforms: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Trigger_AfterProcessingTime): Trigger_AfterProcessingTime { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.pipeline.v1.TimestampTransform timestamp_transforms */ 1: + message.timestampTransforms.push(TimestampTransform.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Trigger_AfterProcessingTime, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.pipeline.v1.TimestampTransform timestamp_transforms = 1; */ + for (let i = 0; i < message.timestampTransforms.length; i++) + TimestampTransform.internalBinaryWrite(message.timestampTransforms[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Trigger.AfterProcessingTime + */ +export const Trigger_AfterProcessingTime = new Trigger_AfterProcessingTime$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Trigger_AfterSynchronizedProcessingTime$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Trigger.AfterSynchronizedProcessingTime", []); + } + create(value?: PartialMessage): Trigger_AfterSynchronizedProcessingTime { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Trigger_AfterSynchronizedProcessingTime): Trigger_AfterSynchronizedProcessingTime { + return target ?? this.create(); + } + internalBinaryWrite(message: Trigger_AfterSynchronizedProcessingTime, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Trigger.AfterSynchronizedProcessingTime + */ +export const Trigger_AfterSynchronizedProcessingTime = new Trigger_AfterSynchronizedProcessingTime$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Trigger_Default$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Trigger.Default", []); + } + create(value?: PartialMessage): Trigger_Default { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Trigger_Default): Trigger_Default { + return target ?? this.create(); + } + internalBinaryWrite(message: Trigger_Default, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Trigger.Default + */ +export const Trigger_Default = new Trigger_Default$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Trigger_ElementCount$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Trigger.ElementCount", [ + { no: 1, name: "element_count", kind: "scalar", T: 5 /*ScalarType.INT32*/ } + ]); + } + create(value?: PartialMessage): Trigger_ElementCount { + const message = { elementCount: 0 }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Trigger_ElementCount): Trigger_ElementCount { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int32 element_count */ 1: + message.elementCount = reader.int32(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Trigger_ElementCount, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int32 element_count = 1; */ + if (message.elementCount !== 0) + writer.tag(1, WireType.Varint).int32(message.elementCount); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Trigger.ElementCount + */ +export const Trigger_ElementCount = new Trigger_ElementCount$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Trigger_Never$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Trigger.Never", []); + } + create(value?: PartialMessage): Trigger_Never { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Trigger_Never): Trigger_Never { + return target ?? this.create(); + } + internalBinaryWrite(message: Trigger_Never, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Trigger.Never + */ +export const Trigger_Never = new Trigger_Never$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Trigger_Always$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Trigger.Always", []); + } + create(value?: PartialMessage): Trigger_Always { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Trigger_Always): Trigger_Always { + return target ?? this.create(); + } + internalBinaryWrite(message: Trigger_Always, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Trigger.Always + */ +export const Trigger_Always = new Trigger_Always$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Trigger_OrFinally$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Trigger.OrFinally", [ + { no: 1, name: "main", kind: "message", T: () => Trigger }, + { no: 2, name: "finally", kind: "message", T: () => Trigger } + ]); + } + create(value?: PartialMessage): Trigger_OrFinally { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Trigger_OrFinally): Trigger_OrFinally { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.Trigger main */ 1: + message.main = Trigger.internalBinaryRead(reader, reader.uint32(), options, message.main); + break; + case /* org.apache.beam.model.pipeline.v1.Trigger finally */ 2: + message.finally = Trigger.internalBinaryRead(reader, reader.uint32(), options, message.finally); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Trigger_OrFinally, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.Trigger main = 1; */ + if (message.main) + Trigger.internalBinaryWrite(message.main, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.Trigger finally = 2; */ + if (message.finally) + Trigger.internalBinaryWrite(message.finally, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Trigger.OrFinally + */ +export const Trigger_OrFinally = new Trigger_OrFinally$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Trigger_Repeat$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Trigger.Repeat", [ + { no: 1, name: "subtrigger", kind: "message", T: () => Trigger } + ]); + } + create(value?: PartialMessage): Trigger_Repeat { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Trigger_Repeat): Trigger_Repeat { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.Trigger subtrigger */ 1: + message.subtrigger = Trigger.internalBinaryRead(reader, reader.uint32(), options, message.subtrigger); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Trigger_Repeat, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.Trigger subtrigger = 1; */ + if (message.subtrigger) + Trigger.internalBinaryWrite(message.subtrigger, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Trigger.Repeat + */ +export const Trigger_Repeat = new Trigger_Repeat$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class TimestampTransform$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.TimestampTransform", [ + { no: 1, name: "delay", kind: "message", oneof: "timestampTransform", T: () => TimestampTransform_Delay }, + { no: 2, name: "align_to", kind: "message", oneof: "timestampTransform", T: () => TimestampTransform_AlignTo } + ]); + } + create(value?: PartialMessage): TimestampTransform { + const message = { timestampTransform: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TimestampTransform): TimestampTransform { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.TimestampTransform.Delay delay */ 1: + message.timestampTransform = { + oneofKind: "delay", + delay: TimestampTransform_Delay.internalBinaryRead(reader, reader.uint32(), options, (message.timestampTransform as any).delay) + }; + break; + case /* org.apache.beam.model.pipeline.v1.TimestampTransform.AlignTo align_to */ 2: + message.timestampTransform = { + oneofKind: "alignTo", + alignTo: TimestampTransform_AlignTo.internalBinaryRead(reader, reader.uint32(), options, (message.timestampTransform as any).alignTo) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: TimestampTransform, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.TimestampTransform.Delay delay = 1; */ + if (message.timestampTransform.oneofKind === "delay") + TimestampTransform_Delay.internalBinaryWrite(message.timestampTransform.delay, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.TimestampTransform.AlignTo align_to = 2; */ + if (message.timestampTransform.oneofKind === "alignTo") + TimestampTransform_AlignTo.internalBinaryWrite(message.timestampTransform.alignTo, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.TimestampTransform + */ +export const TimestampTransform = new TimestampTransform$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class TimestampTransform_Delay$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.TimestampTransform.Delay", [ + { no: 1, name: "delay_millis", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): TimestampTransform_Delay { + const message = { delayMillis: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TimestampTransform_Delay): TimestampTransform_Delay { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 delay_millis */ 1: + message.delayMillis = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: TimestampTransform_Delay, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 delay_millis = 1; */ + if (message.delayMillis !== 0n) + writer.tag(1, WireType.Varint).int64(message.delayMillis); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.TimestampTransform.Delay + */ +export const TimestampTransform_Delay = new TimestampTransform_Delay$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class TimestampTransform_AlignTo$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.TimestampTransform.AlignTo", [ + { no: 3, name: "period", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 4, name: "offset", kind: "scalar", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ } + ]); + } + create(value?: PartialMessage): TimestampTransform_AlignTo { + const message = { period: 0n, offset: 0n }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: TimestampTransform_AlignTo): TimestampTransform_AlignTo { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* int64 period */ 3: + message.period = reader.int64().toBigInt(); + break; + case /* int64 offset */ 4: + message.offset = reader.int64().toBigInt(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: TimestampTransform_AlignTo, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* int64 period = 3; */ + if (message.period !== 0n) + writer.tag(3, WireType.Varint).int64(message.period); + /* int64 offset = 4; */ + if (message.offset !== 0n) + writer.tag(4, WireType.Varint).int64(message.offset); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.TimestampTransform.AlignTo + */ +export const TimestampTransform_AlignTo = new TimestampTransform_AlignTo$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class SideInput$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.SideInput", [ + { no: 1, name: "access_pattern", kind: "message", T: () => FunctionSpec }, + { no: 2, name: "view_fn", kind: "message", T: () => FunctionSpec }, + { no: 3, name: "window_mapping_fn", kind: "message", T: () => FunctionSpec } + ]); + } + create(value?: PartialMessage): SideInput { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: SideInput): SideInput { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.FunctionSpec access_pattern */ 1: + message.accessPattern = FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, message.accessPattern); + break; + case /* org.apache.beam.model.pipeline.v1.FunctionSpec view_fn */ 2: + message.viewFn = FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, message.viewFn); + break; + case /* org.apache.beam.model.pipeline.v1.FunctionSpec window_mapping_fn */ 3: + message.windowMappingFn = FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, message.windowMappingFn); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: SideInput, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.FunctionSpec access_pattern = 1; */ + if (message.accessPattern) + FunctionSpec.internalBinaryWrite(message.accessPattern, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.FunctionSpec view_fn = 2; */ + if (message.viewFn) + FunctionSpec.internalBinaryWrite(message.viewFn, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.FunctionSpec window_mapping_fn = 3; */ + if (message.windowMappingFn) + FunctionSpec.internalBinaryWrite(message.windowMappingFn, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.SideInput + */ +export const SideInput = new SideInput$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StandardArtifacts$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.StandardArtifacts", []); + } + create(value?: PartialMessage): StandardArtifacts { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StandardArtifacts): StandardArtifacts { + return target ?? this.create(); + } + internalBinaryWrite(message: StandardArtifacts, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.StandardArtifacts + */ +export const StandardArtifacts = new StandardArtifacts$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ArtifactFilePayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ArtifactFilePayload", [ + { no: 1, name: "path", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "sha256", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ArtifactFilePayload { + const message = { path: "", sha256: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ArtifactFilePayload): ArtifactFilePayload { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string path */ 1: + message.path = reader.string(); + break; + case /* string sha256 */ 2: + message.sha256 = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ArtifactFilePayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string path = 1; */ + if (message.path !== "") + writer.tag(1, WireType.LengthDelimited).string(message.path); + /* string sha256 = 2; */ + if (message.sha256 !== "") + writer.tag(2, WireType.LengthDelimited).string(message.sha256); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ArtifactFilePayload + */ +export const ArtifactFilePayload = new ArtifactFilePayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ArtifactUrlPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ArtifactUrlPayload", [ + { no: 1, name: "url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "sha256", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ArtifactUrlPayload { + const message = { url: "", sha256: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ArtifactUrlPayload): ArtifactUrlPayload { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string url */ 1: + message.url = reader.string(); + break; + case /* string sha256 */ 2: + message.sha256 = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ArtifactUrlPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string url = 1; */ + if (message.url !== "") + writer.tag(1, WireType.LengthDelimited).string(message.url); + /* string sha256 = 2; */ + if (message.sha256 !== "") + writer.tag(2, WireType.LengthDelimited).string(message.sha256); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ArtifactUrlPayload + */ +export const ArtifactUrlPayload = new ArtifactUrlPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class EmbeddedFilePayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.EmbeddedFilePayload", [ + { no: 1, name: "data", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): EmbeddedFilePayload { + const message = { data: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: EmbeddedFilePayload): EmbeddedFilePayload { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bytes data */ 1: + message.data = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: EmbeddedFilePayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bytes data = 1; */ + if (message.data.length) + writer.tag(1, WireType.LengthDelimited).bytes(message.data); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.EmbeddedFilePayload + */ +export const EmbeddedFilePayload = new EmbeddedFilePayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class PyPIPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.PyPIPayload", [ + { no: 1, name: "artifact_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): PyPIPayload { + const message = { artifactId: "", version: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: PyPIPayload): PyPIPayload { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string artifact_id */ 1: + message.artifactId = reader.string(); + break; + case /* string version */ 2: + message.version = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: PyPIPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string artifact_id = 1; */ + if (message.artifactId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.artifactId); + /* string version = 2; */ + if (message.version !== "") + writer.tag(2, WireType.LengthDelimited).string(message.version); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.PyPIPayload + */ +export const PyPIPayload = new PyPIPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class MavenPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.MavenPayload", [ + { no: 1, name: "artifact", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "repository_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): MavenPayload { + const message = { artifact: "", repositoryUrl: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MavenPayload): MavenPayload { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string artifact */ 1: + message.artifact = reader.string(); + break; + case /* string repository_url */ 2: + message.repositoryUrl = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: MavenPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string artifact = 1; */ + if (message.artifact !== "") + writer.tag(1, WireType.LengthDelimited).string(message.artifact); + /* string repository_url = 2; */ + if (message.repositoryUrl !== "") + writer.tag(2, WireType.LengthDelimited).string(message.repositoryUrl); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.MavenPayload + */ +export const MavenPayload = new MavenPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class DeferredArtifactPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.DeferredArtifactPayload", [ + { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "data", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): DeferredArtifactPayload { + const message = { key: "", data: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DeferredArtifactPayload): DeferredArtifactPayload { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string key */ 1: + message.key = reader.string(); + break; + case /* bytes data */ 2: + message.data = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: DeferredArtifactPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string key = 1; */ + if (message.key !== "") + writer.tag(1, WireType.LengthDelimited).string(message.key); + /* bytes data = 2; */ + if (message.data.length) + writer.tag(2, WireType.LengthDelimited).bytes(message.data); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.DeferredArtifactPayload + */ +export const DeferredArtifactPayload = new DeferredArtifactPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ArtifactStagingToRolePayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ArtifactStagingToRolePayload", [ + { no: 1, name: "staged_name", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ArtifactStagingToRolePayload { + const message = { stagedName: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ArtifactStagingToRolePayload): ArtifactStagingToRolePayload { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string staged_name */ 1: + message.stagedName = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ArtifactStagingToRolePayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string staged_name = 1; */ + if (message.stagedName !== "") + writer.tag(1, WireType.LengthDelimited).string(message.stagedName); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ArtifactStagingToRolePayload + */ +export const ArtifactStagingToRolePayload = new ArtifactStagingToRolePayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ArtifactInformation$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ArtifactInformation", [ + { no: 1, name: "type_urn", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "type_payload", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 3, name: "role_urn", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "role_payload", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): ArtifactInformation { + const message = { typeUrn: "", typePayload: new Uint8Array(0), roleUrn: "", rolePayload: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, 
value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ArtifactInformation): ArtifactInformation { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string type_urn */ 1: + message.typeUrn = reader.string(); + break; + case /* bytes type_payload */ 2: + message.typePayload = reader.bytes(); + break; + case /* string role_urn */ 3: + message.roleUrn = reader.string(); + break; + case /* bytes role_payload */ 4: + message.rolePayload = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ArtifactInformation, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string type_urn = 1; */ + if (message.typeUrn !== "") + writer.tag(1, WireType.LengthDelimited).string(message.typeUrn); + /* bytes type_payload = 2; */ + if (message.typePayload.length) + writer.tag(2, WireType.LengthDelimited).bytes(message.typePayload); + /* string role_urn = 3; */ + if (message.roleUrn !== "") + writer.tag(3, WireType.LengthDelimited).string(message.roleUrn); + /* bytes role_payload = 4; */ + if (message.rolePayload.length) + writer.tag(4, WireType.LengthDelimited).bytes(message.rolePayload); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ArtifactInformation + */ +export const ArtifactInformation = new ArtifactInformation$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Environment$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Environment", [ + { no: 2, name: "urn", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "payload", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 4, name: "display_data", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => DisplayData }, + { no: 5, name: "capabilities", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, + { no: 6, name: "dependencies", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ArtifactInformation }, + { no: 7, name: "resource_hints", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "scalar", T: 12 /*ScalarType.BYTES*/ } } + ]); + } + create(value?: PartialMessage): Environment { + const message = { urn: "", payload: new Uint8Array(0), displayData: [], capabilities: [], dependencies: [], resourceHints: {} }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Environment): Environment { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string urn */ 2: + message.urn = reader.string(); + break; + case /* bytes payload */ 3: + message.payload = reader.bytes(); + break; + case /* repeated org.apache.beam.model.pipeline.v1.DisplayData display_data */ 4: + message.displayData.push(DisplayData.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated string capabilities */ 5: + message.capabilities.push(reader.string()); + break; + case /* repeated org.apache.beam.model.pipeline.v1.ArtifactInformation dependencies */ 6: + message.dependencies.push(ArtifactInformation.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* map resource_hints */ 7: + this.binaryReadMap7(message.resourceHints, reader, options); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap7(map: Environment["resourceHints"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof Environment["resourceHints"] | undefined, val: Environment["resourceHints"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = reader.bytes(); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.Environment.resource_hints"); + } + } + map[key ?? ""] = val ?? 
new Uint8Array(0); + } + internalBinaryWrite(message: Environment, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string urn = 2; */ + if (message.urn !== "") + writer.tag(2, WireType.LengthDelimited).string(message.urn); + /* bytes payload = 3; */ + if (message.payload.length) + writer.tag(3, WireType.LengthDelimited).bytes(message.payload); + /* repeated org.apache.beam.model.pipeline.v1.DisplayData display_data = 4; */ + for (let i = 0; i < message.displayData.length; i++) + DisplayData.internalBinaryWrite(message.displayData[i], writer.tag(4, WireType.LengthDelimited).fork(), options).join(); + /* repeated string capabilities = 5; */ + for (let i = 0; i < message.capabilities.length; i++) + writer.tag(5, WireType.LengthDelimited).string(message.capabilities[i]); + /* repeated org.apache.beam.model.pipeline.v1.ArtifactInformation dependencies = 6; */ + for (let i = 0; i < message.dependencies.length; i++) + ArtifactInformation.internalBinaryWrite(message.dependencies[i], writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + /* map resource_hints = 7; */ + for (let k of Object.keys(message.resourceHints)) + writer.tag(7, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k).tag(2, WireType.LengthDelimited).bytes(message.resourceHints[k]).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Environment + */ +export const Environment = new Environment$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StandardEnvironments$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.StandardEnvironments", []); + } + create(value?: PartialMessage): StandardEnvironments { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StandardEnvironments): StandardEnvironments { + return target ?? this.create(); + } + internalBinaryWrite(message: StandardEnvironments, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.StandardEnvironments + */ +export const StandardEnvironments = new StandardEnvironments$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class DockerPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.DockerPayload", [ + { no: 1, name: "container_image", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): DockerPayload { + const message = { containerImage: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DockerPayload): DockerPayload { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string container_image */ 1: + message.containerImage = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: DockerPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string container_image = 1; */ + if (message.containerImage !== "") + writer.tag(1, WireType.LengthDelimited).string(message.containerImage); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.DockerPayload + */ +export const DockerPayload = new DockerPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ProcessPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ProcessPayload", [ + { no: 1, name: "os", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "arch", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "command", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "env", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "scalar", T: 9 /*ScalarType.STRING*/ } } + ]); + } + create(value?: PartialMessage): ProcessPayload { + const message = { os: "", arch: "", command: "", env: {} }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ProcessPayload): ProcessPayload { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string os */ 1: + message.os = reader.string(); + break; + case /* string arch */ 2: + message.arch = reader.string(); + break; + case /* string command */ 3: + message.command = reader.string(); + break; + case /* map env */ 4: + this.binaryReadMap4(message.env, reader, options); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap4(map: ProcessPayload["env"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ProcessPayload["env"] | undefined, val: ProcessPayload["env"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = reader.string(); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.ProcessPayload.env"); + } + } + map[key ?? ""] = val ?? ""; + } + internalBinaryWrite(message: ProcessPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string os = 1; */ + if (message.os !== "") + writer.tag(1, WireType.LengthDelimited).string(message.os); + /* string arch = 2; */ + if (message.arch !== "") + writer.tag(2, WireType.LengthDelimited).string(message.arch); + /* string command = 3; */ + if (message.command !== "") + writer.tag(3, WireType.LengthDelimited).string(message.command); + /* map env = 4; */ + for (let k of Object.keys(message.env)) + writer.tag(4, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k).tag(2, WireType.LengthDelimited).string(message.env[k]).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ProcessPayload + */ +export const ProcessPayload = new ProcessPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ExternalPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ExternalPayload", [ + { no: 1, name: "endpoint", kind: "message", T: () => ApiServiceDescriptor }, + { no: 2, name: "params", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "scalar", T: 9 /*ScalarType.STRING*/ } } + ]); + } + create(value?: PartialMessage): ExternalPayload { + const message = { params: {} }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ExternalPayload): ExternalPayload { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor endpoint */ 1: + message.endpoint = ApiServiceDescriptor.internalBinaryRead(reader, reader.uint32(), options, message.endpoint); + break; + case /* map params */ 2: + this.binaryReadMap2(message.params, reader, options); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap2(map: ExternalPayload["params"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof ExternalPayload["params"] | undefined, val: ExternalPayload["params"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = reader.string(); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.ExternalPayload.params"); + } + } + map[key ?? ""] = val ?? ""; + } + internalBinaryWrite(message: ExternalPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.ApiServiceDescriptor endpoint = 1; */ + if (message.endpoint) + ApiServiceDescriptor.internalBinaryWrite(message.endpoint, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* map params = 2; */ + for (let k of Object.keys(message.params)) + writer.tag(2, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k).tag(2, WireType.LengthDelimited).string(message.params[k]).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ExternalPayload + */ +export const ExternalPayload = new ExternalPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StandardProtocols$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.StandardProtocols", []); + } + create(value?: PartialMessage): StandardProtocols { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StandardProtocols): StandardProtocols { + return target ?? this.create(); + } + internalBinaryWrite(message: StandardProtocols, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.StandardProtocols + */ +export const StandardProtocols = new StandardProtocols$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StandardRunnerProtocols$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.StandardRunnerProtocols", []); + } + create(value?: PartialMessage): StandardRunnerProtocols { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StandardRunnerProtocols): StandardRunnerProtocols { + return target ?? this.create(); + } + internalBinaryWrite(message: StandardRunnerProtocols, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.StandardRunnerProtocols + */ +export const StandardRunnerProtocols = new StandardRunnerProtocols$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StandardRequirements$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.StandardRequirements", []); + } + create(value?: PartialMessage): StandardRequirements { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StandardRequirements): StandardRequirements { + return target ?? this.create(); + } + internalBinaryWrite(message: StandardRequirements, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.StandardRequirements + */ +export const StandardRequirements = new StandardRequirements$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FunctionSpec$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.FunctionSpec", [ + { no: 1, name: "urn", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "payload", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): FunctionSpec { + const message = { urn: "", payload: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FunctionSpec): FunctionSpec { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string urn */ 1: + message.urn = reader.string(); + break; + case /* bytes payload */ 3: + message.payload = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FunctionSpec, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string urn = 1; */ + if (message.urn !== "") + writer.tag(1, WireType.LengthDelimited).string(message.urn); + /* bytes payload = 3; */ + if (message.payload.length) + writer.tag(3, WireType.LengthDelimited).bytes(message.payload); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.FunctionSpec + */ +export const FunctionSpec = new FunctionSpec$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StandardDisplayData$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.StandardDisplayData", []); + } + create(value?: PartialMessage): StandardDisplayData { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StandardDisplayData): StandardDisplayData { + return target ?? this.create(); + } + internalBinaryWrite(message: StandardDisplayData, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.StandardDisplayData + */ +export const StandardDisplayData = new StandardDisplayData$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class LabelledPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.LabelledPayload", [ + { no: 1, name: "label", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "string_value", kind: "scalar", oneof: "value", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "bool_value", kind: "scalar", oneof: "value", T: 8 /*ScalarType.BOOL*/ }, + { no: 4, name: "double_value", kind: "scalar", oneof: "value", T: 1 /*ScalarType.DOUBLE*/ }, + { no: 5, name: "int_value", kind: "scalar", oneof: "value", T: 3 /*ScalarType.INT64*/, L: 0 /*LongType.BIGINT*/ }, + { no: 6, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 7, name: "namespace", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): LabelledPayload { + const message = { label: "", value: { oneofKind: undefined }, key: "", namespace: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LabelledPayload): LabelledPayload { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string label */ 1: + message.label = reader.string(); + break; + case /* string string_value */ 2: + message.value = { + oneofKind: "stringValue", + stringValue: reader.string() + }; + break; + case /* bool bool_value */ 3: + message.value = { + oneofKind: "boolValue", + boolValue: reader.bool() + }; + break; + case /* double double_value */ 4: + message.value = { + oneofKind: "doubleValue", + doubleValue: reader.double() + }; + break; + case /* int64 int_value */ 5: + message.value = { + oneofKind: "intValue", + intValue: reader.int64().toBigInt() + }; + break; + case /* string key */ 6: + message.key = reader.string(); + break; + case /* string namespace */ 7: + message.namespace = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: LabelledPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string label = 1; */ + if (message.label !== "") + writer.tag(1, WireType.LengthDelimited).string(message.label); + /* string string_value = 2; */ + if (message.value.oneofKind === "stringValue") + writer.tag(2, WireType.LengthDelimited).string(message.value.stringValue); + /* bool bool_value = 3; */ + if (message.value.oneofKind === "boolValue") + writer.tag(3, WireType.Varint).bool(message.value.boolValue); + /* double double_value = 4; */ + if (message.value.oneofKind === "doubleValue") + writer.tag(4, WireType.Bit64).double(message.value.doubleValue); + /* int64 int_value = 5; */ + if (message.value.oneofKind === "intValue") + writer.tag(5, WireType.Varint).int64(message.value.intValue); + /* string key = 6; */ + if (message.key !== "") + writer.tag(6, WireType.LengthDelimited).string(message.key); + /* string namespace = 7; */ + if (message.namespace !== "") + writer.tag(7, WireType.LengthDelimited).string(message.namespace); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.LabelledPayload + */ +export const LabelledPayload = new LabelledPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class DisplayData$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.DisplayData", [ + { no: 1, name: "urn", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "payload", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): DisplayData { + const message = { urn: "", payload: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: DisplayData): DisplayData { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string urn */ 1: + message.urn = reader.string(); + break; + case /* bytes payload */ 2: + message.payload = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: DisplayData, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string urn = 1; */ + if (message.urn !== "") + writer.tag(1, WireType.LengthDelimited).string(message.urn); + /* bytes payload = 2; */ + if (message.payload.length) + writer.tag(2, WireType.LengthDelimited).bytes(message.payload); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.DisplayData + */ +export const DisplayData = new DisplayData$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class MessageWithComponents$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.MessageWithComponents", [ + { no: 1, name: "components", kind: "message", T: () => Components }, + { no: 2, name: "coder", kind: "message", oneof: "root", T: () => Coder }, + { no: 3, name: "combine_payload", kind: "message", oneof: "root", T: () => CombinePayload }, + { no: 4, name: "function_spec", kind: "message", oneof: "root", T: () => FunctionSpec }, + { no: 6, name: "par_do_payload", kind: "message", oneof: "root", T: () => ParDoPayload }, + { no: 7, name: "ptransform", kind: "message", oneof: "root", T: () => PTransform }, + { no: 8, name: "pcollection", kind: "message", oneof: "root", T: () => PCollection }, + { no: 9, name: "read_payload", kind: "message", oneof: "root", T: () => ReadPayload }, + { no: 11, name: "side_input", kind: "message", oneof: "root", T: () => SideInput }, + { no: 12, name: "window_into_payload", kind: "message", oneof: "root", T: () => WindowIntoPayload }, + { no: 13, name: "windowing_strategy", kind: "message", oneof: "root", T: () => WindowingStrategy } + ]); + } 
+ create(value?: PartialMessage): MessageWithComponents { + const message = { root: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MessageWithComponents): MessageWithComponents { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.Components components */ 1: + message.components = Components.internalBinaryRead(reader, reader.uint32(), options, message.components); + break; + case /* org.apache.beam.model.pipeline.v1.Coder coder */ 2: + message.root = { + oneofKind: "coder", + coder: Coder.internalBinaryRead(reader, reader.uint32(), options, (message.root as any).coder) + }; + break; + case /* org.apache.beam.model.pipeline.v1.CombinePayload combine_payload */ 3: + message.root = { + oneofKind: "combinePayload", + combinePayload: CombinePayload.internalBinaryRead(reader, reader.uint32(), options, (message.root as any).combinePayload) + }; + break; + case /* org.apache.beam.model.pipeline.v1.FunctionSpec function_spec */ 4: + message.root = { + oneofKind: "functionSpec", + functionSpec: FunctionSpec.internalBinaryRead(reader, reader.uint32(), options, (message.root as any).functionSpec) + }; + break; + case /* org.apache.beam.model.pipeline.v1.ParDoPayload par_do_payload */ 6: + message.root = { + oneofKind: "parDoPayload", + parDoPayload: ParDoPayload.internalBinaryRead(reader, reader.uint32(), options, (message.root as any).parDoPayload) + }; + break; + case /* org.apache.beam.model.pipeline.v1.PTransform ptransform */ 7: + message.root = { + oneofKind: "ptransform", + ptransform: PTransform.internalBinaryRead(reader, reader.uint32(), options, 
(message.root as any).ptransform) + }; + break; + case /* org.apache.beam.model.pipeline.v1.PCollection pcollection */ 8: + message.root = { + oneofKind: "pcollection", + pcollection: PCollection.internalBinaryRead(reader, reader.uint32(), options, (message.root as any).pcollection) + }; + break; + case /* org.apache.beam.model.pipeline.v1.ReadPayload read_payload */ 9: + message.root = { + oneofKind: "readPayload", + readPayload: ReadPayload.internalBinaryRead(reader, reader.uint32(), options, (message.root as any).readPayload) + }; + break; + case /* org.apache.beam.model.pipeline.v1.SideInput side_input */ 11: + message.root = { + oneofKind: "sideInput", + sideInput: SideInput.internalBinaryRead(reader, reader.uint32(), options, (message.root as any).sideInput) + }; + break; + case /* org.apache.beam.model.pipeline.v1.WindowIntoPayload window_into_payload */ 12: + message.root = { + oneofKind: "windowIntoPayload", + windowIntoPayload: WindowIntoPayload.internalBinaryRead(reader, reader.uint32(), options, (message.root as any).windowIntoPayload) + }; + break; + case /* org.apache.beam.model.pipeline.v1.WindowingStrategy windowing_strategy */ 13: + message.root = { + oneofKind: "windowingStrategy", + windowingStrategy: WindowingStrategy.internalBinaryRead(reader, reader.uint32(), options, (message.root as any).windowingStrategy) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: MessageWithComponents, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.Components components = 1; */ + if (message.components) + Components.internalBinaryWrite(message.components, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.Coder coder = 2; */ + if (message.root.oneofKind === "coder") + Coder.internalBinaryWrite(message.root.coder, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.CombinePayload combine_payload = 3; */ + if (message.root.oneofKind === "combinePayload") + CombinePayload.internalBinaryWrite(message.root.combinePayload, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.FunctionSpec function_spec = 4; */ + if (message.root.oneofKind === "functionSpec") + FunctionSpec.internalBinaryWrite(message.root.functionSpec, writer.tag(4, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.ParDoPayload par_do_payload = 6; */ + if (message.root.oneofKind === "parDoPayload") + ParDoPayload.internalBinaryWrite(message.root.parDoPayload, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.PTransform ptransform = 7; */ + if (message.root.oneofKind === "ptransform") + PTransform.internalBinaryWrite(message.root.ptransform, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.PCollection pcollection = 8; */ + if (message.root.oneofKind === "pcollection") + PCollection.internalBinaryWrite(message.root.pcollection, writer.tag(8, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.ReadPayload read_payload = 9; */ + if 
(message.root.oneofKind === "readPayload") + ReadPayload.internalBinaryWrite(message.root.readPayload, writer.tag(9, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.SideInput side_input = 11; */ + if (message.root.oneofKind === "sideInput") + SideInput.internalBinaryWrite(message.root.sideInput, writer.tag(11, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.WindowIntoPayload window_into_payload = 12; */ + if (message.root.oneofKind === "windowIntoPayload") + WindowIntoPayload.internalBinaryWrite(message.root.windowIntoPayload, writer.tag(12, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.WindowingStrategy windowing_strategy = 13; */ + if (message.root.oneofKind === "windowingStrategy") + WindowingStrategy.internalBinaryWrite(message.root.windowingStrategy, writer.tag(13, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.MessageWithComponents + */ +export const MessageWithComponents = new MessageWithComponents$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ExecutableStagePayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ExecutableStagePayload", [ + { no: 1, name: "environment", kind: "message", T: () => Environment }, + { no: 9, name: "wire_coder_settings", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ExecutableStagePayload_WireCoderSetting }, + { no: 2, name: "input", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "side_inputs", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ExecutableStagePayload_SideInputId }, + { no: 4, name: "transforms", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, + { no: 5, name: "outputs", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, + { no: 6, name: "components", kind: "message", T: () => Components }, + { no: 7, name: "user_states", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ExecutableStagePayload_UserStateId }, + { no: 8, name: "timers", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ExecutableStagePayload_TimerId }, + { no: 10, name: "timerFamilies", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => ExecutableStagePayload_TimerFamilyId } + ]); + } + create(value?: PartialMessage): ExecutableStagePayload { + const message = { wireCoderSettings: [], input: "", sideInputs: [], transforms: [], outputs: [], userStates: [], timers: [], timerFamilies: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return 
message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ExecutableStagePayload): ExecutableStagePayload { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.Environment environment */ 1: + message.environment = Environment.internalBinaryRead(reader, reader.uint32(), options, message.environment); + break; + case /* repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.WireCoderSetting wire_coder_settings */ 9: + message.wireCoderSettings.push(ExecutableStagePayload_WireCoderSetting.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* string input */ 2: + message.input = reader.string(); + break; + case /* repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.SideInputId side_inputs */ 3: + message.sideInputs.push(ExecutableStagePayload_SideInputId.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated string transforms */ 4: + message.transforms.push(reader.string()); + break; + case /* repeated string outputs */ 5: + message.outputs.push(reader.string()); + break; + case /* org.apache.beam.model.pipeline.v1.Components components */ 6: + message.components = Components.internalBinaryRead(reader, reader.uint32(), options, message.components); + break; + case /* repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.UserStateId user_states */ 7: + message.userStates.push(ExecutableStagePayload_UserStateId.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerId timers */ 8: + message.timers.push(ExecutableStagePayload_TimerId.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerFamilyId timerFamilies 
*/ 10: + message.timerFamilies.push(ExecutableStagePayload_TimerFamilyId.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ExecutableStagePayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.Environment environment = 1; */ + if (message.environment) + Environment.internalBinaryWrite(message.environment, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.WireCoderSetting wire_coder_settings = 9; */ + for (let i = 0; i < message.wireCoderSettings.length; i++) + ExecutableStagePayload_WireCoderSetting.internalBinaryWrite(message.wireCoderSettings[i], writer.tag(9, WireType.LengthDelimited).fork(), options).join(); + /* string input = 2; */ + if (message.input !== "") + writer.tag(2, WireType.LengthDelimited).string(message.input); + /* repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.SideInputId side_inputs = 3; */ + for (let i = 0; i < message.sideInputs.length; i++) + ExecutableStagePayload_SideInputId.internalBinaryWrite(message.sideInputs[i], writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* repeated string transforms = 4; */ + for (let i = 0; i < message.transforms.length; i++) + writer.tag(4, WireType.LengthDelimited).string(message.transforms[i]); + /* repeated string outputs = 5; */ + for (let i = 0; i < message.outputs.length; i++) + writer.tag(5, WireType.LengthDelimited).string(message.outputs[i]); + /* org.apache.beam.model.pipeline.v1.Components components = 6; */ + if 
(message.components) + Components.internalBinaryWrite(message.components, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + /* repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.UserStateId user_states = 7; */ + for (let i = 0; i < message.userStates.length; i++) + ExecutableStagePayload_UserStateId.internalBinaryWrite(message.userStates[i], writer.tag(7, WireType.LengthDelimited).fork(), options).join(); + /* repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerId timers = 8; */ + for (let i = 0; i < message.timers.length; i++) + ExecutableStagePayload_TimerId.internalBinaryWrite(message.timers[i], writer.tag(8, WireType.LengthDelimited).fork(), options).join(); + /* repeated org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerFamilyId timerFamilies = 10; */ + for (let i = 0; i < message.timerFamilies.length; i++) + ExecutableStagePayload_TimerFamilyId.internalBinaryWrite(message.timerFamilies[i], writer.tag(10, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ExecutableStagePayload + */ +export const ExecutableStagePayload = new ExecutableStagePayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ExecutableStagePayload_SideInputId$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ExecutableStagePayload.SideInputId", [ + { no: 1, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "local_name", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ExecutableStagePayload_SideInputId { + const message = { transformId: "", localName: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ExecutableStagePayload_SideInputId): ExecutableStagePayload_SideInputId { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string transform_id */ 1: + message.transformId = reader.string(); + break; + case /* string local_name */ 2: + message.localName = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ExecutableStagePayload_SideInputId, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string transform_id = 1; */ + if (message.transformId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.transformId); + /* string local_name = 2; */ + if (message.localName !== "") + writer.tag(2, WireType.LengthDelimited).string(message.localName); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ExecutableStagePayload.SideInputId + */ +export const ExecutableStagePayload_SideInputId = new ExecutableStagePayload_SideInputId$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ExecutableStagePayload_UserStateId$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ExecutableStagePayload.UserStateId", [ + { no: 1, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "local_name", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ExecutableStagePayload_UserStateId { + const message = { transformId: "", localName: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ExecutableStagePayload_UserStateId): ExecutableStagePayload_UserStateId { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string transform_id */ 1: + message.transformId = reader.string(); + break; + case /* string local_name */ 2: + message.localName = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ExecutableStagePayload_UserStateId, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string transform_id = 1; */ + if (message.transformId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.transformId); + /* string local_name = 2; */ + if (message.localName !== "") + writer.tag(2, WireType.LengthDelimited).string(message.localName); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ExecutableStagePayload.UserStateId + */ +export const ExecutableStagePayload_UserStateId = new ExecutableStagePayload_UserStateId$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ExecutableStagePayload_TimerId$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerId", [ + { no: 1, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "local_name", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ExecutableStagePayload_TimerId { + const message = { transformId: "", localName: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ExecutableStagePayload_TimerId): ExecutableStagePayload_TimerId { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string transform_id */ 1: + message.transformId = reader.string(); + break; + case /* string local_name */ 2: + message.localName = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ExecutableStagePayload_TimerId, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string transform_id = 1; */ + if (message.transformId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.transformId); + /* string local_name = 2; */ + if (message.localName !== "") + writer.tag(2, WireType.LengthDelimited).string(message.localName); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerId + */ +export const ExecutableStagePayload_TimerId = new ExecutableStagePayload_TimerId$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ExecutableStagePayload_TimerFamilyId$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerFamilyId", [ + { no: 1, name: "transform_id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "local_name", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): ExecutableStagePayload_TimerFamilyId { + const message = { transformId: "", localName: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ExecutableStagePayload_TimerFamilyId): ExecutableStagePayload_TimerFamilyId { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string transform_id */ 1: + message.transformId = reader.string(); + break; + case /* string local_name */ 2: + message.localName = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ExecutableStagePayload_TimerFamilyId, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string transform_id = 1; */ + if (message.transformId !== "") + writer.tag(1, WireType.LengthDelimited).string(message.transformId); + /* string local_name = 2; */ + if (message.localName !== "") + writer.tag(2, WireType.LengthDelimited).string(message.localName); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerFamilyId + */ +export const ExecutableStagePayload_TimerFamilyId = new ExecutableStagePayload_TimerFamilyId$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ExecutableStagePayload_WireCoderSetting$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ExecutableStagePayload.WireCoderSetting", [ + { no: 1, name: "urn", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "payload", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 3, name: "input_or_output_id", kind: "scalar", oneof: "target", T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "timer", kind: "message", oneof: "target", T: () => ExecutableStagePayload_TimerId } + ]); + } + create(value?: PartialMessage): ExecutableStagePayload_WireCoderSetting { + const message = { urn: "", payload: new Uint8Array(0), target: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ExecutableStagePayload_WireCoderSetting): ExecutableStagePayload_WireCoderSetting { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string urn */ 1: + message.urn = reader.string(); + break; + case /* bytes payload */ 2: + message.payload = reader.bytes(); + break; + case /* string input_or_output_id */ 3: + message.target = { + oneofKind: "inputOrOutputId", + inputOrOutputId: reader.string() + }; + break; + case /* org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerId timer */ 4: + message.target = { + oneofKind: "timer", + timer: ExecutableStagePayload_TimerId.internalBinaryRead(reader, reader.uint32(), options, (message.target as any).timer) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ExecutableStagePayload_WireCoderSetting, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string urn = 1; */ + if (message.urn !== "") + writer.tag(1, WireType.LengthDelimited).string(message.urn); + /* bytes payload = 2; */ + if (message.payload.length) + writer.tag(2, WireType.LengthDelimited).bytes(message.payload); + /* string input_or_output_id = 3; */ + if (message.target.oneofKind === "inputOrOutputId") + writer.tag(3, WireType.LengthDelimited).string(message.target.inputOrOutputId); + /* org.apache.beam.model.pipeline.v1.ExecutableStagePayload.TimerId timer = 4; */ + if (message.target.oneofKind === "timer") + ExecutableStagePayload_TimerId.internalBinaryWrite(message.target.timer, writer.tag(4, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ExecutableStagePayload.WireCoderSetting + */ +export const ExecutableStagePayload_WireCoderSetting = new ExecutableStagePayload_WireCoderSetting$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class StandardResourceHints$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.StandardResourceHints", []); + } + create(value?: PartialMessage): StandardResourceHints { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: StandardResourceHints): StandardResourceHints { + return target ?? this.create(); + } + internalBinaryWrite(message: StandardResourceHints, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.StandardResourceHints + */ +export const StandardResourceHints = new StandardResourceHints$Type(); +/** + * @generated ServiceType for protobuf service org.apache.beam.model.pipeline.v1.TestStreamService + */ +export const TestStreamService = new ServiceType("org.apache.beam.model.pipeline.v1.TestStreamService", [ + { name: "Events", serverStreaming: true, options: {}, I: EventsRequest, O: TestStreamPayload_Event } +]); diff --git a/sdks/node-ts/src/apache_beam/proto/endpoints.ts b/sdks/node-ts/src/apache_beam/proto/endpoints.ts new file mode 100644 index 000000000000..38ac685b2540 --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/endpoints.ts @@ -0,0 +1,185 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "endpoints.proto" (package "org.apache.beam.model.pipeline.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers describing endpoints containing a service. 
+// +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +/** + * A description of how to connect to a Beam API endpoint. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ApiServiceDescriptor + */ +export interface ApiServiceDescriptor { + /** + * (Required) The URL to connect to. + * + * @generated from protobuf field: string url = 1; + */ + url: string; + /** + * (Optional) The method for authentication. If unspecified, access to the + * url is already being performed in a trusted context (e.g. localhost, + * private network). + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.AuthenticationSpec authentication = 2; + */ + authentication?: AuthenticationSpec; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.AuthenticationSpec + */ +export interface AuthenticationSpec { + /** + * (Required) A URN that describes the accompanying payload. + * For any URN that is not recognized (by whomever is inspecting + * it) the parameter payload should be treated as opaque and + * passed as-is. + * + * @generated from protobuf field: string urn = 1; + */ + urn: string; + /** + * (Optional) The data specifying any parameters to the URN. If + * the URN does not require any arguments, this may be omitted. 
+ * + * @generated from protobuf field: bytes payload = 2; + */ + payload: Uint8Array; +} +// @generated message type with reflection information, may provide speed optimized methods +class ApiServiceDescriptor$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ApiServiceDescriptor", [ + { no: 1, name: "url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "authentication", kind: "message", T: () => AuthenticationSpec } + ]); + } + create(value?: PartialMessage): ApiServiceDescriptor { + const message = { url: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ApiServiceDescriptor): ApiServiceDescriptor { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string url */ 1: + message.url = reader.string(); + break; + case /* org.apache.beam.model.pipeline.v1.AuthenticationSpec authentication */ 2: + message.authentication = AuthenticationSpec.internalBinaryRead(reader, reader.uint32(), options, message.authentication); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ApiServiceDescriptor, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string url = 1; */ + if (message.url !== "") + writer.tag(1, WireType.LengthDelimited).string(message.url); + /* org.apache.beam.model.pipeline.v1.AuthenticationSpec authentication = 2; */ + if (message.authentication) + AuthenticationSpec.internalBinaryWrite(message.authentication, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ApiServiceDescriptor + */ +export const ApiServiceDescriptor = new ApiServiceDescriptor$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class AuthenticationSpec$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.AuthenticationSpec", [ + { no: 1, name: "urn", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "payload", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): AuthenticationSpec { + const message = { urn: "", payload: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: AuthenticationSpec): AuthenticationSpec { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string urn */ 1: + message.urn = reader.string(); + break; + case /* bytes payload */ 2: + message.payload = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: AuthenticationSpec, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string urn = 1; */ + if (message.urn !== "") + writer.tag(1, WireType.LengthDelimited).string(message.urn); + /* bytes payload = 2; */ + if (message.payload.length) + writer.tag(2, WireType.LengthDelimited).bytes(message.payload); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.AuthenticationSpec + */ +export const AuthenticationSpec = new AuthenticationSpec$Type(); diff --git a/sdks/node-ts/src/apache_beam/proto/external_transforms.ts b/sdks/node-ts/src/apache_beam/proto/external_transforms.ts new file mode 100644 index 000000000000..a286dd5f049f --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/external_transforms.ts @@ -0,0 +1,378 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "external_transforms.proto" (package "org.apache.beam.model.pipeline.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. 
See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers describing the external transforms available. +// +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { Schema } from "./schema"; +/** + * A configuration payload for an external transform. + * Used as the payload of ExternalTransform as part of an ExpansionRequest. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ExternalConfigurationPayload + */ +export interface ExternalConfigurationPayload { + /** + * A schema for use in beam:coder:row:v1 + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Schema schema = 1; + */ + schema?: Schema; + /** + * A payload which can be decoded using beam:coder:row:v1 and the given + * schema. 
+ * + * @generated from protobuf field: bytes payload = 2; + */ + payload: Uint8Array; +} +/** + * Defines specific expansion methods that may be used to expand cross-language + * transforms. + * Has to be set as the URN of the transform of the expansion request. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ExpansionMethods + */ +export interface ExpansionMethods { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.ExpansionMethods.Enum + */ +export enum ExpansionMethods_Enum { + /** + * Expand a Java transform using specified constructor and builder methods. + * Transform payload will be of type JavaClassLookupPayload. + * + * @generated from protobuf enum value: JAVA_CLASS_LOOKUP = 0; + */ + JAVA_CLASS_LOOKUP = 0 +} +/** + * A configuration payload for an external transform. + * Used to define a Java transform that can be directly instantiated by a Java + * expansion service. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.JavaClassLookupPayload + */ +export interface JavaClassLookupPayload { + /** + * Name of the Java transform class. + * + * @generated from protobuf field: string class_name = 1; + */ + className: string; + /** + * A static method to construct the initial instance of the transform. + * If not provided, the transform should be instantiated using a class + * constructor. + * + * @generated from protobuf field: string constructor_method = 2; + */ + constructorMethod: string; + /** + * The top level fields of the schema represent the method parameters in + * order. + * If able, top level field names are also verified against the method + * parameters for a match. + * Any field names in the form 'ignore[0-9]+' will not be used for validation + * hence that format can be used to represent arbitrary field names. 
+ * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Schema constructor_schema = 3; + */ + constructorSchema?: Schema; + /** + * A payload which can be decoded using beam:coder:row:v1 and the provided + * constructor schema. + * + * @generated from protobuf field: bytes constructor_payload = 4; + */ + constructorPayload: Uint8Array; + /** + * Set of builder methods and corresponding parameters to apply after the + * transform object is constructed. + * When constructing the transform object, given builder methods will be + * applied in order. + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.BuilderMethod builder_methods = 5; + */ + builderMethods: BuilderMethod[]; +} +/** + * This represents a builder method of the transform class that should be + * applied in-order after instantiating the initial transform object. + * Each builder method may take one or more parameters and has to return an + * instance of the transform object. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.BuilderMethod + */ +export interface BuilderMethod { + /** + * Name of the builder method + * + * @generated from protobuf field: string name = 1; + */ + name: string; + /** + * The top level fields of the schema represent the method parameters in + * order. + * If able, top level field names are also verified against the method + * parameters for a match. + * Any field names in the form 'ignore[0-9]+' will not be used for validation + * hence that format can be used to represent arbitrary field names. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Schema schema = 2; + */ + schema?: Schema; + /** + * A payload which can be decoded using beam:coder:row:v1 and the builder + * method schema. 
+ * + * @generated from protobuf field: bytes payload = 3; + */ + payload: Uint8Array; +} +// @generated message type with reflection information, may provide speed optimized methods +class ExternalConfigurationPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ExternalConfigurationPayload", [ + { no: 1, name: "schema", kind: "message", T: () => Schema }, + { no: 2, name: "payload", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): ExternalConfigurationPayload { + const message = { payload: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ExternalConfigurationPayload): ExternalConfigurationPayload { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.Schema schema */ 1: + message.schema = Schema.internalBinaryRead(reader, reader.uint32(), options, message.schema); + break; + case /* bytes payload */ 2: + message.payload = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ExternalConfigurationPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.Schema schema = 1; */ + if (message.schema) + Schema.internalBinaryWrite(message.schema, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* bytes payload = 2; */ + if (message.payload.length) + writer.tag(2, WireType.LengthDelimited).bytes(message.payload); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ExternalConfigurationPayload + */ +export const ExternalConfigurationPayload = new ExternalConfigurationPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ExpansionMethods$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ExpansionMethods", []); + } + create(value?: PartialMessage): ExpansionMethods { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ExpansionMethods): ExpansionMethods { + return target ?? this.create(); + } + internalBinaryWrite(message: ExpansionMethods, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ExpansionMethods + */ +export const ExpansionMethods = new ExpansionMethods$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class JavaClassLookupPayload$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.JavaClassLookupPayload", [ + { no: 1, name: "class_name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "constructor_method", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "constructor_schema", kind: "message", T: () => Schema }, + { no: 4, name: "constructor_payload", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 5, name: "builder_methods", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => BuilderMethod } + ]); + } + create(value?: PartialMessage): JavaClassLookupPayload { + const message = { className: "", constructorMethod: "", constructorPayload: new Uint8Array(0), builderMethods: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: JavaClassLookupPayload): JavaClassLookupPayload { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string class_name */ 1: + message.className = reader.string(); + break; + case /* string constructor_method */ 2: + message.constructorMethod = reader.string(); + break; + case /* org.apache.beam.model.pipeline.v1.Schema constructor_schema */ 3: + message.constructorSchema = Schema.internalBinaryRead(reader, reader.uint32(), options, message.constructorSchema); + break; + case /* bytes constructor_payload */ 4: + message.constructorPayload = reader.bytes(); + break; + case /* repeated org.apache.beam.model.pipeline.v1.BuilderMethod builder_methods */ 5: + message.builderMethods.push(BuilderMethod.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: JavaClassLookupPayload, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string class_name = 1; */ + if (message.className !== "") + writer.tag(1, WireType.LengthDelimited).string(message.className); + /* string constructor_method = 2; */ + if (message.constructorMethod !== "") + writer.tag(2, WireType.LengthDelimited).string(message.constructorMethod); + /* org.apache.beam.model.pipeline.v1.Schema constructor_schema = 3; */ + if (message.constructorSchema) + Schema.internalBinaryWrite(message.constructorSchema, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* bytes constructor_payload = 4; */ + if (message.constructorPayload.length) + writer.tag(4, WireType.LengthDelimited).bytes(message.constructorPayload); + /* repeated org.apache.beam.model.pipeline.v1.BuilderMethod builder_methods = 5; */ + for (let i = 0; i < message.builderMethods.length; i++) + BuilderMethod.internalBinaryWrite(message.builderMethods[i], writer.tag(5, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.JavaClassLookupPayload + */ +export const JavaClassLookupPayload = new JavaClassLookupPayload$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class BuilderMethod$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.BuilderMethod", [ + { no: 1, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "schema", kind: "message", T: () => Schema }, + { no: 3, name: "payload", kind: "scalar", T: 12 /*ScalarType.BYTES*/ } + ]); + } + create(value?: PartialMessage): BuilderMethod { + const message = { name: "", payload: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: BuilderMethod): BuilderMethod { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string name */ 1: + message.name = reader.string(); + break; + case /* org.apache.beam.model.pipeline.v1.Schema schema */ 2: + message.schema = Schema.internalBinaryRead(reader, reader.uint32(), options, message.schema); + break; + case /* bytes payload */ 3: + message.payload = reader.bytes(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: BuilderMethod, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string name = 1; */ + if (message.name !== "") + writer.tag(1, WireType.LengthDelimited).string(message.name); + /* org.apache.beam.model.pipeline.v1.Schema schema = 2; */ + if (message.schema) + Schema.internalBinaryWrite(message.schema, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + /* bytes payload = 3; */ + if (message.payload.length) + writer.tag(3, WireType.LengthDelimited).bytes(message.payload); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.BuilderMethod + */ +export const BuilderMethod = new BuilderMethod$Type(); diff --git a/sdks/node-ts/src/apache_beam/proto/metrics.ts b/sdks/node-ts/src/apache_beam/proto/metrics.ts new file mode 100644 index 000000000000..bfbca30ec290 --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/metrics.ts @@ -0,0 +1,876 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "metrics.proto" (package "org.apache.beam.model.pipeline.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// +// Protocol Buffers for metrics classes, used in the Fn API, Job API, and by SDKs. +// +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +import { Timestamp } from "./google/protobuf/timestamp"; +/** + * A specification for describing a well known MonitoringInfo. + * + * All specifications are uniquely identified by the urn. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.MonitoringInfoSpec + */ +export interface MonitoringInfoSpec { + /** + * Defines the semantic meaning of the metric or monitored state. + * + * See MonitoringInfoSpecs.Enum for the set of well known metrics/monitored + * state. + * + * @generated from protobuf field: string urn = 1; + */ + urn: string; + /** + * Defines the required encoding and aggregation method for the payload. + * + * See MonitoringInfoTypeUrns.Enum for the set of well known types. + * + * @generated from protobuf field: string type = 2; + */ + type: string; + /** + * The list of required labels for the specified urn and type. 
+ * + * @generated from protobuf field: repeated string required_labels = 3; + */ + requiredLabels: string[]; + /** + * Extra non functional parts of the spec for descriptive purposes. + * i.e. description, units, etc. + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.Annotation annotations = 4; + */ + annotations: Annotation[]; +} +/** + * The key name and value string of MonitoringInfo annotations. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Annotation + */ +export interface Annotation { + /** + * @generated from protobuf field: string key = 1; + */ + key: string; + /** + * @generated from protobuf field: string value = 2; + */ + value: string; +} +/** + * A set of well known MonitoringInfo specifications. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.MonitoringInfoSpecs + */ +export interface MonitoringInfoSpecs { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.MonitoringInfoSpecs.Enum + */ +export enum MonitoringInfoSpecs_Enum { + /** + * Represents an integer counter where values are summed across bundles. + * + * @generated from protobuf enum value: USER_SUM_INT64 = 0; + */ + USER_SUM_INT64 = 0, + /** + * Represents a double counter where values are summed across bundles. 
+ * + * @generated from protobuf enum value: USER_SUM_DOUBLE = 1; + */ + USER_SUM_DOUBLE = 1, + /** + * Represents a distribution of an integer value where: + * - count: represents the number of values seen across all bundles + * - sum: represents the total of the value across all bundles + * - min: represents the smallest value seen across all bundles + * - max: represents the largest value seen across all bundles + * + * @generated from protobuf enum value: USER_DISTRIBUTION_INT64 = 2; + */ + USER_DISTRIBUTION_INT64 = 2, + /** + * Represents a distribution of a double value where: + * - count: represents the number of values seen across all bundles + * - sum: represents the total of the value across all bundles + * - min: represents the smallest value seen across all bundles + * - max: represents the largest value seen across all bundles + * + * @generated from protobuf enum value: USER_DISTRIBUTION_DOUBLE = 3; + */ + USER_DISTRIBUTION_DOUBLE = 3, + /** + * Represents the latest seen integer value. The timestamp is used to + * provide an "ordering" over multiple values to determine which is the + * latest. + * + * @generated from protobuf enum value: USER_LATEST_INT64 = 4; + */ + USER_LATEST_INT64 = 4, + /** + * Represents the latest seen double value. The timestamp is used to + * provide an "ordering" over multiple values to determine which is the + * latest. + * + * @generated from protobuf enum value: USER_LATEST_DOUBLE = 5; + */ + USER_LATEST_DOUBLE = 5, + /** + * Represents the largest set of integer values seen across bundles. + * + * @generated from protobuf enum value: USER_TOP_N_INT64 = 6; + */ + USER_TOP_N_INT64 = 6, + /** + * Represents the largest set of double values seen across bundles. + * + * @generated from protobuf enum value: USER_TOP_N_DOUBLE = 7; + */ + USER_TOP_N_DOUBLE = 7, + /** + * Represents the smallest set of integer values seen across bundles. 
+ * + * @generated from protobuf enum value: USER_BOTTOM_N_INT64 = 8; + */ + USER_BOTTOM_N_INT64 = 8, + /** + * Represents the smallest set of double values seen across bundles. + * + * @generated from protobuf enum value: USER_BOTTOM_N_DOUBLE = 9; + */ + USER_BOTTOM_N_DOUBLE = 9, + /** + * @generated from protobuf enum value: ELEMENT_COUNT = 10; + */ + ELEMENT_COUNT = 10, + /** + * @generated from protobuf enum value: SAMPLED_BYTE_SIZE = 11; + */ + SAMPLED_BYTE_SIZE = 11, + /** + * @generated from protobuf enum value: START_BUNDLE_MSECS = 12; + */ + START_BUNDLE_MSECS = 12, + /** + * @generated from protobuf enum value: PROCESS_BUNDLE_MSECS = 13; + */ + PROCESS_BUNDLE_MSECS = 13, + /** + * @generated from protobuf enum value: FINISH_BUNDLE_MSECS = 14; + */ + FINISH_BUNDLE_MSECS = 14, + /** + * @generated from protobuf enum value: TOTAL_MSECS = 15; + */ + TOTAL_MSECS = 15, + /** + * All values reported across all beam:metric:ptransform_progress:.*:v1 + * metrics are of the same magnitude. + * + * @generated from protobuf enum value: WORK_REMAINING = 16; + */ + WORK_REMAINING = 16, + /** + * All values reported across all beam:metric:ptransform_progress:.*:v1 + * metrics are of the same magnitude. + * + * @generated from protobuf enum value: WORK_COMPLETED = 17; + */ + WORK_COMPLETED = 17, + /** + * The (0-based) index of the latest item processed from the data channel. + * This gives an indication of the SDKs progress through the data channel, + * and is a lower bound on where it is able to split. + * For an SDK that processes items sequentially, this is equivalently the + * number of items fully processed (or -1 if processing has not yet started). 
+ * + * @generated from protobuf enum value: DATA_CHANNEL_READ_INDEX = 18; + */ + DATA_CHANNEL_READ_INDEX = 18, + /** + * @generated from protobuf enum value: API_REQUEST_COUNT = 19; + */ + API_REQUEST_COUNT = 19, + /** + * @generated from protobuf enum value: API_REQUEST_LATENCIES = 20; + */ + API_REQUEST_LATENCIES = 20 +} +/** + * A set of properties for the MonitoringInfoLabel, this is useful to obtain + * the proper label string for the MonitoringInfoLabel. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.MonitoringInfoLabelProps + */ +export interface MonitoringInfoLabelProps { + /** + * The label key to use in the MonitoringInfo labels map. + * + * @generated from protobuf field: string name = 1; + */ + name: string; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.MonitoringInfo + */ +export interface MonitoringInfo { + /** + * (Required) Defines the semantic meaning of the metric or monitored state. + * + * See MonitoringInfoSpecs.Enum for the set of well known metrics/monitored + * state. + * + * @generated from protobuf field: string urn = 1; + */ + urn: string; + /** + * (Required) Defines the encoding and aggregation method for the payload. + * + * See MonitoringInfoTypeUrns.Enum for the set of well known types. + * + * @generated from protobuf field: string type = 2; + */ + type: string; + /** + * (Required) The metric or monitored state encoded as per the specification + * defined by the type. + * + * @generated from protobuf field: bytes payload = 3; + */ + payload: Uint8Array; + /** + * A set of key and value labels which define the scope of the metric. For + * well known URNs, the set of required labels is provided by the associated + * MonitoringInfoSpec. + * + * Either a well defined entity id for matching the enum names in + * the MonitoringInfoLabels enum or any arbitrary label + * set by a custom metric or user metric. 
+ * + * A monitoring system is expected to be able to aggregate the metrics + * together for all updates having the same URN and labels. Some systems such + * as Stackdriver will be able to aggregate the metrics using a subset of the + * provided labels + * + * @generated from protobuf field: map labels = 4; + */ + labels: { + [key: string]: string; + }; + /** + * This indicates the start of the time range over which this value was + * measured. + * This is needed by some external metric aggregation services + * to indicate when the reporter of the metric first began collecting the + * cumulative value for the timeseries. + * If the SDK Harness restarts, it should reset the start_time, and reset + * the collection of cumulative metrics (i.e. start to count again from 0). + * HarnessMonitoringInfos should set this start_time once, when the + * MonitoringInfo is first reported. + * ProcessBundle MonitoringInfos should set a start_time for each bundle. + * + * @generated from protobuf field: google.protobuf.Timestamp start_time = 5; + */ + startTime?: Timestamp; +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.MonitoringInfo.MonitoringInfoLabels + */ +export enum MonitoringInfo_MonitoringInfoLabels { + /** + * The values used for TRANSFORM, PCOLLECTION, WINDOWING_STRATEGY + * CODER, ENVIRONMENT, etc. must always match the keys used to + * refer to them. For actively processed bundles, these should match the + * values within the ProcessBundleDescriptor. For job management APIs, + * these should match values within the original pipeline representation. 
+ * + * @generated from protobuf enum value: TRANSFORM = 0; + */ + TRANSFORM = 0, + /** + * @generated from protobuf enum value: PCOLLECTION = 1; + */ + PCOLLECTION = 1, + /** + * @generated from protobuf enum value: WINDOWING_STRATEGY = 2; + */ + WINDOWING_STRATEGY = 2, + /** + * @generated from protobuf enum value: CODER = 3; + */ + CODER = 3, + /** + * @generated from protobuf enum value: ENVIRONMENT = 4; + */ + ENVIRONMENT = 4, + /** + * @generated from protobuf enum value: NAMESPACE = 5; + */ + NAMESPACE = 5, + /** + * @generated from protobuf enum value: NAME = 6; + */ + NAME = 6, + /** + * @generated from protobuf enum value: SERVICE = 7; + */ + SERVICE = 7, + /** + * @generated from protobuf enum value: METHOD = 8; + */ + METHOD = 8, + /** + * @generated from protobuf enum value: RESOURCE = 9; + */ + RESOURCE = 9, + /** + * @generated from protobuf enum value: STATUS = 10; + */ + STATUS = 10, + /** + * @generated from protobuf enum value: BIGQUERY_PROJECT_ID = 11; + */ + BIGQUERY_PROJECT_ID = 11, + /** + * @generated from protobuf enum value: BIGQUERY_DATASET = 12; + */ + BIGQUERY_DATASET = 12, + /** + * @generated from protobuf enum value: BIGQUERY_TABLE = 13; + */ + BIGQUERY_TABLE = 13, + /** + * @generated from protobuf enum value: BIGQUERY_VIEW = 14; + */ + BIGQUERY_VIEW = 14, + /** + * @generated from protobuf enum value: BIGQUERY_QUERY_NAME = 15; + */ + BIGQUERY_QUERY_NAME = 15, + /** + * @generated from protobuf enum value: GCS_BUCKET = 16; + */ + GCS_BUCKET = 16, + /** + * @generated from protobuf enum value: GCS_PROJECT_ID = 17; + */ + GCS_PROJECT_ID = 17, + /** + * @generated from protobuf enum value: DATASTORE_PROJECT = 18; + */ + DATASTORE_PROJECT = 18, + /** + * @generated from protobuf enum value: DATASTORE_NAMESPACE = 19; + */ + DATASTORE_NAMESPACE = 19, + /** + * @generated from protobuf enum value: BIGTABLE_PROJECT_ID = 20; + */ + BIGTABLE_PROJECT_ID = 20, + /** + * @generated from protobuf enum value: INSTANCE_ID = 21; + */ + INSTANCE_ID = 
21, + /** + * @generated from protobuf enum value: TABLE_ID = 22; + */ + TABLE_ID = 22, + /** + * @generated from protobuf enum value: SPANNER_PROJECT_ID = 23; + */ + SPANNER_PROJECT_ID = 23, + /** + * @generated from protobuf enum value: SPANNER_DATABASE_ID = 24; + */ + SPANNER_DATABASE_ID = 24, + /** + * @generated from protobuf enum value: SPANNER_TABLE_ID = 25; + */ + SPANNER_TABLE_ID = 25, + /** + * @generated from protobuf enum value: SPANNER_INSTANCE_ID = 26; + */ + SPANNER_INSTANCE_ID = 26, + /** + * @generated from protobuf enum value: SPANNER_QUERY_NAME = 27; + */ + SPANNER_QUERY_NAME = 27 +} +/** + * A set of well known URNs that specify the encoding and aggregation method. + * + * @generated from protobuf message org.apache.beam.model.pipeline.v1.MonitoringInfoTypeUrns + */ +export interface MonitoringInfoTypeUrns { +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.MonitoringInfoTypeUrns.Enum + */ +export enum MonitoringInfoTypeUrns_Enum { + /** + * Represents an integer counter where values are summed across bundles. + * + * Encoding: + * - value: beam:coder:varint:v1 + * + * @generated from protobuf enum value: SUM_INT64_TYPE = 0; + */ + SUM_INT64_TYPE = 0, + /** + * Represents a double counter where values are summed across bundles. 
+ * + * Encoding: + * value: beam:coder:double:v1 + * + * @generated from protobuf enum value: SUM_DOUBLE_TYPE = 1; + */ + SUM_DOUBLE_TYPE = 1, + /** + * Represents a distribution of an integer value where: + * - count: represents the number of values seen across all bundles + * - sum: represents the total of the value across all bundles + * - min: represents the smallest value seen across all bundles + * - max: represents the largest value seen across all bundles + * + * Encoding: + * - count: beam:coder:varint:v1 + * - sum: beam:coder:varint:v1 + * - min: beam:coder:varint:v1 + * - max: beam:coder:varint:v1 + * + * @generated from protobuf enum value: DISTRIBUTION_INT64_TYPE = 2; + */ + DISTRIBUTION_INT64_TYPE = 2, + /** + * Represents a distribution of a double value where: + * - count: represents the number of values seen across all bundles + * - sum: represents the total of the value across all bundles + * - min: represents the smallest value seen across all bundles + * - max: represents the largest value seen across all bundles + * + * Encoding: + * - count: beam:coder:varint:v1 + * - sum: beam:coder:double:v1 + * - min: beam:coder:double:v1 + * - max: beam:coder:double:v1 + * + * @generated from protobuf enum value: DISTRIBUTION_DOUBLE_TYPE = 3; + */ + DISTRIBUTION_DOUBLE_TYPE = 3, + /** + * Represents the latest seen integer value. The timestamp is used to + * provide an "ordering" over multiple values to determine which is the + * latest. + * + * Encoding: + * - timestamp: beam:coder:varint:v1 (milliseconds since epoch) + * - value: beam:coder:varint:v1 + * + * @generated from protobuf enum value: LATEST_INT64_TYPE = 4; + */ + LATEST_INT64_TYPE = 4, + /** + * Represents the latest seen double value. The timestamp is used to + * provide an "ordering" over multiple values to determine which is the + * latest. 
+ * + * Encoding: + * - timestamp: beam:coder:varint:v1 (milliseconds since epoch) + * - value: beam:coder:double:v1 + * + * @generated from protobuf enum value: LATEST_DOUBLE_TYPE = 5; + */ + LATEST_DOUBLE_TYPE = 5, + /** + * Represents the largest set of integer values seen across bundles. + * + * Encoding: ... + * - iter: beam:coder:iterable:v1 + * - valueX: beam:coder:varint:v1 + * + * @generated from protobuf enum value: TOP_N_INT64_TYPE = 6; + */ + TOP_N_INT64_TYPE = 6, + /** + * Represents the largest set of double values seen across bundles. + * + * Encoding: ... + * - iter: beam:coder:iterable:v1 + * - valueX: beam:coder... + * - iter: beam:coder:iterable:v1 + * - valueX: beam:coder:varint:v1 + * + * @generated from protobuf enum value: BOTTOM_N_INT64_TYPE = 8; + */ + BOTTOM_N_INT64_TYPE = 8, + /** + * Represents the smallest set of double values seen across bundles. + * + * Encoding: ... + * - iter: beam:coder:iterable:v1 + * - valueX: beam:coder:double:v1 + * + * @generated from protobuf enum value: BOTTOM_N_DOUBLE_TYPE = 9; + */ + BOTTOM_N_DOUBLE_TYPE = 9, + /** + * Encoding: ... 
+ * - iter: beam:coder:iterable:v1 + * - valueX: beam:coder:double:v1 + * + * @generated from protobuf enum value: PROGRESS_TYPE = 10; + */ + PROGRESS_TYPE = 10 +} +// @generated message type with reflection information, may provide speed optimized methods +class MonitoringInfoSpec$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.MonitoringInfoSpec", [ + { no: 1, name: "urn", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "type", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "required_labels", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, + { no: 4, name: "annotations", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Annotation } + ]); + } + create(value?: PartialMessage): MonitoringInfoSpec { + const message = { urn: "", type: "", requiredLabels: [], annotations: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MonitoringInfoSpec): MonitoringInfoSpec { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string urn */ 1: + message.urn = reader.string(); + break; + case /* string type */ 2: + message.type = reader.string(); + break; + case /* repeated string required_labels */ 3: + message.requiredLabels.push(reader.string()); + break; + case /* repeated org.apache.beam.model.pipeline.v1.Annotation annotations */ 4: + message.annotations.push(Annotation.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: MonitoringInfoSpec, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string urn = 1; */ + if (message.urn !== "") + writer.tag(1, WireType.LengthDelimited).string(message.urn); + /* string type = 2; */ + if (message.type !== "") + writer.tag(2, WireType.LengthDelimited).string(message.type); + /* repeated string required_labels = 3; */ + for (let i = 0; i < message.requiredLabels.length; i++) + writer.tag(3, WireType.LengthDelimited).string(message.requiredLabels[i]); + /* repeated org.apache.beam.model.pipeline.v1.Annotation annotations = 4; */ + for (let i = 0; i < message.annotations.length; i++) + Annotation.internalBinaryWrite(message.annotations[i], writer.tag(4, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.MonitoringInfoSpec + */ +export const MonitoringInfoSpec = new MonitoringInfoSpec$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Annotation$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Annotation", [ + { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "value", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): Annotation { + const message = { key: "", value: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Annotation): Annotation { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string key */ 1: + message.key = reader.string(); + break; + case /* string value */ 2: + message.value = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Annotation, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string key = 1; */ + if (message.key !== "") + writer.tag(1, WireType.LengthDelimited).string(message.key); + /* string value = 2; */ + if (message.value !== "") + writer.tag(2, WireType.LengthDelimited).string(message.value); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Annotation + */ +export const Annotation = new Annotation$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class MonitoringInfoSpecs$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.MonitoringInfoSpecs", []); + } + create(value?: PartialMessage): MonitoringInfoSpecs { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MonitoringInfoSpecs): MonitoringInfoSpecs { + return target ?? this.create(); + } + internalBinaryWrite(message: MonitoringInfoSpecs, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.MonitoringInfoSpecs + */ +export const MonitoringInfoSpecs = new MonitoringInfoSpecs$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class MonitoringInfoLabelProps$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.MonitoringInfoLabelProps", [ + { no: 1, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ } + ]); + } + create(value?: PartialMessage): MonitoringInfoLabelProps { + const message = { name: "" }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MonitoringInfoLabelProps): MonitoringInfoLabelProps { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string name */ 1: + message.name = reader.string(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: MonitoringInfoLabelProps, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string name = 1; */ + if (message.name !== "") + writer.tag(1, WireType.LengthDelimited).string(message.name); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.MonitoringInfoLabelProps + */ +export const MonitoringInfoLabelProps = new MonitoringInfoLabelProps$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class MonitoringInfo$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.MonitoringInfo", [ + { no: 1, name: "urn", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "type", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "payload", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 4, name: "labels", kind: "map", K: 9 /*ScalarType.STRING*/, V: { kind: "scalar", T: 9 /*ScalarType.STRING*/ } }, + { no: 5, name: "start_time", kind: "message", T: () => Timestamp } + ]); + } + create(value?: PartialMessage): MonitoringInfo { + const message = { urn: "", type: "", payload: new Uint8Array(0), labels: {} }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MonitoringInfo): MonitoringInfo { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string urn */ 1: + message.urn = reader.string(); + break; + case /* string type */ 2: + message.type = reader.string(); + break; + case /* bytes payload */ 3: + message.payload = reader.bytes(); + break; + case /* map labels */ 4: + this.binaryReadMap4(message.labels, reader, options); + break; + case /* google.protobuf.Timestamp start_time */ 5: + message.startTime = Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.startTime); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + private binaryReadMap4(map: MonitoringInfo["labels"], reader: IBinaryReader, options: BinaryReadOptions): void { + let len = reader.uint32(), end = reader.pos + len, key: keyof MonitoringInfo["labels"] | undefined, val: MonitoringInfo["labels"][any] | undefined; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case 1: + key = reader.string(); + break; + case 2: + val = reader.string(); + break; + default: throw new globalThis.Error("unknown map entry field for field org.apache.beam.model.pipeline.v1.MonitoringInfo.labels"); + } + } + map[key ?? ""] = val ?? 
""; + } + internalBinaryWrite(message: MonitoringInfo, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string urn = 1; */ + if (message.urn !== "") + writer.tag(1, WireType.LengthDelimited).string(message.urn); + /* string type = 2; */ + if (message.type !== "") + writer.tag(2, WireType.LengthDelimited).string(message.type); + /* bytes payload = 3; */ + if (message.payload.length) + writer.tag(3, WireType.LengthDelimited).bytes(message.payload); + /* map labels = 4; */ + for (let k of Object.keys(message.labels)) + writer.tag(4, WireType.LengthDelimited).fork().tag(1, WireType.LengthDelimited).string(k).tag(2, WireType.LengthDelimited).string(message.labels[k]).join(); + /* google.protobuf.Timestamp start_time = 5; */ + if (message.startTime) + Timestamp.internalBinaryWrite(message.startTime, writer.tag(5, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.MonitoringInfo + */ +export const MonitoringInfo = new MonitoringInfo$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class MonitoringInfoTypeUrns$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.MonitoringInfoTypeUrns", []); + } + create(value?: PartialMessage): MonitoringInfoTypeUrns { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MonitoringInfoTypeUrns): MonitoringInfoTypeUrns { + return target ?? 
this.create(); + } + internalBinaryWrite(message: MonitoringInfoTypeUrns, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.MonitoringInfoTypeUrns + */ +export const MonitoringInfoTypeUrns = new MonitoringInfoTypeUrns$Type(); diff --git a/sdks/node-ts/src/apache_beam/proto/schema.ts b/sdks/node-ts/src/apache_beam/proto/schema.ts new file mode 100644 index 000000000000..2021cb4a899a --- /dev/null +++ b/sdks/node-ts/src/apache_beam/proto/schema.ts @@ -0,0 +1,1568 @@ +// @generated by protobuf-ts 2.1.0 +// @generated from protobuf file "schema.proto" (package "org.apache.beam.model.pipeline.v1", syntax proto3) +// tslint:disable +// +// +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// +// ** Experimental ** +// Protocol Buffers describing Beam Schemas, a portable representation for +// complex types. 
+// +// The primary application of Schema is as the payload for the standard coder +// "beam:coder:row:v1", defined in beam_runner_api.proto +// +import type { BinaryWriteOptions } from "@protobuf-ts/runtime"; +import type { IBinaryWriter } from "@protobuf-ts/runtime"; +import { WireType } from "@protobuf-ts/runtime"; +import type { BinaryReadOptions } from "@protobuf-ts/runtime"; +import type { IBinaryReader } from "@protobuf-ts/runtime"; +import { UnknownFieldHandler } from "@protobuf-ts/runtime"; +import type { PartialMessage } from "@protobuf-ts/runtime"; +import { reflectionMergePartial } from "@protobuf-ts/runtime"; +import { MESSAGE_TYPE } from "@protobuf-ts/runtime"; +import { MessageType } from "@protobuf-ts/runtime"; +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Schema + */ +export interface Schema { + /** + * List of fields for this schema. Two fields may not share a name. + * + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.Field fields = 1; + */ + fields: Field[]; + /** + * REQUIRED. An RFC 4122 UUID. + * + * @generated from protobuf field: string id = 2; + */ + id: string; + /** + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.Option options = 3; + */ + options: Option[]; + /** + * Indicates that encoding positions have been overridden. + * + * @generated from protobuf field: bool encoding_positions_set = 4; + */ + encodingPositionsSet: boolean; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Field + */ +export interface Field { + /** + * REQUIRED. Name of this field within the schema. + * + * @generated from protobuf field: string name = 1; + */ + name: string; + /** + * OPTIONAL. Human readable description of this field, such as the query that generated it. 
+ * + * @generated from protobuf field: string description = 2; + */ + description: string; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FieldType type = 3; + */ + type?: FieldType; + /** + * @generated from protobuf field: int32 id = 4; + */ + id: number; + /** + * OPTIONAL. The position of this field's data when encoded, e.g. with beam:coder:row:v1. + * Either no fields in a given row are have encoding position populated, + * or all of them are. Used to support backwards compatibility with schema + * changes. + * If no fields have encoding position populated the order of encoding is the same as the order in the Schema. + * If this Field is part of a Schema where encoding_positions_set is True then encoding_position must be + * defined, otherwise this field is ignored. + * + * @generated from protobuf field: int32 encoding_position = 5; + */ + encodingPosition: number; + /** + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.Option options = 6; + */ + options: Option[]; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.FieldType + */ +export interface FieldType { + /** + * @generated from protobuf field: bool nullable = 1; + */ + nullable: boolean; + /** + * @generated from protobuf oneof: type_info + */ + typeInfo: { + oneofKind: "atomicType"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.AtomicType atomic_type = 2; + */ + atomicType: AtomicType; + } | { + oneofKind: "arrayType"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ArrayType array_type = 3; + */ + arrayType: ArrayType; + } | { + oneofKind: "iterableType"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.IterableType iterable_type = 4; + */ + iterableType: IterableType; + } | { + oneofKind: "mapType"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.MapType map_type = 5; + */ + mapType: MapType; + } | { + 
oneofKind: "rowType"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.RowType row_type = 6; + */ + rowType: RowType; + } | { + oneofKind: "logicalType"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.LogicalType logical_type = 7; + */ + logicalType: LogicalType; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ArrayType + */ +export interface ArrayType { + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FieldType element_type = 1; + */ + elementType?: FieldType; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.IterableType + */ +export interface IterableType { + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FieldType element_type = 1; + */ + elementType?: FieldType; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.MapType + */ +export interface MapType { + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FieldType key_type = 1; + */ + keyType?: FieldType; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FieldType value_type = 2; + */ + valueType?: FieldType; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.RowType + */ +export interface RowType { + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Schema schema = 1; + */ + schema?: Schema; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.LogicalType + */ +export interface LogicalType { + /** + * @generated from protobuf field: string urn = 1; + */ + urn: string; + /** + * @generated from protobuf field: bytes payload = 2; + */ + payload: Uint8Array; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FieldType representation = 3; + */ + representation?: FieldType; + /** + * @generated from protobuf field: 
org.apache.beam.model.pipeline.v1.FieldType argument_type = 4; + */ + argumentType?: FieldType; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FieldValue argument = 5; + */ + argument?: FieldValue; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Option + */ +export interface Option { + /** + * REQUIRED. Identifier for the option. + * + * @generated from protobuf field: string name = 1; + */ + name: string; + /** + * REQUIRED. Type specifer for the structure of value. + * Conventionally, options that don't require additional configuration should + * use a boolean type, with the value set to true. + * + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FieldType type = 2; + */ + type?: FieldType; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FieldValue value = 3; + */ + value?: FieldValue; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.Row + */ +export interface Row { + /** + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.FieldValue values = 1; + */ + values: FieldValue[]; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.FieldValue + */ +export interface FieldValue { + /** + * @generated from protobuf oneof: field_value + */ + fieldValue: { + oneofKind: "atomicValue"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.AtomicTypeValue atomic_value = 1; + */ + atomicValue: AtomicTypeValue; + } | { + oneofKind: "arrayValue"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.ArrayTypeValue array_value = 2; + */ + arrayValue: ArrayTypeValue; + } | { + oneofKind: "iterableValue"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.IterableTypeValue iterable_value = 3; + */ + iterableValue: IterableTypeValue; + } | { + oneofKind: "mapValue"; + /** + * @generated from protobuf field: 
org.apache.beam.model.pipeline.v1.MapTypeValue map_value = 4; + */ + mapValue: MapTypeValue; + } | { + oneofKind: "rowValue"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.Row row_value = 5; + */ + rowValue: Row; + } | { + oneofKind: "logicalTypeValue"; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.LogicalTypeValue logical_type_value = 6; + */ + logicalTypeValue: LogicalTypeValue; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.AtomicTypeValue + */ +export interface AtomicTypeValue { + /** + * @generated from protobuf oneof: value + */ + value: { + oneofKind: "byte"; + /** + * @generated from protobuf field: int32 byte = 1; + */ + byte: number; + } | { + oneofKind: "int16"; + /** + * @generated from protobuf field: int32 int16 = 2; + */ + int16: number; + } | { + oneofKind: "int32"; + /** + * @generated from protobuf field: int32 int32 = 3; + */ + int32: number; + } | { + oneofKind: "int64"; + /** + * @generated from protobuf field: int64 int64 = 4; + */ + int64: bigint; + } | { + oneofKind: "float"; + /** + * @generated from protobuf field: float float = 5; + */ + float: number; + } | { + oneofKind: "double"; + /** + * @generated from protobuf field: double double = 6; + */ + double: number; + } | { + oneofKind: "string"; + /** + * @generated from protobuf field: string string = 7; + */ + string: string; + } | { + oneofKind: "boolean"; + /** + * @generated from protobuf field: bool boolean = 8; + */ + boolean: boolean; + } | { + oneofKind: "bytes"; + /** + * @generated from protobuf field: bytes bytes = 9; + */ + bytes: Uint8Array; + } | { + oneofKind: undefined; + }; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.ArrayTypeValue + */ +export interface ArrayTypeValue { + /** + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.FieldValue element = 1; + */ + element: FieldValue[]; +} 
+/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.IterableTypeValue + */ +export interface IterableTypeValue { + /** + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.FieldValue element = 1; + */ + element: FieldValue[]; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.MapTypeValue + */ +export interface MapTypeValue { + /** + * @generated from protobuf field: repeated org.apache.beam.model.pipeline.v1.MapTypeEntry entries = 1; + */ + entries: MapTypeEntry[]; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.MapTypeEntry + */ +export interface MapTypeEntry { + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FieldValue key = 1; + */ + key?: FieldValue; + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FieldValue value = 2; + */ + value?: FieldValue; +} +/** + * @generated from protobuf message org.apache.beam.model.pipeline.v1.LogicalTypeValue + */ +export interface LogicalTypeValue { + /** + * @generated from protobuf field: org.apache.beam.model.pipeline.v1.FieldValue value = 1; + */ + value?: FieldValue; +} +/** + * @generated from protobuf enum org.apache.beam.model.pipeline.v1.AtomicType + */ +export enum AtomicType { + /** + * @generated from protobuf enum value: UNSPECIFIED = 0; + */ + UNSPECIFIED = 0, + /** + * @generated from protobuf enum value: BYTE = 1; + */ + BYTE = 1, + /** + * @generated from protobuf enum value: INT16 = 2; + */ + INT16 = 2, + /** + * @generated from protobuf enum value: INT32 = 3; + */ + INT32 = 3, + /** + * @generated from protobuf enum value: INT64 = 4; + */ + INT64 = 4, + /** + * @generated from protobuf enum value: FLOAT = 5; + */ + FLOAT = 5, + /** + * @generated from protobuf enum value: DOUBLE = 6; + */ + DOUBLE = 6, + /** + * @generated from protobuf enum value: STRING = 7; + */ + STRING = 7, + /** + * @generated from protobuf enum value: BOOLEAN = 8; + */ + 
BOOLEAN = 8, + /** + * @generated from protobuf enum value: BYTES = 9; + */ + BYTES = 9 +} +// @generated message type with reflection information, may provide speed optimized methods +class Schema$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Schema", [ + { no: 1, name: "fields", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Field }, + { no: 2, name: "id", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "options", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Option }, + { no: 4, name: "encoding_positions_set", kind: "scalar", T: 8 /*ScalarType.BOOL*/ } + ]); + } + create(value?: PartialMessage): Schema { + const message = { fields: [], id: "", options: [], encodingPositionsSet: false }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Schema): Schema { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* repeated org.apache.beam.model.pipeline.v1.Field fields */ 1: + message.fields.push(Field.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* string id */ 2: + message.id = reader.string(); + break; + case /* repeated org.apache.beam.model.pipeline.v1.Option options */ 3: + message.options.push(Option.internalBinaryRead(reader, reader.uint32(), options)); + break; + case /* bool encoding_positions_set */ 4: + message.encodingPositionsSet = reader.bool(); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Schema, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* repeated org.apache.beam.model.pipeline.v1.Field fields = 1; */ + for (let i = 0; i < message.fields.length; i++) + Field.internalBinaryWrite(message.fields[i], writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* string id = 2; */ + if (message.id !== "") + writer.tag(2, WireType.LengthDelimited).string(message.id); + /* repeated org.apache.beam.model.pipeline.v1.Option options = 3; */ + for (let i = 0; i < message.options.length; i++) + Option.internalBinaryWrite(message.options[i], writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* bool encoding_positions_set = 4; */ + if (message.encodingPositionsSet !== false) + writer.tag(4, WireType.Varint).bool(message.encodingPositionsSet); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Schema + */ +export const Schema = new Schema$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Field$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.Field", [ + { no: 1, name: "name", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "description", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 3, name: "type", kind: "message", T: () => FieldType }, + { no: 4, name: "id", kind: "scalar", T: 5 /*ScalarType.INT32*/ }, + { no: 5, name: "encoding_position", kind: "scalar", T: 5 /*ScalarType.INT32*/ }, + { no: 6, name: "options", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => Option } + ]); + } + create(value?: PartialMessage): Field { + const message = { name: "", description: "", id: 0, encodingPosition: 0, options: [] }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: Field): Field { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string name */ 1: + message.name = reader.string(); + break; + case /* string description */ 2: + message.description = reader.string(); + break; + case /* org.apache.beam.model.pipeline.v1.FieldType type */ 3: + message.type = FieldType.internalBinaryRead(reader, reader.uint32(), options, message.type); + break; + case /* int32 id */ 4: + message.id = reader.int32(); + break; + case /* int32 encoding_position */ 5: + message.encodingPosition = reader.int32(); + break; + case /* repeated org.apache.beam.model.pipeline.v1.Option options */ 6: + message.options.push(Option.internalBinaryRead(reader, reader.uint32(), options)); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: Field, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string name = 1; */ + if (message.name !== "") + writer.tag(1, WireType.LengthDelimited).string(message.name); + /* string description = 2; */ + if (message.description !== "") + writer.tag(2, WireType.LengthDelimited).string(message.description); + /* org.apache.beam.model.pipeline.v1.FieldType type = 3; */ + if (message.type) + FieldType.internalBinaryWrite(message.type, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* int32 id = 4; */ + if (message.id !== 0) + writer.tag(4, WireType.Varint).int32(message.id); + /* int32 encoding_position = 5; */ + if (message.encodingPosition !== 0) + writer.tag(5, WireType.Varint).int32(message.encodingPosition); + /* repeated org.apache.beam.model.pipeline.v1.Option options = 6; */ + for (let i = 0; i < message.options.length; i++) + Option.internalBinaryWrite(message.options[i], writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.Field + */ +export const Field = new Field$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class FieldType$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.FieldType", [ + { no: 1, name: "nullable", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, + { no: 2, name: "atomic_type", kind: "enum", oneof: "typeInfo", T: () => ["org.apache.beam.model.pipeline.v1.AtomicType", AtomicType] }, + { no: 3, name: "array_type", kind: "message", oneof: "typeInfo", T: () => ArrayType }, + { no: 4, name: "iterable_type", kind: "message", oneof: "typeInfo", T: () => IterableType }, + { no: 5, name: "map_type", kind: "message", oneof: "typeInfo", T: () => MapType }, + { no: 6, name: "row_type", kind: "message", oneof: "typeInfo", T: () => RowType }, + { no: 7, name: "logical_type", kind: "message", oneof: "typeInfo", T: () => LogicalType } + ]); + } + create(value?: PartialMessage): FieldType { + const message = { nullable: false, typeInfo: { oneofKind: undefined } }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: FieldType): FieldType { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* bool nullable */ 1: + message.nullable = reader.bool(); + break; + case /* org.apache.beam.model.pipeline.v1.AtomicType atomic_type */ 2: + message.typeInfo = { + oneofKind: "atomicType", + atomicType: reader.int32() + }; + break; + case /* org.apache.beam.model.pipeline.v1.ArrayType array_type */ 3: + message.typeInfo = { + oneofKind: "arrayType", + arrayType: ArrayType.internalBinaryRead(reader, reader.uint32(), options, (message.typeInfo as any).arrayType) + }; + break; + case /* org.apache.beam.model.pipeline.v1.IterableType iterable_type */ 4: + message.typeInfo = { + oneofKind: "iterableType", + iterableType: IterableType.internalBinaryRead(reader, reader.uint32(), options, (message.typeInfo as any).iterableType) + }; + break; + case /* org.apache.beam.model.pipeline.v1.MapType map_type */ 5: + message.typeInfo = { + oneofKind: "mapType", + mapType: MapType.internalBinaryRead(reader, reader.uint32(), options, (message.typeInfo as any).mapType) + }; + break; + case /* org.apache.beam.model.pipeline.v1.RowType row_type */ 6: + message.typeInfo = { + oneofKind: "rowType", + rowType: RowType.internalBinaryRead(reader, reader.uint32(), options, (message.typeInfo as any).rowType) + }; + break; + case /* org.apache.beam.model.pipeline.v1.LogicalType logical_type */ 7: + message.typeInfo = { + oneofKind: "logicalType", + logicalType: LogicalType.internalBinaryRead(reader, reader.uint32(), options, (message.typeInfo as any).logicalType) + }; + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: FieldType, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* bool nullable = 1; */ + if (message.nullable !== false) + writer.tag(1, WireType.Varint).bool(message.nullable); + /* org.apache.beam.model.pipeline.v1.AtomicType atomic_type = 2; */ + if (message.typeInfo.oneofKind === "atomicType") + writer.tag(2, WireType.Varint).int32(message.typeInfo.atomicType); + /* org.apache.beam.model.pipeline.v1.ArrayType array_type = 3; */ + if (message.typeInfo.oneofKind === "arrayType") + ArrayType.internalBinaryWrite(message.typeInfo.arrayType, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.IterableType iterable_type = 4; */ + if (message.typeInfo.oneofKind === "iterableType") + IterableType.internalBinaryWrite(message.typeInfo.iterableType, writer.tag(4, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.MapType map_type = 5; */ + if (message.typeInfo.oneofKind === "mapType") + MapType.internalBinaryWrite(message.typeInfo.mapType, writer.tag(5, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.RowType row_type = 6; */ + if (message.typeInfo.oneofKind === "rowType") + RowType.internalBinaryWrite(message.typeInfo.rowType, writer.tag(6, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.LogicalType logical_type = 7; */ + if (message.typeInfo.oneofKind === "logicalType") + LogicalType.internalBinaryWrite(message.typeInfo.logicalType, writer.tag(7, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.FieldType + */ +export const FieldType = new FieldType$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class ArrayType$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.ArrayType", [ + { no: 1, name: "element_type", kind: "message", T: () => FieldType } + ]); + } + create(value?: PartialMessage): ArrayType { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: ArrayType): ArrayType { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.FieldType element_type */ 1: + message.elementType = FieldType.internalBinaryRead(reader, reader.uint32(), options, message.elementType); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: ArrayType, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.FieldType element_type = 1; */ + if (message.elementType) + FieldType.internalBinaryWrite(message.elementType, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.ArrayType + */ +export const ArrayType = new ArrayType$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class IterableType$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.IterableType", [ + { no: 1, name: "element_type", kind: "message", T: () => FieldType } + ]); + } + create(value?: PartialMessage): IterableType { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: IterableType): IterableType { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.FieldType element_type */ 1: + message.elementType = FieldType.internalBinaryRead(reader, reader.uint32(), options, message.elementType); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: IterableType, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.FieldType element_type = 1; */ + if (message.elementType) + FieldType.internalBinaryWrite(message.elementType, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.IterableType + */ +export const IterableType = new IterableType$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class MapType$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.MapType", [ + { no: 1, name: "key_type", kind: "message", T: () => FieldType }, + { no: 2, name: "value_type", kind: "message", T: () => FieldType } + ]); + } + create(value?: PartialMessage): MapType { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: MapType): MapType { + let message = target ?? this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.FieldType key_type */ 1: + message.keyType = FieldType.internalBinaryRead(reader, reader.uint32(), options, message.keyType); + break; + case /* org.apache.beam.model.pipeline.v1.FieldType value_type */ 2: + message.valueType = FieldType.internalBinaryRead(reader, reader.uint32(), options, message.valueType); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: MapType, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.FieldType key_type = 1; */ + if (message.keyType) + FieldType.internalBinaryWrite(message.keyType, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.FieldType value_type = 2; */ + if (message.valueType) + FieldType.internalBinaryWrite(message.valueType, writer.tag(2, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.MapType + */ +export const MapType = new MapType$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class RowType$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.RowType", [ + { no: 1, name: "schema", kind: "message", T: () => Schema } + ]); + } + create(value?: PartialMessage): RowType { + const message = {}; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: RowType): RowType { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* org.apache.beam.model.pipeline.v1.Schema schema */ 1: + message.schema = Schema.internalBinaryRead(reader, reader.uint32(), options, message.schema); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: RowType, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* org.apache.beam.model.pipeline.v1.Schema schema = 1; */ + if (message.schema) + Schema.internalBinaryWrite(message.schema, writer.tag(1, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? 
UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.RowType + */ +export const RowType = new RowType$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class LogicalType$Type extends MessageType { + constructor() { + super("org.apache.beam.model.pipeline.v1.LogicalType", [ + { no: 1, name: "urn", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, + { no: 2, name: "payload", kind: "scalar", T: 12 /*ScalarType.BYTES*/ }, + { no: 3, name: "representation", kind: "message", T: () => FieldType }, + { no: 4, name: "argument_type", kind: "message", T: () => FieldType }, + { no: 5, name: "argument", kind: "message", T: () => FieldValue } + ]); + } + create(value?: PartialMessage): LogicalType { + const message = { urn: "", payload: new Uint8Array(0) }; + globalThis.Object.defineProperty(message, MESSAGE_TYPE, { enumerable: false, value: this }); + if (value !== undefined) + reflectionMergePartial(this, message, value); + return message; + } + internalBinaryRead(reader: IBinaryReader, length: number, options: BinaryReadOptions, target?: LogicalType): LogicalType { + let message = target ?? 
this.create(), end = reader.pos + length; + while (reader.pos < end) { + let [fieldNo, wireType] = reader.tag(); + switch (fieldNo) { + case /* string urn */ 1: + message.urn = reader.string(); + break; + case /* bytes payload */ 2: + message.payload = reader.bytes(); + break; + case /* org.apache.beam.model.pipeline.v1.FieldType representation */ 3: + message.representation = FieldType.internalBinaryRead(reader, reader.uint32(), options, message.representation); + break; + case /* org.apache.beam.model.pipeline.v1.FieldType argument_type */ 4: + message.argumentType = FieldType.internalBinaryRead(reader, reader.uint32(), options, message.argumentType); + break; + case /* org.apache.beam.model.pipeline.v1.FieldValue argument */ 5: + message.argument = FieldValue.internalBinaryRead(reader, reader.uint32(), options, message.argument); + break; + default: + let u = options.readUnknownField; + if (u === "throw") + throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); + let d = reader.skip(wireType); + if (u !== false) + (u === true ? 
UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); + } + } + return message; + } + internalBinaryWrite(message: LogicalType, writer: IBinaryWriter, options: BinaryWriteOptions): IBinaryWriter { + /* string urn = 1; */ + if (message.urn !== "") + writer.tag(1, WireType.LengthDelimited).string(message.urn); + /* bytes payload = 2; */ + if (message.payload.length) + writer.tag(2, WireType.LengthDelimited).bytes(message.payload); + /* org.apache.beam.model.pipeline.v1.FieldType representation = 3; */ + if (message.representation) + FieldType.internalBinaryWrite(message.representation, writer.tag(3, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.FieldType argument_type = 4; */ + if (message.argumentType) + FieldType.internalBinaryWrite(message.argumentType, writer.tag(4, WireType.LengthDelimited).fork(), options).join(); + /* org.apache.beam.model.pipeline.v1.FieldValue argument = 5; */ + if (message.argument) + FieldValue.internalBinaryWrite(message.argument, writer.tag(5, WireType.LengthDelimited).fork(), options).join(); + let u = options.writeUnknownFields; + if (u !== false) + (u == true ? UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); + return writer; + } +} +/** + * @generated MessageType for protobuf message org.apache.beam.model.pipeline.v1.LogicalType + */ +export const LogicalType = new LogicalType$Type(); +// @generated message type with reflection information, may provide speed optimized methods +class Option$Type extends MessageType