diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml
new file mode 100644
index 0000000..2e7e23c
--- /dev/null
+++ b/.github/workflows/python.yml
@@ -0,0 +1,136 @@
+name: Waylay Queries CI
+
+on:
+ push:
+ branches: [ "main", "staging", "release/**" ]
+ tags:
+ - '**'
+ pull_request:
+ branches: [ "main", "staging", "release/**" ]
+
+jobs:
+ test:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version: ["3.9", "3.10", "3.11", "3.12"]
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Git with token to access other private repositories
+ run: git config --global url."https://${{ secrets.OTOMATOR_PAT }}@github".insteadOf https://github
+ - name: Setup Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ matrix.python-version }}
+ cache: 'pip'
+ cache-dependency-path: '**/*requirements*.txt'
+ - name: install dependencies
+ run: make ci-install-types
+ - name: code qa
+ run: make ci-code-qa
+ - name: unit tests
+ run: make ci-test
+
+ test-minimal:
+ runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ python-version: ["3.9", "3.10", "3.11", "3.12"]
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Git with token to access other private repositories
+ run: git config --global url."https://${{ secrets.OTOMATOR_PAT }}@github".insteadOf https://github
+ - name: Setup Python ${{ matrix.python-version }}
+ uses: actions/setup-python@v5
+ with:
+ python-version: ${{ matrix.python-version }}
+ cache: 'pip'
+ cache-dependency-path: '**/*requirements*.txt'
+ - name: install dependencies
+ run: make ci-install-api
+ - name: unit tests
+ run: make ci-test
+
+ publish-api:
+ needs:
+ - test
+ - test-minimal
+ if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
+
+ defaults:
+ run:
+ working-directory: ./waylay-sdk-queries
+
+ name: Publish api package to PyPI
+ runs-on: ubuntu-latest
+ environment:
+ name: pypi
+ url: https://pypi.org/p/waylay-sdk-queries
+
+ permissions:
+ actions: read
+ contents: read
+ id-token: write # IMPORTANT: mandatory for trusted publishing
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ - name: Install pypa/build
+ run: pip install build
+ - name: check version
+ id: check-tag
+ uses: samuelcolvin/check-python-version@v4.1
+ with:
+ version_file_path: waylay-sdk-queries/src/waylay/services/queries/service/__init__.py
+ - name: Build
+ run: python3 -m build
+ - name: Publish to PyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ packages-dir: waylay-sdk-queries/dist
+
+ publish-types:
+ needs:
+ - test
+ - test-minimal
+ if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags')
+
+ defaults:
+ run:
+ working-directory: ./waylay-sdk-queries-types
+
+ name: Publish types package to PyPI
+ runs-on: ubuntu-latest
+ environment:
+ name: pypi
+ url: https://pypi.org/p/waylay-sdk-queries-types
+
+
+ permissions:
+ actions: read
+ contents: read
+ id-token: write # IMPORTANT: mandatory for trusted publishing
+
+ steps:
+ - uses: actions/checkout@v4
+ - name: Set up Python
+ uses: actions/setup-python@v5
+ with:
+ python-version: "3.11"
+ - name: Install pypa/build
+ run: pip install build
+ - name: check version
+ id: check-tag
+ uses: samuelcolvin/check-python-version@v4.1
+ with:
+ version_file_path: waylay-sdk-queries-types/src/waylay/services/queries/models/__init__.py
+ - name: Build
+ run: python3 -m build
+ - name: Publish to PyPI
+ uses: pypa/gh-action-pypi-publish@release/v1
+ with:
+ packages-dir: waylay-sdk-queries-types/dist
\ No newline at end of file
diff --git a/.openapi-generator-ignore b/.openapi-generator-ignore
new file mode 100644
index 0000000..7484ee5
--- /dev/null
+++ b/.openapi-generator-ignore
@@ -0,0 +1,23 @@
+# OpenAPI Generator Ignore
+# Generated by openapi-generator https://github.com/openapitools/openapi-generator
+
+# Use this file to prevent files from being overwritten by the generator.
+# The patterns follow closely to .gitignore or .dockerignore.
+
+# As an example, the C# client generator defines ApiClient.cs.
+# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line:
+#ApiClient.cs
+
+# You can match any string of characters against a directory, file or extension with a single asterisk (*):
+#foo/*/qux
+# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux
+
+# You can recursively match patterns against a directory, file or extension with a double asterisk (**):
+#foo/**/qux
+# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux
+
+# You can also negate patterns with an exclamation (!).
+# For example, you can ignore all files in a docs folder with the file extension .md:
+#docs/*.md
+# Then explicitly reverse the ignore rule for a single file:
+#!docs/README.md
diff --git a/.openapi-generator/FILES b/.openapi-generator/FILES
new file mode 100644
index 0000000..10e55f6
--- /dev/null
+++ b/.openapi-generator/FILES
@@ -0,0 +1,231 @@
+.github/workflows/python.yml
+.openapi-generator-ignore
+LICENSE.txt
+Makefile
+README.md
+docs/AggregationByResourceAndMetric.md
+docs/AggregationByResourceOrMetric.md
+docs/AggregationMethod.md
+docs/AggregationMethodOneOf.md
+docs/AggregationMethodOneOf1.md
+docs/AggregationMethodOneOf2.md
+docs/AggregationMethodOneOf3.md
+docs/AggregationMethodOneOf4.md
+docs/AggregationMethodOneOf5.md
+docs/AggregationMethodOneOf6.md
+docs/AggregationMethodOneOf7.md
+docs/AggregationMethodOneOf8.md
+docs/AggregationsInner.md
+docs/AlignAt.md
+docs/AlignShift.md
+docs/Alignment.md
+docs/AlignmentGridInterval.md
+docs/AlignmentTimezone.md
+docs/CauseException.md
+docs/ColumnDataSet.md
+docs/ColumnDataSetDataAxis.md
+docs/ColumnHeader.md
+docs/ColumnHeadersInner.md
+docs/DataAxisOption.md
+docs/DataSetAttributes.md
+docs/DataSetWindow.md
+docs/Datum.md
+docs/DefaultAggregation.md
+docs/DefaultInterpolation.md
+docs/DeleteResponse.md
+docs/Embeddings.md
+docs/ExecuteApi.md
+docs/FromOverride.md
+docs/GroupingInterval.md
+docs/GroupingIntervalOverride.md
+docs/GroupingIntervalOverrideOneOf.md
+docs/HALLink.md
+docs/HALLinkMethod.md
+docs/HALLinkRole.md
+docs/HTTPValidationError.md
+docs/HeaderArrayOption.md
+docs/Hierarchical.md
+docs/Interpolation.md
+docs/InterpolationMethod.md
+docs/InterpolationMethodOneOf.md
+docs/InterpolationMethodOneOf1.md
+docs/InterpolationMethodOneOf10.md
+docs/InterpolationMethodOneOf11.md
+docs/InterpolationMethodOneOf12.md
+docs/InterpolationMethodOneOf13.md
+docs/InterpolationMethodOneOf2.md
+docs/InterpolationMethodOneOf3.md
+docs/InterpolationMethodOneOf4.md
+docs/InterpolationMethodOneOf5.md
+docs/InterpolationMethodOneOf6.md
+docs/InterpolationMethodOneOf7.md
+docs/InterpolationMethodOneOf8.md
+docs/InterpolationMethodOneOf9.md
+docs/InterpolationSpec.md
+docs/Links.md
+docs/LocationInner.md
+docs/ManageApi.md
+docs/Message.md
+docs/MessageArguments.md
+docs/MessageLevel.md
+docs/MessageProperties.md
+docs/ObjectData.md
+docs/ObjectDataSet.md
+docs/ObjectDataValue.md
+docs/Operation.md
+docs/QueriesListResponse.md
+docs/QueryDefinition.md
+docs/QueryEntityInput.md
+docs/QueryExecutionMessage.md
+docs/QueryExecutionMessageLevel.md
+docs/QueryHALLinks.md
+docs/QueryInput.md
+docs/QueryListHALLinks.md
+docs/QueryListItem.md
+docs/QueryOutput.md
+docs/QueryResponse.md
+docs/QueryResult.md
+docs/QueryUpdateInput.md
+docs/Render.md
+docs/Render1.md
+docs/RenderMode.md
+docs/RenderModeOneOf.md
+docs/RenderModeOneOf1.md
+docs/RenderModeOneOf2.md
+docs/RenderModeOneOf3.md
+docs/RenderModeOneOf4.md
+docs/RenderModeOneOf5.md
+docs/RenderModeOneOf6.md
+docs/RenderModeOneOf7.md
+docs/RenderModeOneOf8.md
+docs/RenderModeOneOf9.md
+docs/ResponseDataSet.md
+docs/RowDataSet.md
+docs/RowDataSetDataAxis.md
+docs/RowHeader.md
+docs/RowHeadersInner.md
+docs/SeriesDataSet.md
+docs/SeriesSpec.md
+docs/StatusApi.md
+docs/TimeWindowFrom.md
+docs/TimeWindowUntil.md
+docs/ValidationError.md
+docs/Window.md
+docs/WindowOverride.md
+pyproject.toml
+requirements.txt
+test/__init__.py
+test/api/__init__.py
+test/api/execute_api_test.py
+test/api/manage_api_test.py
+test/api/status_api_test.py
+test/conftest.py
+test/openapi.py
+test/types/__init__.py
+test/types/aggregation_by_resource_and_metric_stub.py
+test/types/aggregation_by_resource_or_metric_stub.py
+test/types/aggregation_method_one_of1_stub.py
+test/types/aggregation_method_one_of2_stub.py
+test/types/aggregation_method_one_of3_stub.py
+test/types/aggregation_method_one_of4_stub.py
+test/types/aggregation_method_one_of5_stub.py
+test/types/aggregation_method_one_of6_stub.py
+test/types/aggregation_method_one_of7_stub.py
+test/types/aggregation_method_one_of8_stub.py
+test/types/aggregation_method_one_of_stub.py
+test/types/aggregation_method_stub.py
+test/types/aggregations_inner_stub.py
+test/types/align_at_stub.py
+test/types/align_shift_stub.py
+test/types/alignment_grid_interval_stub.py
+test/types/alignment_stub.py
+test/types/alignment_timezone_stub.py
+test/types/cause_exception_stub.py
+test/types/column_data_set_data_axis_stub.py
+test/types/column_data_set_stub.py
+test/types/column_header_stub.py
+test/types/column_headers_inner_stub.py
+test/types/data_axis_option_stub.py
+test/types/data_set_attributes_stub.py
+test/types/data_set_window_stub.py
+test/types/datum_stub.py
+test/types/default_aggregation_stub.py
+test/types/default_interpolation_stub.py
+test/types/delete_response_stub.py
+test/types/embeddings_stub.py
+test/types/from_override_stub.py
+test/types/grouping_interval_override_one_of_stub.py
+test/types/grouping_interval_override_stub.py
+test/types/grouping_interval_stub.py
+test/types/hal_link_method_stub.py
+test/types/hal_link_role_stub.py
+test/types/hal_link_stub.py
+test/types/header_array_option_stub.py
+test/types/hierarchical_stub.py
+test/types/http_validation_error_stub.py
+test/types/interpolation_method_one_of10_stub.py
+test/types/interpolation_method_one_of11_stub.py
+test/types/interpolation_method_one_of12_stub.py
+test/types/interpolation_method_one_of13_stub.py
+test/types/interpolation_method_one_of1_stub.py
+test/types/interpolation_method_one_of2_stub.py
+test/types/interpolation_method_one_of3_stub.py
+test/types/interpolation_method_one_of4_stub.py
+test/types/interpolation_method_one_of5_stub.py
+test/types/interpolation_method_one_of6_stub.py
+test/types/interpolation_method_one_of7_stub.py
+test/types/interpolation_method_one_of8_stub.py
+test/types/interpolation_method_one_of9_stub.py
+test/types/interpolation_method_one_of_stub.py
+test/types/interpolation_method_stub.py
+test/types/interpolation_spec_stub.py
+test/types/interpolation_stub.py
+test/types/links_stub.py
+test/types/location_inner_stub.py
+test/types/message_arguments_stub.py
+test/types/message_level_stub.py
+test/types/message_properties_stub.py
+test/types/message_stub.py
+test/types/object_data_set_stub.py
+test/types/object_data_stub.py
+test/types/object_data_value_stub.py
+test/types/queries_list_response_stub.py
+test/types/query_definition_stub.py
+test/types/query_entity_input_stub.py
+test/types/query_execution_message_level_stub.py
+test/types/query_execution_message_stub.py
+test/types/query_hal_links_stub.py
+test/types/query_input_stub.py
+test/types/query_list_hal_links_stub.py
+test/types/query_list_item_stub.py
+test/types/query_output_stub.py
+test/types/query_response_stub.py
+test/types/query_result_stub.py
+test/types/query_update_input_stub.py
+test/types/render1_stub.py
+test/types/render_mode_one_of1_stub.py
+test/types/render_mode_one_of2_stub.py
+test/types/render_mode_one_of3_stub.py
+test/types/render_mode_one_of4_stub.py
+test/types/render_mode_one_of5_stub.py
+test/types/render_mode_one_of6_stub.py
+test/types/render_mode_one_of7_stub.py
+test/types/render_mode_one_of8_stub.py
+test/types/render_mode_one_of9_stub.py
+test/types/render_mode_one_of_stub.py
+test/types/render_mode_stub.py
+test/types/render_stub.py
+test/types/response_data_set_stub.py
+test/types/row_data_set_data_axis_stub.py
+test/types/row_data_set_stub.py
+test/types/row_header_stub.py
+test/types/row_headers_inner_stub.py
+test/types/series_data_set_stub.py
+test/types/series_spec_stub.py
+test/types/time_window_from_stub.py
+test/types/time_window_until_stub.py
+test/types/validation_error_stub.py
+test/types/window_override_stub.py
+test/types/window_stub.py
+waylay-sdk-queries-types/README.md
+waylay-sdk-queries/README.md
diff --git a/.openapi-generator/VERSION b/.openapi-generator/VERSION
new file mode 100644
index 0000000..1985849
--- /dev/null
+++ b/.openapi-generator/VERSION
@@ -0,0 +1 @@
+7.7.0
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..2796771
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,13 @@
+ISC License (ISC)
+Copyright 2024, Waylay
+
+Permission to use, copy, modify, and/or distribute this software for any purpose
+with or without fee is hereby granted, provided that the above copyright notice
+and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
+OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
\ No newline at end of file
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..9ad819a
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,158 @@
+printMsg=printf "\033[36m\033[1m%-15s\033[0m\033[36m %-30s\033[0m\n"
+
+.PHONY: help test
+## use triple hashes ### to indicate main build targets
+help:
+ @awk 'BEGIN {FS = ":[^#]*? ### "} /^[a-zA-Z0-9_\-\.]+:[^#]* ### / {printf "\033[1m\033[36m%-30s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
+ @awk 'BEGIN {FS = ":[^#]*? ## "} /^[a-zA-Z0-9_\-\.]+:[^#]* ## / {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' $(MAKEFILE_LIST)
+.DEFAULT_GOAL := help
+
+SERVICE_NAME=queries
+
+API_FOLDER=waylay-sdk-${SERVICE_NAME}
+API_SRC=${API_FOLDER}/src
+TYPES_FOLDER=waylay-sdk-${SERVICE_NAME}-types
+TYPES_SRC=${TYPES_FOLDER}/src
+TEST_FOLDER=test
+TEST_RUN_FOLDER=${TEST_FOLDER}/_run
+
+CMD_FORMAT=ruff format --no-respect-gitignore --preview
+CMD_FIX=ruff check --fix --unsafe-fixes --no-respect-gitignore --preview
+CMD_CHECK=ruff check --no-respect-gitignore --preview
+
+# disables test QA unless set to empty string
+TEST_QA_PREFIX?=echo DISABLED
+
+VENV_DIR=.venv
+VENV_TYPES_DIR=.venv/types
+VENV_TYPES_ACTIVATE_CMD=${VENV_TYPES_DIR}/bin/activate
+VENV_TYPES_ACTIVATE=. ${VENV_TYPES_ACTIVATE_CMD}
+
+${VENV_TYPES_ACTIVATE_CMD}:
+ python3 -m venv ${VENV_TYPES_DIR}
+ ${VENV_TYPES_ACTIVATE} && make exec-dev-install-types
+
+VENV_NOTYPES_DIR=.venv/notypes
+VENV_NOTYPES_ACTIVATE_CMD=${VENV_NOTYPES_DIR}/bin/activate
+VENV_NOTYPES_ACTIVATE=. ${VENV_NOTYPES_ACTIVATE_CMD}
+
+${VENV_NOTYPES_ACTIVATE_CMD}:
+ python3 -m venv ${VENV_NOTYPES_DIR}
+ ${VENV_NOTYPES_ACTIVATE} && make exec-dev-install-api
+
+
+install-types: ${VENV_TYPES_ACTIVATE_CMD}
+
+install-notypes: ${VENV_NOTYPES_ACTIVATE_CMD}
+
+install: install-types
+
+clean:
+ rm -fr ${VENV_DIR}
+ rm -fr .*_cache
+ rm -fr */.*_cache
+ rm -fr */src/*.egg-info
+ rm -fr **/__pycache__
+ rm -rf ${TEST_RUN_FOLDER}
+
+lint: install ### Run linting checks
+ @${VENV_TYPES_ACTIVATE} && make exec-lint
+
+typecheck: install ### Run type checks
+ @${VENV_TYPES_ACTIVATE} && make exec-typecheck
+
+code-qa: install ### perform code quality checks
+ @${VENV_TYPES_ACTIVATE} && make exec-code-qa
+
+test: test-notypes test-types ### Run unit tests with and without types installed
+
+test-types: install-types ### Run unit tests with types installed
+ @${VENV_TYPES_ACTIVATE} && make exec-test
+ @${printMsg} 'tests with types package installed' 'OK'
+
+test-notypes: install-notypes ### Run unit tests without types installed
+ @${VENV_NOTYPES_ACTIVATE} && make exec-test
+ @${printMsg} 'tests without types package installed' 'OK'
+
+format: install ### Format code
+ @${VENV_TYPES_ACTIVATE} && make exec-format
+
+exec-lint: ### Run linting checks
+ cd ${API_FOLDER} && ${CMD_CHECK}
+ @${printMsg} 'lint ${API_FOLDER}' 'OK'
+ cd ${TYPES_FOLDER} && ${CMD_CHECK}
+ @${printMsg} 'lint ${TYPES_FOLDER}' 'OK'
+ ${CMD_CHECK}
+ @${printMsg} 'lint test' 'OK'
+
+exec-typecheck: ### Run type checks
+ cd ${API_SRC}/ && mypy --namespace-packages -p waylay
+ @${printMsg} 'typecheck api' 'OK'
+ cd ${TYPES_SRC}/ && mypy --namespace-packages -p waylay
+ @${printMsg} 'typecheck types' 'OK'
+ ${TEST_QA_PREFIX} mypy ${TEST_FOLDER}
+ @${printMsg} 'typecheck test' '${TEST_QA_PREFIX} OK'
+
+${TEST_RUN_FOLDER}: # workaround for JSF schema resolution
+ mkdir -p ${TEST_RUN_FOLDER}
+ cp -r openapi ${TEST_RUN_FOLDER}/openapi
+ # let JSF loader resolve './xx.yaml' to 'openapi/xx.yaml.json'
+ # and make contentEncoding=base64 work
+ cd ${TEST_RUN_FOLDER}/openapi && for f in `ls *.yaml`; \
+ do \
+ cat $$f | yq 'tojson' | sed -e 's/"base64"/"base-64"/' > $$f.json; \
+ cd .. && ln -s openapi/$$f.json $$f; cd openapi; \
+ done
+
+exec-test: ${TEST_RUN_FOLDER} ### Run unit tests
+ cd ${TEST_RUN_FOLDER} && pytest ..
+
+exec-format: ### Format code
+ ${CMD_FIX} ${API_FOLDER}
+ ${CMD_FORMAT} ${API_FOLDER}
+ @${printMsg} 'format api' 'OK'
+ ${CMD_FIX} ${TYPES_FOLDER}
+ ${CMD_FORMAT} ${TYPES_FOLDER}
+ @${printMsg} 'format types' 'OK'
+ ${CMD_FIX} ${TEST_FOLDER}
+ ${CMD_FORMAT} ${TEST_FOLDER}
+ @${printMsg} 'format test' 'OK'
+
+exec-code-qa: exec-lint exec-typecheck ### perform code quality checks
+
+ci-code-qa: exec-code-qa ### perform ci code quality checks
+
+exec-dev-install-types: exec-dev-install-api ### Install the development environment including types
+ pip install -e ${TYPES_FOLDER}[dev]
+
+exec-dev-install-api: _install_requirements ### Install the minimal development environment
+ pip install -e ${API_FOLDER}[dev]
+
+ci-install-types: ci-install-api ### Install the environment including types with frozen requirements
+ pip install './${TYPES_FOLDER}[dev]'
+
+ci-install-api: _install_requirements ### Install the minimal environment with frozen requirements
+ pip install './${API_FOLDER}[dev]'
+
+ci-test: exec-test ### perform ci unit tests
+
+_install_requirements:
+ pip install --upgrade pip
+ pip install -r requirements.txt
+
+_GENERATED_FOLDER?=.
+_GENERATED_FILES=.openapi-generator/FILES
+
+_clean_gen: ### Removes all code-generated files
+ @test -s ${_GENERATED_FOLDER}/${_GENERATED_FILES} || ( \
+ ${printMsg} 'clean-generated ${_GENERATED_FOLDER}' 'FAILED (no ${_GENERATED_FILES}).' \
+ && exit -1 \
+ )
+ cd ${_GENERATED_FOLDER} && xargs rm -f < ${_GENERATED_FILES} && find . -empty -type d -delete
+ @${printMsg} 'clean-generated ${_GENERATED_FOLDER}' 'OK'
+
+clean-generated: ### Removes all code-generated files
+ @make clean
+ @_GENERATED_FOLDER=${TYPES_FOLDER} make _clean_gen
+ @_GENERATED_FOLDER=${API_FOLDER} make _clean_gen
+ @_GENERATED_FOLDER='.' make _clean_gen
diff --git a/README.md b/README.md
index e69de29..48f1c50 100644
--- a/README.md
+++ b/README.md
@@ -0,0 +1,192 @@
+# Waylay Queries Service
+
+Execute and store queries on the Waylay timeseries.
+
+Protocol version: v1.
+
+This Python package is automatically generated based on the
+Waylay Queries OpenAPI specification (API version: 0.5.0).
+For more information, please visit [the openapi specification](https://docs.waylay.io/openapi/public/redocly/queries.html).
+
+It consists of two sub-packages that are both plugins for the waylay-sdk-core package.
+- The `waylay-sdk-queries` sub-package contains the Queries API methods.
+- The `waylay-sdk-queries-types` sub-package is an extension that contains the typed model classes for all path params, query params, body params and responses for each of the API methods in `waylay-sdk-queries`.
+
+## Requirements
+This package requires Python 3.9+.
+
+## Installation
+
+Normally this package is installed together with support for other services using the [waylay-sdk](https://pypi.org/project/waylay-sdk/) umbrella package:
+* `pip install waylay-sdk` will install `waylay-sdk-queries` together with the SDK api packages for other services.
+* `pip install waylay-sdk[types-queries]` will additionally install the types package `waylay-sdk-queries-types`.
+* `pip install waylay-sdk[types]` will install the types packages for this and all other services.
+
+Alternatively, you can install support for this _queries_ service only, by installing or extending an existing [waylay-sdk-core](https://pypi.org/project/waylay-sdk-core/) installation (a quick sanity check is sketched after these options):
+
+- `pip install waylay-sdk-queries` to only install api support for _queries_.
+- `pip install waylay-sdk-queries[types]` to additionally install type support for _queries_.
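+
+As a quick sanity check after installation, you can verify that the _queries_ service plugin is registered on the SDK client. This is a minimal sketch, assuming a Waylay profile has already been configured (see the usage example below); it only relies on the `waylay_client.queries` attribute used throughout this README.
+
+```python
+from waylay.sdk.client import WaylayClient
+
+# Assumes a configured profile; see the usage example below.
+waylay_client = WaylayClient.from_profile()
+
+# The queries service plugin should be exposed as an attribute of the client.
+print(waylay_client.queries)
+```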
+
+## Usage
+
+```python
+from pprint import pprint
+
+# Import the waylay-client from the waylay-sdk-core package
+from waylay.sdk.client import WaylayClient
+from waylay.sdk.api.api_exceptions import ApiError
+
+# Initialize a waylay client instance
+waylay_client = WaylayClient.from_profile()
+
+# Note that the typed model classes for responses/parameters/... are only available when `waylay-sdk-queries-types` is installed
+from waylay.services.queries.models.query_input import QueryInput
+from waylay.services.queries.models.query_result import QueryResult
+try:
+    # Execute Query
+    # calls `POST /queries/v1/queries/v1/data`
+    api_response = await waylay_client.queries.execute.execute(
+        # query parameters:
+        query={
+            'resource': '13efb488-75ac-4dac-828a-d49c5c2ebbfc',
+            'metric': 'temperature'
+        },
+        # json data: use a generated model or a json-serializable python data structure (dict, list)
+        json=QueryInput(),
+        headers={
+            'accept': 'accept_example',
+        },
+    )
+    print("The response of queries.execute.execute:\n")
+    pprint(api_response)
+except ApiError as e:
+    print("Exception when calling queries.execute.execute: %s\n" % e)
+```
+
+
+For more information, please visit the [Waylay API documentation](https://docs.waylay.io/#/api/?id=software-development-kits).
+
+## Documentation for API Endpoints
+
+All URIs are relative to *https://api.waylay.io*
+
+Class | Method | HTTP request | Description
+------------ | ------------- | ------------- | -------------
+*ExecuteApi* | [**execute_by_name**](docs/ExecuteApi.md#execute_by_name) | **GET** /queries/v1/queries/v1/data/{query_name} | Execute Named Query
+*ExecuteApi* | [**execute**](docs/ExecuteApi.md#execute) | **POST** /queries/v1/queries/v1/data | Execute Query
+*ManageApi* | [**create**](docs/ManageApi.md#create) | **POST** /queries/v1/queries/v1/query | Post Query
+*ManageApi* | [**get**](docs/ManageApi.md#get) | **GET** /queries/v1/queries/v1/query/{query_name} | Get Query
+*ManageApi* | [**list**](docs/ManageApi.md#list) | **GET** /queries/v1/queries/v1/query | List Queries
+*ManageApi* | [**remove**](docs/ManageApi.md#remove) | **DELETE** /queries/v1/queries/v1/query/{query_name} | Remove Query
+*ManageApi* | [**update**](docs/ManageApi.md#update) | **PUT** /queries/v1/queries/v1/query/{query_name} | Update Query
+*StatusApi* | [**get**](docs/StatusApi.md#get) | **GET** /queries/v1/queries/v1 | Get Version And Health
+
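+Each row maps onto a client call of the form `waylay_client.queries.<api>.<method>`, as illustrated in the usage example above. As a hypothetical sketch (the exact signatures and return types are documented in the linked pages):
+
+```python
+# Hypothetical sketch following the naming pattern above; see docs/ManageApi.md for the actual signatures.
+query_list = await waylay_client.queries.manage.list()       # GET /queries/v1/queries/v1/query
+query = await waylay_client.queries.manage.get('my-query')   # GET /queries/v1/queries/v1/query/{query_name}
+```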
+
+## Documentation For Models
+
+ - [AggregationByResourceAndMetric](docs/AggregationByResourceAndMetric.md)
+ - [AggregationByResourceOrMetric](docs/AggregationByResourceOrMetric.md)
+ - [AggregationMethod](docs/AggregationMethod.md)
+ - [AggregationMethodOneOf](docs/AggregationMethodOneOf.md)
+ - [AggregationMethodOneOf1](docs/AggregationMethodOneOf1.md)
+ - [AggregationMethodOneOf2](docs/AggregationMethodOneOf2.md)
+ - [AggregationMethodOneOf3](docs/AggregationMethodOneOf3.md)
+ - [AggregationMethodOneOf4](docs/AggregationMethodOneOf4.md)
+ - [AggregationMethodOneOf5](docs/AggregationMethodOneOf5.md)
+ - [AggregationMethodOneOf6](docs/AggregationMethodOneOf6.md)
+ - [AggregationMethodOneOf7](docs/AggregationMethodOneOf7.md)
+ - [AggregationMethodOneOf8](docs/AggregationMethodOneOf8.md)
+ - [AggregationsInner](docs/AggregationsInner.md)
+ - [AlignAt](docs/AlignAt.md)
+ - [AlignShift](docs/AlignShift.md)
+ - [Alignment](docs/Alignment.md)
+ - [AlignmentGridInterval](docs/AlignmentGridInterval.md)
+ - [AlignmentTimezone](docs/AlignmentTimezone.md)
+ - [CauseException](docs/CauseException.md)
+ - [ColumnDataSet](docs/ColumnDataSet.md)
+ - [ColumnDataSetDataAxis](docs/ColumnDataSetDataAxis.md)
+ - [ColumnHeader](docs/ColumnHeader.md)
+ - [ColumnHeadersInner](docs/ColumnHeadersInner.md)
+ - [DataAxisOption](docs/DataAxisOption.md)
+ - [DataSetAttributes](docs/DataSetAttributes.md)
+ - [DataSetWindow](docs/DataSetWindow.md)
+ - [Datum](docs/Datum.md)
+ - [DefaultAggregation](docs/DefaultAggregation.md)
+ - [DefaultInterpolation](docs/DefaultInterpolation.md)
+ - [DeleteResponse](docs/DeleteResponse.md)
+ - [Embeddings](docs/Embeddings.md)
+ - [FromOverride](docs/FromOverride.md)
+ - [GroupingInterval](docs/GroupingInterval.md)
+ - [GroupingIntervalOverride](docs/GroupingIntervalOverride.md)
+ - [GroupingIntervalOverrideOneOf](docs/GroupingIntervalOverrideOneOf.md)
+ - [HALLink](docs/HALLink.md)
+ - [HALLinkMethod](docs/HALLinkMethod.md)
+ - [HALLinkRole](docs/HALLinkRole.md)
+ - [HTTPValidationError](docs/HTTPValidationError.md)
+ - [HeaderArrayOption](docs/HeaderArrayOption.md)
+ - [Hierarchical](docs/Hierarchical.md)
+ - [Interpolation](docs/Interpolation.md)
+ - [InterpolationMethod](docs/InterpolationMethod.md)
+ - [InterpolationMethodOneOf](docs/InterpolationMethodOneOf.md)
+ - [InterpolationMethodOneOf1](docs/InterpolationMethodOneOf1.md)
+ - [InterpolationMethodOneOf10](docs/InterpolationMethodOneOf10.md)
+ - [InterpolationMethodOneOf11](docs/InterpolationMethodOneOf11.md)
+ - [InterpolationMethodOneOf12](docs/InterpolationMethodOneOf12.md)
+ - [InterpolationMethodOneOf13](docs/InterpolationMethodOneOf13.md)
+ - [InterpolationMethodOneOf2](docs/InterpolationMethodOneOf2.md)
+ - [InterpolationMethodOneOf3](docs/InterpolationMethodOneOf3.md)
+ - [InterpolationMethodOneOf4](docs/InterpolationMethodOneOf4.md)
+ - [InterpolationMethodOneOf5](docs/InterpolationMethodOneOf5.md)
+ - [InterpolationMethodOneOf6](docs/InterpolationMethodOneOf6.md)
+ - [InterpolationMethodOneOf7](docs/InterpolationMethodOneOf7.md)
+ - [InterpolationMethodOneOf8](docs/InterpolationMethodOneOf8.md)
+ - [InterpolationMethodOneOf9](docs/InterpolationMethodOneOf9.md)
+ - [InterpolationSpec](docs/InterpolationSpec.md)
+ - [Links](docs/Links.md)
+ - [LocationInner](docs/LocationInner.md)
+ - [Message](docs/Message.md)
+ - [MessageArguments](docs/MessageArguments.md)
+ - [MessageLevel](docs/MessageLevel.md)
+ - [MessageProperties](docs/MessageProperties.md)
+ - [ObjectData](docs/ObjectData.md)
+ - [ObjectDataSet](docs/ObjectDataSet.md)
+ - [ObjectDataValue](docs/ObjectDataValue.md)
+ - [QueriesListResponse](docs/QueriesListResponse.md)
+ - [QueryDefinition](docs/QueryDefinition.md)
+ - [QueryEntityInput](docs/QueryEntityInput.md)
+ - [QueryExecutionMessage](docs/QueryExecutionMessage.md)
+ - [QueryExecutionMessageLevel](docs/QueryExecutionMessageLevel.md)
+ - [QueryHALLinks](docs/QueryHALLinks.md)
+ - [QueryInput](docs/QueryInput.md)
+ - [QueryListHALLinks](docs/QueryListHALLinks.md)
+ - [QueryListItem](docs/QueryListItem.md)
+ - [QueryOutput](docs/QueryOutput.md)
+ - [QueryResponse](docs/QueryResponse.md)
+ - [QueryResult](docs/QueryResult.md)
+ - [QueryUpdateInput](docs/QueryUpdateInput.md)
+ - [Render](docs/Render.md)
+ - [Render1](docs/Render1.md)
+ - [RenderMode](docs/RenderMode.md)
+ - [RenderModeOneOf](docs/RenderModeOneOf.md)
+ - [RenderModeOneOf1](docs/RenderModeOneOf1.md)
+ - [RenderModeOneOf2](docs/RenderModeOneOf2.md)
+ - [RenderModeOneOf3](docs/RenderModeOneOf3.md)
+ - [RenderModeOneOf4](docs/RenderModeOneOf4.md)
+ - [RenderModeOneOf5](docs/RenderModeOneOf5.md)
+ - [RenderModeOneOf6](docs/RenderModeOneOf6.md)
+ - [RenderModeOneOf7](docs/RenderModeOneOf7.md)
+ - [RenderModeOneOf8](docs/RenderModeOneOf8.md)
+ - [RenderModeOneOf9](docs/RenderModeOneOf9.md)
+ - [ResponseDataSet](docs/ResponseDataSet.md)
+ - [RowDataSet](docs/RowDataSet.md)
+ - [RowDataSetDataAxis](docs/RowDataSetDataAxis.md)
+ - [RowHeader](docs/RowHeader.md)
+ - [RowHeadersInner](docs/RowHeadersInner.md)
+ - [SeriesDataSet](docs/SeriesDataSet.md)
+ - [SeriesSpec](docs/SeriesSpec.md)
+ - [TimeWindowFrom](docs/TimeWindowFrom.md)
+ - [TimeWindowUntil](docs/TimeWindowUntil.md)
+ - [ValidationError](docs/ValidationError.md)
+ - [Window](docs/Window.md)
+ - [WindowOverride](docs/WindowOverride.md)
+
diff --git a/docs/AggregationByResourceAndMetric.md b/docs/AggregationByResourceAndMetric.md
new file mode 100644
index 0000000..573a1fb
--- /dev/null
+++ b/docs/AggregationByResourceAndMetric.md
@@ -0,0 +1,28 @@
+# AggregationByResourceAndMetric
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.aggregation_by_resource_and_metric import AggregationByResourceAndMetric
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of AggregationByResourceAndMetric from a JSON string
+aggregation_by_resource_and_metric_instance = AggregationByResourceAndMetric.from_json(json)
+# print the JSON string representation of the object
+print(aggregation_by_resource_and_metric_instance.to_json())
+
+# convert the object into a dict
+aggregation_by_resource_and_metric_dict = aggregation_by_resource_and_metric_instance.to_dict()
+# create an instance of AggregationByResourceAndMetric from a dict
+aggregation_by_resource_and_metric_from_dict = AggregationByResourceAndMetric.from_dict(aggregation_by_resource_and_metric_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AggregationByResourceOrMetric.md b/docs/AggregationByResourceOrMetric.md
new file mode 100644
index 0000000..61fc31a
--- /dev/null
+++ b/docs/AggregationByResourceOrMetric.md
@@ -0,0 +1,28 @@
+# AggregationByResourceOrMetric
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.aggregation_by_resource_or_metric import AggregationByResourceOrMetric
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of AggregationByResourceOrMetric from a JSON string
+aggregation_by_resource_or_metric_instance = AggregationByResourceOrMetric.from_json(json)
+# print the JSON string representation of the object
+print(aggregation_by_resource_or_metric_instance.to_json())
+
+# convert the object into a dict
+aggregation_by_resource_or_metric_dict = aggregation_by_resource_or_metric_instance.to_dict()
+# create an instance of AggregationByResourceOrMetric from a dict
+aggregation_by_resource_or_metric_from_dict = AggregationByResourceOrMetric.from_dict(aggregation_by_resource_or_metric_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AggregationMethod.md b/docs/AggregationMethod.md
new file mode 100644
index 0000000..3d29661
--- /dev/null
+++ b/docs/AggregationMethod.md
@@ -0,0 +1,28 @@
+# AggregationMethod
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.aggregation_method import AggregationMethod
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of AggregationMethod from a JSON string
+aggregation_method_instance = AggregationMethod.from_json(json)
+# print the JSON string representation of the object
+print(aggregation_method_instance.to_json())
+
+# convert the object into a dict
+aggregation_method_dict = aggregation_method_instance.to_dict()
+# create an instance of AggregationMethod from a dict
+aggregation_method_from_dict = AggregationMethod.from_dict(aggregation_method_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AggregationMethodOneOf.md b/docs/AggregationMethodOneOf.md
new file mode 100644
index 0000000..61e943b
--- /dev/null
+++ b/docs/AggregationMethodOneOf.md
@@ -0,0 +1,12 @@
+# AggregationMethodOneOf
+
+Use the first value (in time) to represent all data for the sample interval.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AggregationMethodOneOf1.md b/docs/AggregationMethodOneOf1.md
new file mode 100644
index 0000000..93ce2ac
--- /dev/null
+++ b/docs/AggregationMethodOneOf1.md
@@ -0,0 +1,12 @@
+# AggregationMethodOneOf1
+
+Use the last value (in time) to represent all data for the sample interval.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AggregationMethodOneOf2.md b/docs/AggregationMethodOneOf2.md
new file mode 100644
index 0000000..8e042ed
--- /dev/null
+++ b/docs/AggregationMethodOneOf2.md
@@ -0,0 +1,12 @@
+# AggregationMethodOneOf2
+
+Aggregate data by the mean value: The sum of values divided by number of observations.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AggregationMethodOneOf3.md b/docs/AggregationMethodOneOf3.md
new file mode 100644
index 0000000..c785572
--- /dev/null
+++ b/docs/AggregationMethodOneOf3.md
@@ -0,0 +1,12 @@
+# AggregationMethodOneOf3
+
+Aggregate data by the median value: the middle value when the observations are ordered; for an even number of observations, the average of the two middle values.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AggregationMethodOneOf4.md b/docs/AggregationMethodOneOf4.md
new file mode 100644
index 0000000..145f0ca
--- /dev/null
+++ b/docs/AggregationMethodOneOf4.md
@@ -0,0 +1,12 @@
+# AggregationMethodOneOf4
+
+The sum of all values summarizes the data for the sample interval.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AggregationMethodOneOf5.md b/docs/AggregationMethodOneOf5.md
new file mode 100644
index 0000000..fde1eca
--- /dev/null
+++ b/docs/AggregationMethodOneOf5.md
@@ -0,0 +1,12 @@
+# AggregationMethodOneOf5
+
+Use the count of observations in the sample interval.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AggregationMethodOneOf6.md b/docs/AggregationMethodOneOf6.md
new file mode 100644
index 0000000..b1f0085
--- /dev/null
+++ b/docs/AggregationMethodOneOf6.md
@@ -0,0 +1,12 @@
+# AggregationMethodOneOf6
+
+Use the standard deviation of all observations in the sample interval.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AggregationMethodOneOf7.md b/docs/AggregationMethodOneOf7.md
new file mode 100644
index 0000000..0e2d819
--- /dev/null
+++ b/docs/AggregationMethodOneOf7.md
@@ -0,0 +1,12 @@
+# AggregationMethodOneOf7
+
+Use the maximum of all values in the sample interval.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AggregationMethodOneOf8.md b/docs/AggregationMethodOneOf8.md
new file mode 100644
index 0000000..aec74fa
--- /dev/null
+++ b/docs/AggregationMethodOneOf8.md
@@ -0,0 +1,12 @@
+# AggregationMethodOneOf8
+
+Use the minimum of all values in the sample interval.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AggregationsInner.md b/docs/AggregationsInner.md
new file mode 100644
index 0000000..02e4769
--- /dev/null
+++ b/docs/AggregationsInner.md
@@ -0,0 +1,28 @@
+# AggregationsInner
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.aggregations_inner import AggregationsInner
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of AggregationsInner from a JSON string
+aggregations_inner_instance = AggregationsInner.from_json(json)
+# print the JSON string representation of the object
+print(aggregations_inner_instance.to_json())
+
+# convert the object into a dict
+aggregations_inner_dict = aggregations_inner_instance.to_dict()
+# create an instance of AggregationsInner from a dict
+aggregations_inner_from_dict = AggregationsInner.from_dict(aggregations_inner_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AlignAt.md b/docs/AlignAt.md
new file mode 100644
index 0000000..26d2498
--- /dev/null
+++ b/docs/AlignAt.md
@@ -0,0 +1,12 @@
+# AlignAt
+
+Possible values for `align.at`. * 'grid' Align to a fixed grid (possibly using timezone information) * 'from' Align at the `from` boundary * 'until' Align at the `until` boundary * 'boundary' Align at the `from` boundary if specified, otherwise the `until` boundary. When not specified, 'grid' is used.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
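+## Example
+
+A hypothetical alignment specification using these values (the field names follow the [Alignment](Alignment.md) properties; the concrete values are illustrative only):
+
+```python
+# Hypothetical sketch: align the aggregation window at the `from` boundary,
+# shifting backward, with boundaries interpreted in the UTC timezone.
+alignment = {
+    "at": "from",
+    "shift": "backward",
+    "timezone": "UTC",
+}
+```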
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AlignShift.md b/docs/AlignShift.md
new file mode 100644
index 0000000..11dded5
--- /dev/null
+++ b/docs/AlignShift.md
@@ -0,0 +1,12 @@
+# AlignShift
+
+Possible values for `align.shift`. * 'backward': keep the window size of the original interval specification, shifting back. * 'forward': keep the window size of the original interval specification, shifting forward. * 'wrap': enlarge the window size to include all of the original interval. When not specified, 'backward' is used.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/Alignment.md b/docs/Alignment.md
new file mode 100644
index 0000000..142a7cd
--- /dev/null
+++ b/docs/Alignment.md
@@ -0,0 +1,33 @@
+# Alignment
+
+Aggregation Alignment Options. Specifies how the aggregation grid is aligned.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**at** | [**AlignAt**](AlignAt.md) | | [optional]
+**shift** | [**AlignShift**](AlignShift.md) | | [optional]
+**freq** | [**AlignmentGridInterval**](AlignmentGridInterval.md) | | [optional]
+**timezone** | [**AlignmentTimezone**](AlignmentTimezone.md) | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.alignment import Alignment
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of Alignment from a JSON string
+alignment_instance = Alignment.from_json(json)
+# print the JSON string representation of the object
+print(alignment_instance.to_json())
+
+# convert the object into a dict
+alignment_dict = alignment_instance.to_dict()
+# create an instance of Alignment from a dict
+alignment_from_dict = Alignment.from_dict(alignment_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AlignmentGridInterval.md b/docs/AlignmentGridInterval.md
new file mode 100644
index 0000000..0fc004d
--- /dev/null
+++ b/docs/AlignmentGridInterval.md
@@ -0,0 +1,29 @@
+# AlignmentGridInterval
+
+Defines the grid used to align the aggregation window. The window will align at whole-unit multiples of this interval. For timezone-dependent intervals like `PT1D`, use `align.timezone` to fix the absolute timestamps of the grid boundaries. If not specified, defaults to the `freq` aggregation interval.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.alignment_grid_interval import AlignmentGridInterval
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of AlignmentGridInterval from a JSON string
+alignment_grid_interval_instance = AlignmentGridInterval.from_json(json)
+# print the JSON string representation of the object
+print(alignment_grid_interval_instance.to_json())
+
+# convert the object into a dict
+alignment_grid_interval_dict = alignment_grid_interval_instance.to_dict()
+# create an instance of AlignmentGridInterval from a dict
+alignment_grid_interval_from_dict = AlignmentGridInterval.from_dict(alignment_grid_interval_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/AlignmentTimezone.md b/docs/AlignmentTimezone.md
new file mode 100644
index 0000000..072d0ba
--- /dev/null
+++ b/docs/AlignmentTimezone.md
@@ -0,0 +1,29 @@
+# AlignmentTimezone
+
+The timezone to use when shifting boundaries, especially at day granularity. Also affects the rendering of timestamps when `render.iso_timestamp` is enabled. When not specified, the `UTC` timezone is used.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.alignment_timezone import AlignmentTimezone
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of AlignmentTimezone from a JSON string
+alignment_timezone_instance = AlignmentTimezone.from_json(json)
+# print the JSON string representation of the object
+print(alignment_timezone_instance.to_json())
+
+# convert the object into a dict
+alignment_timezone_dict = alignment_timezone_instance.to_dict()
+# create an instance of AlignmentTimezone from a dict
+alignment_timezone_from_dict = AlignmentTimezone.from_dict(alignment_timezone_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/CauseException.md b/docs/CauseException.md
new file mode 100644
index 0000000..3f65c44
--- /dev/null
+++ b/docs/CauseException.md
@@ -0,0 +1,32 @@
+# CauseException
+
+Describes the exception that caused a message.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**type** | **str** | |
+**message** | **str** | |
+**stacktrace** | **List[str]** | |
+
+## Example
+
+```python
+from waylay.services.queries.models.cause_exception import CauseException
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of CauseException from a JSON string
+cause_exception_instance = CauseException.from_json(json)
+# print the JSON string representation of the object
+print(cause_exception_instance.to_json())
+
+# convert the object into a dict
+cause_exception_dict = cause_exception_instance.to_dict()
+# create an instance of CauseException from a dict
+cause_exception_from_dict = CauseException.from_dict(cause_exception_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/ColumnDataSet.md b/docs/ColumnDataSet.md
new file mode 100644
index 0000000..da94190
--- /dev/null
+++ b/docs/ColumnDataSet.md
@@ -0,0 +1,34 @@
+# ColumnDataSet
+
+Column-oriented dataset with a rows header. Timeseries data layout with a rows header containing the index data. The data array contains series data prefixed by series attributes. The `rows` index is prefixed by the names of these series attributes. Result for render options `data_axis=row` and `header_array=column`.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**attributes** | [**DataSetAttributes**](DataSetAttributes.md) | | [optional]
+**window_spec** | [**DataSetWindow**](DataSetWindow.md) | | [optional]
+**data_axis** | [**ColumnDataSetDataAxis**](ColumnDataSetDataAxis.md) | | [optional] [default to ColumnDataSetDataAxis.ROW]
+**rows** | [**List[RowHeadersInner]**](RowHeadersInner.md) | Header attributes for the index data. The initial string-valued headers (normally `resource`, `metric`, `aggregation`) indicate that each data row starts with those series attributes. The remaining object-valued row headers contain the index data. |
+**data** | **List[List[Datum]]** | Each entry holds all metric observation values for a single series, prefixed by the series attributes. |
+
+## Example
+
+```python
+from waylay.services.queries.models.column_data_set import ColumnDataSet
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of ColumnDataSet from a JSON string
+column_data_set_instance = ColumnDataSet.from_json(json)
+# print the JSON string representation of the object
+print(column_data_set_instance.to_json())
+
+# convert the object into a dict
+column_data_set_dict = column_data_set_instance.to_dict()
+# create an instance of ColumnDataSet from a dict
+column_data_set_from_dict = ColumnDataSet.from_dict(column_data_set_dict)
+```
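+
+For illustration, a hypothetical payload in this layout (the values and the `timestamp` key are made up; the shape is inferred from the property descriptions above):
+
+```python
+# Hypothetical ColumnDataSet payload: the `rows` header first names the series
+# attributes (`resource`, `metric`, `aggregation`), then lists the index entries;
+# each `data` entry is one series, prefixed by those attribute values.
+column_data_set_example = {
+    "data_axis": "row",
+    "rows": [
+        "resource", "metric", "aggregation",
+        {"timestamp": 1672531200000},
+        {"timestamp": 1672534800000},
+    ],
+    "data": [
+        ["device-1", "temperature", "mean", 17.2, 17.9],
+    ],
+}
+```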
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/ColumnDataSetDataAxis.md b/docs/ColumnDataSetDataAxis.md
new file mode 100644
index 0000000..c3c836b
--- /dev/null
+++ b/docs/ColumnDataSetDataAxis.md
@@ -0,0 +1,11 @@
+# ColumnDataSetDataAxis
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/ColumnHeader.md b/docs/ColumnHeader.md
new file mode 100644
index 0000000..3709673
--- /dev/null
+++ b/docs/ColumnHeader.md
@@ -0,0 +1,32 @@
+# ColumnHeader
+
+Column attributes. Attributes that identify and describe the data in this column.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**resource** | **str** | |
+**metric** | **str** | |
+**aggregation** | **str** | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.column_header import ColumnHeader
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of ColumnHeader from a JSON string
+column_header_instance = ColumnHeader.from_json(json)
+# print the JSON string representation of the object
+print(column_header_instance.to_json())
+
+# convert the object into a dict
+column_header_dict = column_header_instance.to_dict()
+# create an instance of ColumnHeader from a dict
+column_header_from_dict = ColumnHeader.from_dict(column_header_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/ColumnHeadersInner.md b/docs/ColumnHeadersInner.md
new file mode 100644
index 0000000..2331b3e
--- /dev/null
+++ b/docs/ColumnHeadersInner.md
@@ -0,0 +1,31 @@
+# ColumnHeadersInner
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**resource** | **str** | |
+**metric** | **str** | |
+**aggregation** | **str** | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.column_headers_inner import ColumnHeadersInner
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of ColumnHeadersInner from a JSON string
+column_headers_inner_instance = ColumnHeadersInner.from_json(json)
+# print the JSON string representation of the object
+print(column_headers_inner_instance.to_json())
+
+# convert the object into a dict
+column_headers_inner_dict = column_headers_inner_instance.to_dict()
+# create an instance of ColumnHeadersInner from a dict
+column_headers_inner_from_dict = ColumnHeadersInner.from_dict(column_headers_inner_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/DataAxisOption.md b/docs/DataAxisOption.md
new file mode 100644
index 0000000..9253dcb
--- /dev/null
+++ b/docs/DataAxisOption.md
@@ -0,0 +1,12 @@
+# DataAxisOption
+
+Allowed values for the `render.data_axis` option.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/DataSetAttributes.md b/docs/DataSetAttributes.md
new file mode 100644
index 0000000..7919f8b
--- /dev/null
+++ b/docs/DataSetAttributes.md
@@ -0,0 +1,30 @@
+# DataSetAttributes
+
+Data Set Attributes. Data attributes that apply to all data in this set.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**role** | **str** | The role of the series specification that was used to compile this data set. | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.data_set_attributes import DataSetAttributes
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of DataSetAttributes from a JSON string
+data_set_attributes_instance = DataSetAttributes.from_json(json)
+# print the JSON string representation of the object
+print(data_set_attributes_instance.to_json())
+
+# convert the object into a dict
+data_set_attributes_dict = data_set_attributes_instance.to_dict()
+# create an instance of DataSetAttributes from a dict
+data_set_attributes_from_dict = DataSetAttributes.from_dict(data_set_attributes_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/DataSetWindow.md b/docs/DataSetWindow.md
new file mode 100644
index 0000000..85e71f9
--- /dev/null
+++ b/docs/DataSetWindow.md
@@ -0,0 +1,32 @@
+# DataSetWindow
+
+Data Window. Statistics of the time axis of a data set. Present with render option `include_window_spec=true`.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**until** | **int** | Exclusive upper bound of the time axis in unix epoch milliseconds. |
+**window** | **str** | Time axis length as an ISO8601 period. |
+**freq** | **str** | Time axis aggregation interval as an ISO8601 period. |
+
+## Example
+
+```python
+from waylay.services.queries.models.data_set_window import DataSetWindow
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of DataSetWindow from a JSON string
+data_set_window_instance = DataSetWindow.from_json(json)
+# print the JSON string representation of the object
+print(data_set_window_instance.to_json())
+
+# convert the object into a dict
+data_set_window_dict = data_set_window_instance.to_dict()
+# create an instance of DataSetWindow from a dict
+data_set_window_from_dict = DataSetWindow.from_dict(data_set_window_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/Datum.md b/docs/Datum.md
new file mode 100644
index 0000000..aff6df6
--- /dev/null
+++ b/docs/Datum.md
@@ -0,0 +1,29 @@
+# Datum
+
+A single metric value for a timeseries. A null value indicates that no (aggregated/interpolated) value exists for the corresponding timestamp.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.datum import Datum
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of Datum from a JSON string
+datum_instance = Datum.from_json(json)
+# print the JSON string representation of the object
+print(datum_instance.to_json())
+
+# convert the object into a dict
+datum_dict = datum_instance.to_dict()
+# create an instance of Datum from a dict
+datum_from_dict = Datum.from_dict(datum_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/DefaultAggregation.md b/docs/DefaultAggregation.md
new file mode 100644
index 0000000..890a87d
--- /dev/null
+++ b/docs/DefaultAggregation.md
@@ -0,0 +1,29 @@
+# DefaultAggregation
+
+Default aggregation method(s) for the series in the query.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.default_aggregation import DefaultAggregation
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of DefaultAggregation from a JSON string
+default_aggregation_instance = DefaultAggregation.from_json(json)
+# print the JSON string representation of the object
+print(default_aggregation_instance.to_json())
+
+# convert the object into a dict
+default_aggregation_dict = default_aggregation_instance.to_dict()
+# create an instance of DefaultAggregation from a dict
+default_aggregation_from_dict = DefaultAggregation.from_dict(default_aggregation_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/DefaultInterpolation.md b/docs/DefaultInterpolation.md
new file mode 100644
index 0000000..6c19e55
--- /dev/null
+++ b/docs/DefaultInterpolation.md
@@ -0,0 +1,32 @@
+# DefaultInterpolation
+
+Default Interpolation method for the series (if aggregated).
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**method** | [**InterpolationMethod**](InterpolationMethod.md) | |
+**value** | **int** | Optional parameter value for the interpolation method (see method description). | [optional]
+**order** | **int** | Optional order parameter for the interpolation method (see method description). | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.default_interpolation import DefaultInterpolation
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of DefaultInterpolation from a JSON string
+default_interpolation_instance = DefaultInterpolation.from_json(json)
+# print the JSON string representation of the object
+print(default_interpolation_instance.to_json())
+
+# convert the object into a dict
+default_interpolation_dict = default_interpolation_instance.to_dict()
+# create an instance of DefaultInterpolation from a dict
+default_interpolation_from_dict = DefaultInterpolation.from_dict(default_interpolation_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/DeleteResponse.md b/docs/DeleteResponse.md
new file mode 100644
index 0000000..6d43743
--- /dev/null
+++ b/docs/DeleteResponse.md
@@ -0,0 +1,32 @@
+# DeleteResponse
+
+Confirmation of a delete request.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**messages** | [**List[Message]**](Message.md) | | [optional]
+**links** | [**Dict[str, Links]**](Links.md) | HAL links, indexed by link relation. | [optional]
+**embeddings** | [**Dict[str, Embeddings]**](Embeddings.md) | Hal embeddings, indexed by relation. | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.delete_response import DeleteResponse
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of DeleteResponse from a JSON string
+delete_response_instance = DeleteResponse.from_json(json)
+# print the JSON string representation of the object
+print(delete_response_instance.to_json())
+
+# convert the object into a dict
+delete_response_dict = delete_response_instance.to_dict()
+# create an instance of DeleteResponse from a dict
+delete_response_from_dict = DeleteResponse.from_dict(delete_response_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/Embeddings.md b/docs/Embeddings.md
new file mode 100644
index 0000000..b6b3869
--- /dev/null
+++ b/docs/Embeddings.md
@@ -0,0 +1,28 @@
+# Embeddings
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.embeddings import Embeddings
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of Embeddings from a JSON string
+embeddings_instance = Embeddings.from_json(json)
+# print the JSON string representation of the object
+print(embeddings_instance.to_json())
+
+# convert the object into a dict
+embeddings_dict = embeddings_instance.to_dict()
+# create an instance of Embeddings from a dict
+embeddings_from_dict = Embeddings.from_dict(embeddings_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/ExecuteApi.md b/docs/ExecuteApi.md
new file mode 100644
index 0000000..fd8b576
--- /dev/null
+++ b/docs/ExecuteApi.md
@@ -0,0 +1,190 @@
+# waylay.services.queries.ExecuteApi
+
+All URIs are relative to *https://api.waylay.io*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**execute_by_name**](ExecuteApi.md#execute_by_name) | **GET** /queries/v1/queries/v1/data/{query_name} | Execute Named Query
+[**execute**](ExecuteApi.md#execute) | **POST** /queries/v1/queries/v1/data | Execute Query
+
+# **execute_by_name**
+> execute_by_name(
+> query_name: str,
+> query: ExecuteByNameQuery,
+> headers
+> ) -> QueryResult
+
+Execute Named Query
+
+Execute a named timeseries query. Retrieves a stored query definition by name, applies overrides from the url parameters, and executes it.
+
+### Example
+
+```python
+from pprint import pprint
+
+# Import the waylay-client from the waylay-sdk-core package
+from waylay.sdk.client import WaylayClient
+from waylay.sdk.api.api_exceptions import ApiError
+
+# Initialize a waylay client instance
+waylay_client = WaylayClient.from_profile()
+
+# Note that the typed model classes for responses/parameters/... are only available when `waylay-sdk-queries-types` is installed
+from waylay.services.queries.models.query_result import QueryResult
+try:
+ # Execute Named Query
+ # calls `GET /queries/v1/queries/v1/data/{query_name}`
+ api_response = await waylay_client.queries.execute.execute_by_name(
+ 'query_name_example', # query_name | path param "query_name"
+ # query parameters:
+ query = {
+            'resource': '13efb488-75ac-4dac-828a-d49c5c2ebbfc',
+ 'metric': 'temperature'
+ },
+ headers = {
+ 'accept': 'accept_example',
+ },
+ )
+ print("The response of queries.execute.execute_by_name:\n")
+ pprint(api_response)
+except ApiError as e:
+ print("Exception when calling queries.execute.execute_by_name: %s\n" % e)
+```
+
+### Endpoint
+```
+GET /queries/v1/queries/v1/data/{query_name}
+```
+### Parameters
+
+Name | Type | API binding | Description | Notes
+-------- | ----- | ------------- | ------------- | -------------
+**query_name** | **str** | path parameter `"query_name"` | |
+**query** | [QueryParamTypes](Operation.md#req_arg_query) \| **None** | URL query parameter | |
+**query['resource']** (dict) <br> **query.resource** (Query) | **str** | query parameter `"resource"` | Default Resource Override. | [optional]
+**query['metric']** (dict) <br> **query.metric** (Query) | **str** | query parameter `"metric"` | Default Metric Override. | [optional]
+**query['aggregation']** (dict) <br> **query.aggregation** (Query) | **AggregationMethod** | query parameter `"aggregation"` | | [optional]
+**query['interpolation']** (dict) <br> **query.interpolation** (Query) | [**Interpolation**](Interpolation.md) | query parameter `"interpolation"` | | [optional]
+**query['freq']** (dict) <br> **query.freq** (Query) | **GroupingIntervalOverride** | query parameter `"freq"` | Override for the `freq` query attribute. | [optional]
+**query['from']** (dict) <br> **query.var_from** (Query) | **FromOverride** | query parameter `"from"` | | [optional]
+**query['until']** (dict) <br> **query.until** (Query) | **FromOverride** | query parameter `"until"` | | [optional]
+**query['window']** (dict) <br> **query.window** (Query) | **WindowOverride** | query parameter `"window"` | | [optional]
+**query['periods']** (dict) <br> **query.periods** (Query) | **int** | query parameter `"periods"` | | [optional]
+**query['render']** (dict) <br> **query.render** (Query) | **Render1** | query parameter `"render"` | | [optional]
+**headers** | [HeaderTypes](Operation.md#req_headers) | request headers | |
+**headers['accept']** | **str** | request header `"accept"` | Use a 'text/csv' accept header to get CSV formatted results. | [optional]
+
+### Return type
+
+Selected path param | Raw response param | Return Type | Description | Links
+------------------- | ------------------ | ------------ | ----------- | -----
+Literal[""] _(default)_ | False _(default)_ | **`QueryResult`** | | [QueryResult](QueryResult.md)
+str | False _(default)_ | **`Any`** | If any other string value for the selected path is provided, the exact type of the response will only be known at runtime. |
+/ | True | `Response` | The raw http response object.
+
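+The `select_path` and `raw_response` combinations above are common to all operation methods (see [Operation](Operation.md)). As an illustration only (the stored query name is hypothetical, and it is an assumption that the raw `Response` object exposes `status_code` and `text` in the httpx style), requesting CSV output and keeping the raw response could look like:
+
+```python
+from waylay.sdk.client import WaylayClient
+
+waylay_client = WaylayClient.from_profile()
+
+# keep the raw http response instead of a parsed QueryResult,
+# and ask for CSV rendering via the accept header
+raw = await waylay_client.queries.execute.execute_by_name(
+    'my-stored-query',
+    headers={'accept': 'text/csv'},
+    raw_response=True,
+)
+print(raw.status_code)
+print(raw.text[:200])
+```
+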
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json, text/csv
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Successful Response | - |
+**422** | Validation Error | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **execute**
+> execute(
+> query: ExecuteQuery,
+> headers
+> ) -> QueryResult
+
+Execute Query
+
+Execute a timeseries query. Executes the timeseries query specified in the request body, after applying any overrides from the url parameters.
+
+### Example
+
+```python
+from pprint import pprint
+
+# Import the waylay-client from the waylay-sdk-core package
+from waylay.sdk.client import WaylayClient
+from waylay.sdk.api.api_exceptions import ApiError
+
+# Initialize a waylay client instance
+waylay_client = WaylayClient.from_profile()
+
+# Note that the typed model classes for responses/parameters/... are only available when `waylay-sdk-queries-types` is installed
+from waylay.services.queries.models.query_input import QueryInput
+from waylay.services.queries.models.query_result import QueryResult
+try:
+ # Execute Query
+ # calls `POST /queries/v1/queries/v1/data`
+ api_response = await waylay_client.queries.execute.execute(
+ # query parameters:
+ query = {
+            'resource': '13efb488-75ac-4dac-828a-d49c5c2ebbfc',
+ 'metric': 'temperature'
+ },
+ # json data: use a generated model or a json-serializable python data structure (dict, list)
+        json = QueryInput(),  # QueryInput
+ headers = {
+ 'accept': 'accept_example',
+ },
+ )
+ print("The response of queries.execute.execute:\n")
+ pprint(api_response)
+except ApiError as e:
+ print("Exception when calling queries.execute.execute: %s\n" % e)
+```
+
+### Endpoint
+```
+POST /queries/v1/queries/v1/data
+```
+### Parameters
+
+Name | Type | API binding | Description | Notes
+-------- | ----- | ------------- | ------------- | -------------
+**json** | [**QueryInput**](QueryInput.md) | json request body | |
+**query** | [QueryParamTypes](Operation.md#req_arg_query) \| **None** | URL query parameter | |
+**query['resource']** (dict) <br> **query.resource** (Query) | **str** | query parameter `"resource"` | Default Resource Override. | [optional]
+**query['metric']** (dict) <br> **query.metric** (Query) | **str** | query parameter `"metric"` | Default Metric Override. | [optional]
+**query['aggregation']** (dict) <br> **query.aggregation** (Query) | **AggregationMethod** | query parameter `"aggregation"` | | [optional]
+**query['interpolation']** (dict) <br> **query.interpolation** (Query) | [**Interpolation**](Interpolation.md) | query parameter `"interpolation"` | | [optional]
+**query['freq']** (dict) <br> **query.freq** (Query) | **GroupingIntervalOverride** | query parameter `"freq"` | Override for the `freq` query attribute. | [optional]
+**query['from']** (dict) <br> **query.var_from** (Query) | **FromOverride** | query parameter `"from"` | | [optional]
+**query['until']** (dict) <br> **query.until** (Query) | **FromOverride** | query parameter `"until"` | | [optional]
+**query['window']** (dict) <br> **query.window** (Query) | **WindowOverride** | query parameter `"window"` | | [optional]
+**query['periods']** (dict) <br> **query.periods** (Query) | **int** | query parameter `"periods"` | | [optional]
+**query['render']** (dict) <br> **query.render** (Query) | **Render1** | query parameter `"render"` | | [optional]
+**headers** | [HeaderTypes](Operation.md#req_headers) | request headers | |
+**headers['accept']** | **str** | request header `"accept"` | Use a 'text/csv' accept header to get CSV formatted results. | [optional]
+
+### Return type
+
+Selected path param | Raw response param | Return Type | Description | Links
+------------------- | ------------------ | ------------ | ----------- | -----
+Literal[""] _(default)_ | False _(default)_ | **`QueryResult`** | | [QueryResult](QueryResult.md)
+str | False _(default)_ | **`Any`** | If any other string value for the selected path is provided, the exact type of the response will only be known at runtime. |
+/ | True | `Response` | The raw http response object.
+
+### HTTP request headers
+
+ - **Content-Type**: application/json
+ - **Accept**: application/json, text/csv
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Successful Response | - |
+**422** | Validation Error | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/docs/FromOverride.md b/docs/FromOverride.md
new file mode 100644
index 0000000..0e5dc4b
--- /dev/null
+++ b/docs/FromOverride.md
@@ -0,0 +1,28 @@
+# FromOverride
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.from_override import FromOverride
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of FromOverride from a JSON string
+from_override_instance = FromOverride.from_json(json)
+# print the JSON string representation of the object
+print(from_override_instance.to_json())
+
+# convert the object into a dict
+from_override_dict = from_override_instance.to_dict()
+# create an instance of FromOverride from a dict
+from_override_from_dict = FromOverride.from_dict(from_override_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/GroupingInterval.md b/docs/GroupingInterval.md
new file mode 100644
index 0000000..6b21816
--- /dev/null
+++ b/docs/GroupingInterval.md
@@ -0,0 +1,29 @@
+# GroupingInterval
+
+Interval used to aggregate or regularize data. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.grouping_interval import GroupingInterval
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of GroupingInterval from a JSON string
+grouping_interval_instance = GroupingInterval.from_json(json)
+# print the JSON string representation of the object
+print(grouping_interval_instance.to_json())
+
+# convert the object into a dict
+grouping_interval_dict = grouping_interval_instance.to_dict()
+# create an instance of GroupingInterval from a dict
+grouping_interval_from_dict = GroupingInterval.from_dict(grouping_interval_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/GroupingIntervalOverride.md b/docs/GroupingIntervalOverride.md
new file mode 100644
index 0000000..209ba19
--- /dev/null
+++ b/docs/GroupingIntervalOverride.md
@@ -0,0 +1,29 @@
+# GroupingIntervalOverride
+
+Override for the `freq` query attribute.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.grouping_interval_override import GroupingIntervalOverride
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of GroupingIntervalOverride from a JSON string
+grouping_interval_override_instance = GroupingIntervalOverride.from_json(json)
+# print the JSON string representation of the object
+print(grouping_interval_override_instance.to_json())
+
+# convert the object into a dict
+grouping_interval_override_dict = grouping_interval_override_instance.to_dict()
+# create an instance of GroupingIntervalOverride from a dict
+grouping_interval_override_from_dict = GroupingIntervalOverride.from_dict(grouping_interval_override_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/GroupingIntervalOverrideOneOf.md b/docs/GroupingIntervalOverrideOneOf.md
new file mode 100644
index 0000000..90ae709
--- /dev/null
+++ b/docs/GroupingIntervalOverrideOneOf.md
@@ -0,0 +1,12 @@
+# GroupingIntervalOverrideOneOf
+
+When `inferred` is specified, the frequency of aggregation will be inferred from the main/first time series. This can be used to regularize the time series.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
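+As a purely illustrative sketch (the stored query name is hypothetical; passing the literal string `'inferred'` as the `freq` override is an assumption based on the description above):
+
+```python
+from waylay.sdk.client import WaylayClient
+
+waylay_client = WaylayClient.from_profile()
+
+# ask the service to infer the aggregation frequency from the main/first series
+result = await waylay_client.queries.execute.execute_by_name(
+    'my-stored-query',
+    query={'freq': 'inferred'},
+)
+```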
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/HALLink.md b/docs/HALLink.md
new file mode 100644
index 0000000..c9997a6
--- /dev/null
+++ b/docs/HALLink.md
@@ -0,0 +1,32 @@
+# HALLink
+
+A link target in a HAL response.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**href** | **str** | Target url for this link. |
+**type** | **str** | Type of the resource referenced by this link. | [optional]
+**method** | [**HALLinkMethod**](HALLinkMethod.md) | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.hal_link import HALLink
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of HALLink from a JSON string
+hal_link_instance = HALLink.from_json(json)
+# print the JSON string representation of the object
+print(hal_link_instance.to_json())
+
+# convert the object into a dict
+hal_link_dict = hal_link_instance.to_dict()
+# create an instance of HALLink from a dict
+hal_link_from_dict = HALLink.from_dict(hal_link_dict)
+```
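+A more concrete sketch with representative field values (the `href` target is made up; `GET` is assumed to be one of the allowed `HALLinkMethod` values):
+
+```python
+from waylay.services.queries.models.hal_link import HALLink
+
+# illustrative HAL link payload
+link = HALLink.from_json('{"href": "/queries/v1/queries/v1/query/my-stored-query", "method": "GET"}')
+print(link.href)
+```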
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/HALLinkMethod.md b/docs/HALLinkMethod.md
new file mode 100644
index 0000000..3bb6158
--- /dev/null
+++ b/docs/HALLinkMethod.md
@@ -0,0 +1,12 @@
+# HALLinkMethod
+
+An http method that can be specified in a HAL link.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/HALLinkRole.md b/docs/HALLinkRole.md
new file mode 100644
index 0000000..2011dea
--- /dev/null
+++ b/docs/HALLinkRole.md
@@ -0,0 +1,12 @@
+# HALLinkRole
+
+Supported link and embedding roles in HAL representations.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/HTTPValidationError.md b/docs/HTTPValidationError.md
new file mode 100644
index 0000000..7b21a31
--- /dev/null
+++ b/docs/HTTPValidationError.md
@@ -0,0 +1,29 @@
+# HTTPValidationError
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**detail** | [**List[ValidationError]**](ValidationError.md) | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.http_validation_error import HTTPValidationError
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of HTTPValidationError from a JSON string
+http_validation_error_instance = HTTPValidationError.from_json(json)
+# print the JSON string representation of the object
+print(http_validation_error_instance.to_json())
+
+# convert the object into a dict
+http_validation_error_dict = http_validation_error_instance.to_dict()
+# create an instance of HTTPValidationError from a dict
+http_validation_error_from_dict = HTTPValidationError.from_dict(http_validation_error_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/HeaderArrayOption.md b/docs/HeaderArrayOption.md
new file mode 100644
index 0000000..e585de3
--- /dev/null
+++ b/docs/HeaderArrayOption.md
@@ -0,0 +1,12 @@
+# HeaderArrayOption
+
+Allowed values for the render.header_array option.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/Hierarchical.md b/docs/Hierarchical.md
new file mode 100644
index 0000000..6b78e6e
--- /dev/null
+++ b/docs/Hierarchical.md
@@ -0,0 +1,29 @@
+# Hierarchical
+
+If true, use hierarchical objects to represent multiple row (or column) dimensions; otherwise multi-level keys are concatenated with a dot delimiter. If the value is a list, only these levels are kept as separate levels, while the remaining levels get concatenated keys.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.hierarchical import Hierarchical
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of Hierarchical from a JSON string
+hierarchical_instance = Hierarchical.from_json(json)
+# print the JSON string representation of the object
+print(hierarchical_instance.to_json())
+
+# convert the object into a dict
+hierarchical_dict = hierarchical_instance.to_dict()
+# create an instance of Hierarchical from a dict
+hierarchical_from_dict = Hierarchical.from_dict(hierarchical_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/Interpolation.md b/docs/Interpolation.md
new file mode 100644
index 0000000..caef36d
--- /dev/null
+++ b/docs/Interpolation.md
@@ -0,0 +1,31 @@
+# Interpolation
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**method** | [**InterpolationMethod**](InterpolationMethod.md) | |
+**value** | **int** | Optional parameter value for the interpolation method (see method description). | [optional]
+**order** | **int** | Optional order parameter for the interpolation method (see method description). | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.interpolation import Interpolation
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of Interpolation from a JSON string
+interpolation_instance = Interpolation.from_json(json)
+# print the JSON string representation of the object
+print(interpolation_instance.to_json())
+
+# convert the object into a dict
+interpolation_dict = interpolation_instance.to_dict()
+# create an instance of Interpolation from a dict
+interpolation_from_dict = Interpolation.from_dict(interpolation_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethod.md b/docs/InterpolationMethod.md
new file mode 100644
index 0000000..9a3ad8e
--- /dev/null
+++ b/docs/InterpolationMethod.md
@@ -0,0 +1,28 @@
+# InterpolationMethod
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.interpolation_method import InterpolationMethod
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of InterpolationMethod from a JSON string
+interpolation_method_instance = InterpolationMethod.from_json(json)
+# print the JSON string representation of the object
+print(interpolation_method_instance.to_json())
+
+# convert the object into a dict
+interpolation_method_dict = interpolation_method_instance.to_dict()
+# create an instance of InterpolationMethod from a dict
+interpolation_method_from_dict = InterpolationMethod.from_dict(interpolation_method_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf.md b/docs/InterpolationMethodOneOf.md
new file mode 100644
index 0000000..33ddb1f
--- /dev/null
+++ b/docs/InterpolationMethodOneOf.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf
+
+Interpolate with the value of the first observed point. This method also extrapolates.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf1.md b/docs/InterpolationMethodOneOf1.md
new file mode 100644
index 0000000..2a4f086
--- /dev/null
+++ b/docs/InterpolationMethodOneOf1.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf1
+
+Interpolate with a fixed, user-specified value. This method also extrapolates.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf10.md b/docs/InterpolationMethodOneOf10.md
new file mode 100644
index 0000000..b0ec73d
--- /dev/null
+++ b/docs/InterpolationMethodOneOf10.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf10
+
+Interpolate with a spline function of a user-specified order.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf11.md b/docs/InterpolationMethodOneOf11.md
new file mode 100644
index 0000000..412bb52
--- /dev/null
+++ b/docs/InterpolationMethodOneOf11.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf11
+
+Interpolate with the derivative of order 1.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf12.md b/docs/InterpolationMethodOneOf12.md
new file mode 100644
index 0000000..9bc6a73
--- /dev/null
+++ b/docs/InterpolationMethodOneOf12.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf12
+
+Interpolate with a piecewise cubic spline function.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf13.md b/docs/InterpolationMethodOneOf13.md
new file mode 100644
index 0000000..25e52fd
--- /dev/null
+++ b/docs/InterpolationMethodOneOf13.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf13
+
+Interpolate with a non-smoothing spline of order 2, called Akima interpolation.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf2.md b/docs/InterpolationMethodOneOf2.md
new file mode 100644
index 0000000..d6f08e4
--- /dev/null
+++ b/docs/InterpolationMethodOneOf2.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf2
+
+Same as pad, but using the last observed value. This method also extrapolates.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf3.md b/docs/InterpolationMethodOneOf3.md
new file mode 100644
index 0000000..21a20e0
--- /dev/null
+++ b/docs/InterpolationMethodOneOf3.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf3
+
+Linearly go from the first observed value of the gap to the last observed one. This method also extrapolates.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf4.md b/docs/InterpolationMethodOneOf4.md
new file mode 100644
index 0000000..c18a07b
--- /dev/null
+++ b/docs/InterpolationMethodOneOf4.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf4
+
+Use the value that is closest in time.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf5.md b/docs/InterpolationMethodOneOf5.md
new file mode 100644
index 0000000..a538ab9
--- /dev/null
+++ b/docs/InterpolationMethodOneOf5.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf5
+
+Interpolate with a spline function of order 0, which is a piecewise polynomial.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf6.md b/docs/InterpolationMethodOneOf6.md
new file mode 100644
index 0000000..fe547ed
--- /dev/null
+++ b/docs/InterpolationMethodOneOf6.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf6
+
+Interpolate with a spline function of order 1, which is a piecewise polynomial.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf7.md b/docs/InterpolationMethodOneOf7.md
new file mode 100644
index 0000000..cee1408
--- /dev/null
+++ b/docs/InterpolationMethodOneOf7.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf7
+
+Interpolate with a spline function of order 2, which is a piecewise polynomial.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf8.md b/docs/InterpolationMethodOneOf8.md
new file mode 100644
index 0000000..8976ec3
--- /dev/null
+++ b/docs/InterpolationMethodOneOf8.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf8
+
+Interpolate with a spline function of order 3, which is a piecewise polynomial.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationMethodOneOf9.md b/docs/InterpolationMethodOneOf9.md
new file mode 100644
index 0000000..d3a3fd2
--- /dev/null
+++ b/docs/InterpolationMethodOneOf9.md
@@ -0,0 +1,12 @@
+# InterpolationMethodOneOf9
+
+Interpolate with a polynomial of the lowest possible degree passing through the data points.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/InterpolationSpec.md b/docs/InterpolationSpec.md
new file mode 100644
index 0000000..5ac6228
--- /dev/null
+++ b/docs/InterpolationSpec.md
@@ -0,0 +1,32 @@
+# InterpolationSpec
+
+Defines whether and how to treat missing values. This can occur in two circumstances when aggregating (setting a sample frequency): * missing values: if there are missing (or invalid) values stored for a given freq-interval, "interpolation" specifies how to compute these. * down-sampling: when the specified freq is smaller than the series’ actual frequency, "interpolation" specifies how to compute intermediate values.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**method** | [**InterpolationMethod**](InterpolationMethod.md) | |
+**value** | **int** | Optional parameter value for the interpolation method (see method description). | [optional]
+**order** | **int** | Optional order parameter for the interpolation method (see method description). | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.interpolation_spec import InterpolationSpec
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of InterpolationSpec from a JSON string
+interpolation_spec_instance = InterpolationSpec.from_json(json)
+# print the JSON string representation of the object
+print(interpolation_spec_instance.to_json())
+
+# convert the object into a dict
+interpolation_spec_dict = interpolation_spec_instance.to_dict()
+# create an instance of InterpolationSpec from a dict
+interpolation_spec_from_dict = InterpolationSpec.from_dict(interpolation_spec_dict)
+```
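+A more concrete construction sketch (it is an assumption that `'linear'` and `'spline'` are among the accepted `InterpolationMethod` names; see the `InterpolationMethodOneOf*` descriptions):
+
+```python
+from waylay.services.queries.models.interpolation_spec import InterpolationSpec
+
+# bridge gaps by connecting the surrounding observations linearly
+linear_spec = InterpolationSpec(method='linear')
+# spline interpolation with an explicit order parameter (see the method description)
+spline_spec = InterpolationSpec(method='spline', order=2)
+print(linear_spec.to_json())
+```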
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/Links.md b/docs/Links.md
new file mode 100644
index 0000000..ca4fe11
--- /dev/null
+++ b/docs/Links.md
@@ -0,0 +1,31 @@
+# Links
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**href** | **str** | Target url for this link. |
+**type** | **str** | Type of the resource referenced by this link. | [optional]
+**method** | [**HALLinkMethod**](HALLinkMethod.md) | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.links import Links
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of Links from a JSON string
+links_instance = Links.from_json(json)
+# print the JSON string representation of the object
+print(links_instance.to_json())
+
+# convert the object into a dict
+links_dict = links_instance.to_dict()
+# create an instance of Links from a dict
+links_from_dict = Links.from_dict(links_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/LocationInner.md b/docs/LocationInner.md
new file mode 100644
index 0000000..c44e8a0
--- /dev/null
+++ b/docs/LocationInner.md
@@ -0,0 +1,28 @@
+# LocationInner
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.location_inner import LocationInner
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of LocationInner from a JSON string
+location_inner_instance = LocationInner.from_json(json)
+# print the JSON string representation of the object
+print(location_inner_instance.to_json())
+
+# convert the object into a dict
+location_inner_dict = location_inner_instance.to_dict()
+# create an instance of LocationInner from a dict
+location_inner_from_dict = LocationInner.from_dict(location_inner_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/ManageApi.md b/docs/ManageApi.md
new file mode 100644
index 0000000..e008943
--- /dev/null
+++ b/docs/ManageApi.md
@@ -0,0 +1,370 @@
+# waylay.services.queries.ManageApi
+
+All URIs are relative to *https://api.waylay.io*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**create**](ManageApi.md#create) | **POST** /queries/v1/queries/v1/query | Post Query
+[**get**](ManageApi.md#get) | **GET** /queries/v1/queries/v1/query/{query_name} | Get Query
+[**list**](ManageApi.md#list) | **GET** /queries/v1/queries/v1/query | List Queries
+[**remove**](ManageApi.md#remove) | **DELETE** /queries/v1/queries/v1/query/{query_name} | Remove Query
+[**update**](ManageApi.md#update) | **PUT** /queries/v1/queries/v1/query/{query_name} | Update Query
+
+# **create**
+> create(
+> headers
+> ) -> QueryResponse
+
+Post Query
+
+Create a new named query.
+
+### Example
+
+```python
+from pprint import pprint
+
+# Import the waylay-client from the waylay-sdk-core package
+from waylay.sdk.client import WaylayClient
+from waylay.sdk.api.api_exceptions import ApiError
+
+# Initialize a waylay client instance
+waylay_client = WaylayClient.from_profile()
+
+# Note that the typed model classes for responses/parameters/... are only available when `waylay-sdk-queries-types` is installed
+from waylay.services.queries.models.query_entity_input import QueryEntityInput
+from waylay.services.queries.models.query_response import QueryResponse
+try:
+ # Post Query
+ # calls `POST /queries/v1/queries/v1/query`
+ api_response = await waylay_client.queries.manage.create(
+ # json data: use a generated model or a json-serializable python data structure (dict, list)
+        json = QueryEntityInput(),  # QueryEntityInput
+ )
+ print("The response of queries.manage.create:\n")
+ pprint(api_response)
+except ApiError as e:
+ print("Exception when calling queries.manage.create: %s\n" % e)
+```
+
+### Endpoint
+```
+POST /queries/v1/queries/v1/query
+```
+### Parameters
+
+Name | Type | API binding | Description | Notes
+-------- | ----- | ------------- | ------------- | -------------
+**json** | [**QueryEntityInput**](QueryEntityInput.md) | json request body | |
+**headers** | [HeaderTypes](Operation.md#req_headers) | request headers | |
+
+### Return type
+
+Selected path param | Raw response param | Return Type | Description | Links
+------------------- | ------------------ | ------------ | ----------- | -----
+Literal[""] _(default)_ | False _(default)_ | **`QueryResponse`** | | [QueryResponse](QueryResponse.md)
+str | False _(default)_ | **`Any`** | If any other string value for the selected path is provided, the exact type of the response will only be known at runtime. |
+/ | True | `Response` | The raw http response object.
+
+### HTTP request headers
+
+ - **Content-Type**: application/json
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Successful Response | - |
+**422** | Validation Error | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **get**
+> get(
+> query_name: str,
+> headers
+> ) -> QueryResponse
+
+Get Query
+
+Get the definition of a named query.
+
+### Example
+
+```python
+from pprint import pprint
+
+# Import the waylay-client from the waylay-sdk-core package
+from waylay.sdk.client import WaylayClient
+from waylay.sdk.api.api_exceptions import ApiError
+
+# Initialize a waylay client instance
+waylay_client = WaylayClient.from_profile()
+
+# Note that the typed model classes for responses/parameters/... are only available when `waylay-sdk-queries-types` is installed
+from waylay.services.queries.models.query_response import QueryResponse
+try:
+ # Get Query
+ # calls `GET /queries/v1/queries/v1/query/{query_name}`
+ api_response = await waylay_client.queries.manage.get(
+ 'query_name_example', # query_name | path param "query_name"
+ )
+ print("The response of queries.manage.get:\n")
+ pprint(api_response)
+except ApiError as e:
+ print("Exception when calling queries.manage.get: %s\n" % e)
+```
+
+### Endpoint
+```
+GET /queries/v1/queries/v1/query/{query_name}
+```
+### Parameters
+
+Name | Type | API binding | Description | Notes
+-------- | ----- | ------------- | ------------- | -------------
+**query_name** | **str** | path parameter `"query_name"` | Name of the stored query. |
+**headers** | [HeaderTypes](Operation.md#req_headers) | request headers | |
+
+### Return type
+
+Selected path param | Raw response param | Return Type | Description | Links
+------------------- | ------------------ | ------------ | ----------- | -----
+Literal[""] _(default)_ | False _(default)_ | **`QueryResponse`** | | [QueryResponse](QueryResponse.md)
+str | False _(default)_ | **`Any`** | If any other string value for the selected path is provided, the exact type of the response will only be known at runtime. |
+/ | True | `Response` | The raw http response object.
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Successful Response | - |
+**422** | Validation Error | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **list**
+> list(
+> query: ListQuery,
+> headers
+> ) -> QueriesListResponse
+
+List Queries
+
+List named queries.
+
+### Example
+
+```python
+from pprint import pprint
+
+# Import the waylay-client from the waylay-sdk-core package
+from waylay.sdk.client import WaylayClient
+from waylay.sdk.api.api_exceptions import ApiError
+
+# Intialize a waylay client instance
+waylay_client = WaylayClient.from_profile()
+
+# Note that the typed model classes for responses/parameters/... are only available when `waylay-sdk-queries-types` is installed
+from waylay.services.queries.models.queries_list_response import QueriesListResponse
+try:
+ # List Queries
+ # calls `GET /queries/v1/queries/v1/query`
+ api_response = await waylay_client.queries.manage.list(
+ # query parameters:
+ query = {
+            'q': '',
+            'limit': 10,
+ 'offset': 0
+ },
+ )
+ print("The response of queries.manage.list:\n")
+ pprint(api_response)
+except ApiError as e:
+ print("Exception when calling queries.manage.list: %s\n" % e)
+```
+
+### Endpoint
+```
+GET /queries/v1/queries/v1/query
+```
+### Parameters
+
+Name | Type | API binding | Description | Notes
+-------- | ----- | ------------- | ------------- | -------------
+**query** | [QueryParamTypes](Operation.md#req_arg_query) \| **None** | URL query parameter | |
+**query['q']** (dict) <br> **query.q** (Query) | **str** | query parameter `"q"` | The QDSL filter condition for the stored queries. Note that this value needs to be escaped when passed as a URL parameter. | [optional] [default '']
+**query['limit']** (dict) <br> **query.limit** (Query) | **int** | query parameter `"limit"` | Maximal number of items returned in one response. | [optional] [default 10]
+**query['offset']** (dict) <br> **query.offset** (Query) | **int** | query parameter `"offset"` | Number of items to skip before listing results in the response page. | [optional] [default 0]
+**headers** | [HeaderTypes](Operation.md#req_headers) | request headers | |
+
+### Return type
+
+Selected path param | Raw response param | Return Type | Description | Links
+------------------- | ------------------ | ------------ | ----------- | -----
+Literal[""] _(default)_ | False _(default)_ | **`QueriesListResponse`** | | [QueriesListResponse](QueriesListResponse.md)
+str | False _(default)_ | **`Any`** | If any other string value for the selected path is provided, the exact type of the response will only be known at runtime. |
+/ | True | `Response` | The raw http response object.
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Successful Response | - |
+**422** | Validation Error | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **remove**
+> remove(
+> query_name: str,
+> headers
+> ) -> DeleteResponse
+
+Remove Query
+
+Remove definition of a named query.
+
+### Example
+
+```python
+from pprint import pprint
+
+# Import the waylay-client from the waylay-sdk-core package
+from waylay.sdk.client import WaylayClient
+from waylay.sdk.api.api_exceptions import ApiError
+
+# Initialize a waylay client instance
+waylay_client = WaylayClient.from_profile()
+
+# Note that the typed model classes for responses/parameters/... are only available when `waylay-sdk-queries-types` is installed
+from waylay.services.queries.models.delete_response import DeleteResponse
+try:
+ # Remove Query
+ # calls `DELETE /queries/v1/queries/v1/query/{query_name}`
+ api_response = await waylay_client.queries.manage.remove(
+ 'query_name_example', # query_name | path param "query_name"
+ )
+ print("The response of queries.manage.remove:\n")
+ pprint(api_response)
+except ApiError as e:
+ print("Exception when calling queries.manage.remove: %s\n" % e)
+```
+
+### Endpoint
+```
+DELETE /queries/v1/queries/v1/query/{query_name}
+```
+### Parameters
+
+Name | Type | API binding | Description | Notes
+-------- | ----- | ------------- | ------------- | -------------
+**query_name** | **str** | path parameter `"query_name"` | Name of the stored query. |
+**headers** | [HeaderTypes](Operation.md#req_headers) | request headers | |
+
+### Return type
+
+Selected path param | Raw response param | Return Type | Description | Links
+------------------- | ------------------ | ------------ | ----------- | -----
+Literal[""] _(default)_ | False _(default)_ | **`DeleteResponse`** | | [DeleteResponse](DeleteResponse.md)
+str | False _(default)_ | **`Any`** | If any other string value for the selected path is provided, the exact type of the response will only be known at runtime. |
+/ | True | `Response` | The raw http response object.
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Successful Response | - |
+**422** | Validation Error | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
+# **update**
+> update(
+> query_name: str,
+> headers
+> ) -> QueryResponse
+
+Update Query
+
+Create or update a named query definition.
+
+### Example
+
+```python
+from pprint import pprint
+
+# Import the waylay-client from the waylay-sdk-core package
+from waylay.sdk.client import WaylayClient
+from waylay.sdk.api.api_exceptions import ApiError
+
+# Initialize a waylay client instance
+waylay_client = WaylayClient.from_profile()
+
+# Note that the typed model classes for responses/parameters/... are only available when `waylay-sdk-queries-types` is installed
+from waylay.services.queries.models.query_definition import QueryDefinition
+from waylay.services.queries.models.query_response import QueryResponse
+try:
+ # Update Query
+ # calls `PUT /queries/v1/queries/v1/query/{query_name}`
+ api_response = await waylay_client.queries.manage.update(
+ 'query_name_example', # query_name | path param "query_name"
+ # json data: use a generated model or a json-serializable python data structure (dict, list)
+        json = QueryDefinition(),  # QueryDefinition
+ )
+ print("The response of queries.manage.update:\n")
+ pprint(api_response)
+except ApiError as e:
+ print("Exception when calling queries.manage.update: %s\n" % e)
+```
+
+### Endpoint
+```
+PUT /queries/v1/queries/v1/query/{query_name}
+```
+### Parameters
+
+Name | Type | API binding | Description | Notes
+-------- | ----- | ------------- | ------------- | -------------
+**query_name** | **str** | path parameter `"query_name"` | Name of the stored query. |
+**json** | [**QueryDefinition**](QueryDefinition.md) | json request body | |
+**headers** | [HeaderTypes](Operation.md#req_headers) | request headers | |
+
+### Return type
+
+Selected path param | Raw response param | Return Type | Description | Links
+------------------- | ------------------ | ------------ | ----------- | -----
+Literal[""] _(default)_ | False _(default)_ | **`QueryResponse`** | | [QueryResponse](QueryResponse.md)
+str | False _(default)_ | **`Any`** | If any other string value for the selected path is provided, the exact type of the response will only be known at runtime. |
+/ | True | `Response` | The raw http response object.
+
+### HTTP request headers
+
+ - **Content-Type**: application/json
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Successful Response | - |
+**422** | Validation Error | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/docs/Message.md b/docs/Message.md
new file mode 100644
index 0000000..0a9dac2
--- /dev/null
+++ b/docs/Message.md
@@ -0,0 +1,33 @@
+# Message
+
+Individual (info/warning/error) message in a response.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**code** | **str** | | [optional]
+**message** | **str** | |
+**level** | [**MessageLevel**](MessageLevel.md) | | [optional] [default to MessageLevel.INFO]
+**args** | **object** | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.message import Message
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of Message from a JSON string
+message_instance = Message.from_json(json)
+# print the JSON string representation of the object
+print(message_instance.to_json())
+
+# convert the object into a dict
+message_dict = message_instance.to_dict()
+# create an instance of Message from a dict
+message_from_dict = Message.from_dict(message_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/MessageArguments.md b/docs/MessageArguments.md
new file mode 100644
index 0000000..4517bee
--- /dev/null
+++ b/docs/MessageArguments.md
@@ -0,0 +1,30 @@
+# MessageArguments
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**resource** | **str** | | [optional]
+**metric** | **str** | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.message_arguments import MessageArguments
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of MessageArguments from a JSON string
+message_arguments_instance = MessageArguments.from_json(json)
+# print the JSON string representation of the object
+print(message_arguments_instance.to_json())
+
+# convert the object into a dict
+message_arguments_dict = message_arguments_instance.to_dict()
+# create an instance of MessageArguments from a dict
+message_arguments_from_dict = MessageArguments.from_dict(message_arguments_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/MessageLevel.md b/docs/MessageLevel.md
new file mode 100644
index 0000000..b8043e8
--- /dev/null
+++ b/docs/MessageLevel.md
@@ -0,0 +1,11 @@
+# MessageLevel
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/MessageProperties.md b/docs/MessageProperties.md
new file mode 100644
index 0000000..a24f558
--- /dev/null
+++ b/docs/MessageProperties.md
@@ -0,0 +1,31 @@
+# MessageProperties
+
+Additional message arguments.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**resource** | **str** | | [optional]
+**metric** | **str** | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.message_properties import MessageProperties
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of MessageProperties from a JSON string
+message_properties_instance = MessageProperties.from_json(json)
+# print the JSON string representation of the object
+print(message_properties_instance.to_json())
+
+# convert the object into a dict
+message_properties_dict = message_properties_instance.to_dict()
+# create an instance of MessageProperties from a dict
+message_properties_from_dict = MessageProperties.from_dict(message_properties_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/ObjectData.md b/docs/ObjectData.md
new file mode 100644
index 0000000..fd978f6
--- /dev/null
+++ b/docs/ObjectData.md
@@ -0,0 +1,36 @@
+# ObjectData
+
+Result data for a timestamp in object format.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**timestamp** | **int** | Unix epoch milliseconds timestamp. |
+**timestamp_iso** | **datetime** | ISO8601 rendering of the timestamp, present when `render.iso_timestamp=true` | [optional]
+**role** | **str** | The role of series specification that was used to compile this data set. | [optional]
+**resource** | **str** | Series resource id, if applicable for all values. | [optional]
+**metric** | **str** | Series metric, if applicable for all values. | [optional]
+**aggregation** | **str** | Series aggregation, if applicable for all values. | [optional]
+**levels** | **List[str]** | Attribute level names used to key the values for this observation. Levels that are flattened have a dot-separated key. If all observations have the same attribute for a level, that level might be omitted. | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.object_data import ObjectData
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of ObjectData from a JSON string
+object_data_instance = ObjectData.from_json(json)
+# print the JSON string representation of the object
+print(object_data_instance.to_json())
+
+# convert the object into a dict
+object_data_dict = object_data_instance.to_dict()
+# create an instance of ObjectData from a dict
+object_data_from_dict = ObjectData.from_dict(object_data_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/ObjectDataSet.md b/docs/ObjectDataSet.md
new file mode 100644
index 0000000..d42369c
--- /dev/null
+++ b/docs/ObjectDataSet.md
@@ -0,0 +1,32 @@
+# ObjectDataSet
+
+Data result in object format. Result item when the render option `render.header_array` is not set. The data values are keyed by their attributes (`resource`, `metric`, `aggregation`), according to the render options: * _hierarchical_: for each level, a sub-object is created (e.g. `render.mode=hier_dict`) * _flattened_: the keys are a '.'-separated concatenation of the attributes (e.g. `render.mode=flat_dict`) * _mixed_ (e.g. `render.mode=metric_flat_dict`): a single level (e.g. `metric`) is used as the main key, and any remaining levels (`resource`, `aggregation`) are indicated with a flattened subkey. When `render.rollup=true`, the attribute levels that are the same for all series are not used as keys, but reported as a data or table attribute.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**attributes** | [**DataSetAttributes**](DataSetAttributes.md) | | [optional]
+**window_spec** | [**DataSetWindow**](DataSetWindow.md) | | [optional]
+**data** | [**List[ObjectData]**](ObjectData.md) | |
+
+## Example
+
+```python
+from waylay.services.queries.models.object_data_set import ObjectDataSet
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of ObjectDataSet from a JSON string
+object_data_set_instance = ObjectDataSet.from_json(json)
+# print the JSON string representation of the object
+print(object_data_set_instance.to_json())
+
+# convert the object into a dict
+object_data_set_dict = object_data_set_instance.to_dict()
+# create an instance of ObjectDataSet from a dict
+object_data_set_from_dict = ObjectDataSet.from_dict(object_data_set_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/ObjectDataValue.md b/docs/ObjectDataValue.md
new file mode 100644
index 0000000..639218d
--- /dev/null
+++ b/docs/ObjectDataValue.md
@@ -0,0 +1,28 @@
+# ObjectDataValue
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.object_data_value import ObjectDataValue
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of ObjectDataValue from a JSON string
+object_data_value_instance = ObjectDataValue.from_json(json)
+# print the JSON string representation of the object
+print(object_data_value_instance.to_json())
+
+# convert the object into a dict
+object_data_value_dict = object_data_value_instance.to_dict()
+# create an instance of ObjectDataValue from a dict
+object_data_value_from_dict = ObjectDataValue.from_dict(object_data_value_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/Operation.md b/docs/Operation.md
new file mode 100644
index 0000000..dcba35d
--- /dev/null
+++ b/docs/Operation.md
@@ -0,0 +1,336 @@
+
+
+# REST _operation methods_
+
+The SDK service module `waylay.services.queries.service` is a generated
+plugin for the Waylay Python SDK.
+
+For each of the operations described in the
+[OpenAPI specification](https://docs.waylay.io/openapi/sdk/redocly/queries.html)
+of the service, a python _operation_ method is generated.
+
+These _operation methods_ have a standard sequence of (_positional_ and _named_) arguments,
+ as illustrated by this example REST operation
+`POST /demo/v5/gadgets/{id}/trinkets/combatulate` with _operationId_ `combatulateTrinket`:
+
+## Example request
+
+```python
+response = await waylay_client.demo.gadgets.combatulate_trinket(
+ # required path parameters (positional), for the `id` path parameter
+ '23456',
+ # required or optional query parameters (named)
+ query={
+ 'compression_level': 15
+ },
+ # request body named arguments (named)
+ # in this case using generated model for an `application/json` request
+ json=CombatulationRecipe(mode='traditional', decoration='minimal'),
+ # optional http headers (named)
+ headers={
+ 'accept' : 'text/csv'
+ },
+ # optional named arguments that specify how the response should be rendered (named)
+ raw_response=False,
+ select_path=None,
+ response_type=None,
+ # optional flag to disable the validation of the request arguments when using typed requests
+ validate_request=True,
+ # optional named arguments passed to the http client (named)
+ timeout=10.0,
+)
+```
+
+# Supported request arguments
+
+Each _operation method_ of this SDK uses the following arguments:
+
+## Argument overview
+* [`*path_args: str`](#req_arg_path) required positional path parameters
+* [`query: QueryRequest`](#req_arg_query) url query parameters
+* [request body](#req_arg_body) arguments
+ * [`json: Any`](#req_arg_json) a model instance or python data structure for a json request (`application/json`)
+ * [`content: ContentRequest`](#req_arg_content) raw bytes of any content type
+ * [`files: FilesRequest`](#req_arg_files) a multi-part request (`multipart/form-data`)
+ * [`data: DataRequest`](#req_arg_data) an url-encoded form (`application/x-www-form-urlencoded`), or additional non-file parts of a multi-part request.
+* [`headers: HeadersRequest`](#req_arg_headers) http request headers
+* [response rendering](#req_arg_render) arguments that specify how the response is presented
+ * [`raw_response: bool`](#req_arg_raw): if `True` returns a http `Response` object
+ * [`select_path: str`](#req_arg_select): used on a `json` `dict` response to select the relevant part of the response.
+ * [`response_type: Type | None`](#req_arg_response_type): parse the response as an instance of specified type.
+* [http client](#req_arg_client) arguments that influence the handling of the http call.
+
+## Typing of arguments
+The generated methods of this SDK will include additional type information, including
+* the actual names of the path arguments
+* model types for the `json` request argument
+* keys and values for documented `query`, `header` and `data`
+
+The most relevant request body arguments (of `json`, `files`, `data`, `content`)
+are explicitly documented, leaving the others as optional `**kwargs` named parameters.
+
+But even when not explicitly documented in the typed signature, you can still use the other request body arguments,
+assuming the server supports them.
+
+## Using `content` to send a request body
+The `content` argument is always available as named argument.
+Even if, for example, the typing of the `combatulate_trinket` method suggests using a `json` argument,
+you can alternatively specify a binary `content` request body, e.g. to stream an already json-encoded request from a file.
+
+```python
+binary_req_body = b'{"mode":"traditional","decoration":"minimal"}'
+response = await waylay_client.demo.gadgets.combatulate_trinket(
+ '23456',
+ content=binary_req_body,
+ headers={'content-type' : 'application/json'}
+)
+```
+
+## Path parameter arguments
+When the REST operation has path parameters that configure the request URL,
+they are presented as required (positional) initial parameters of type `str`.
+
+In the example above, the first `'23456'` corresponds to the `id` path parameter,
+and leads to a call with url
+```
+POST /demo/v5/gadgets/23456/trinkets/combatulate
+```
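+
+A minimal sketch of such a call, passing only the required path parameter (and assuming the operation accepts an empty request body):
+
+```python
+# the positional '23456' argument fills the `id` path parameter
+response = await waylay_client.demo.gadgets.combatulate_trinket('23456')
+```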
+
+## Query parameter arguments
+```python
+query: QueryRequest
+```
+with types
+```python
+QueryRequest = QueryMap | QueryEntries | QueryString | Model
+PrimitiveData = str | int | float | bool | None
+# a mapping of query keys to values or list of values
+QueryMap = Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]]
+# a list of query tuples, with repeated keys
+QueryEntries = List[Tuple[str, PrimitiveData]] | Tuple[Tuple[str, PrimitiveData], ...]
+# a query string, to be used as is on the request
+QueryString = str | bytes
+```
+
+The named `query` argument holds all url query parameters.
+Any query arguments described in the OpenAPI document will be typed, but undocumented
+query parameters are forwarded as well. E.g. using
+```
+query={ 'compression_level': 15, 'debug': True }
+```
+will lead to a request with url query parameters
+```
+POST /demo/v5/gadgets/23456/trinkets/combatulate?compression_level=15&debug=true
+```
+
+Any model instance specified as an argument will be converted to its `dict` representation first,
+which has to be compatible with a `QueryMap`.
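+
+Repeated query keys can be sent with the `QueryEntries` form (a list of key/value tuples); the `tag` parameter below is purely illustrative:
+
+```python
+response = await waylay_client.demo.gadgets.combatulate_trinket(
+ '23456',
+ # results in the url query string `?tag=red&tag=blue`
+ query=[('tag', 'red'), ('tag', 'blue')],
+)
+```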
+
+## Request body arguments
+The following cases are supported for request body arguments
+* [`json: Any`](#req_arg_json) a model instance or python data structure for a json request (`application/json`)
+* [`content: ContentRequest`](#req_arg_content) raw bytes of any content type
+* [`files: FilesRequest`](#req_arg_files) a multi-part request (`multipart/form-data`)
+* [`data: DataRequest`](#req_arg_data) an url-encoded form (`application/x-www-form-urlencoded`), or additional non-file parts of a multi-part request.
+
+### JSON request argument `json`
+The `json` argument allows the user to specify an `application/json` request body, either as an
+instance of a generated _Model_ class, or as a plain python data structure.
+
+```python
+json: Any
+```
+
+Most REST operations use a JSON (`application/json`) request body, and the SDK service module
+will provide typed _model_ classes for them.
+
+These requests can be passed as a `json` named parameter, either as _model instances_ or as plain
+python `dict`, `list` or primitive values that are JSON-serializable.
+
+#### Example
+The following examples assume that the server supports `application/json` requests.
+
+```python
+response = await waylay_client.demo.gadgets.combatulate_trinket(
+ '43466',
+ json=CombatulationRecipe(mode='traditional', decoration='minimal')
+)
+```
+```python
+response = await waylay_client.demo.gadgets.combatulate_trinket(
+ '43466',
+ json={'mode':'traditional', 'decoration':'minimal'}
+)
+```
+Both will send a json request with payload `{"mode":"traditional","decoration":"minimal"}` to the server,
+assuming that `CombatulationRecipe` is a model class that does not include additional default attributes.
+
+### Binary request argument `content`
+The `content` argument allows the user to specify a raw binary request of any content type.
+
+```python
+content: ContentRequest
+```
+with types
+```python
+ContentRequest = bytes | str | Iterable[bytes] | AsyncIterable[bytes] | IO[bytes]
+```
+
+For operations with non-JSON request bodies, the request body must be specified in its binary form
+using the `content` argument.
+
+Unless a default `content-type` is defined in the OpenAPI specification or by the server, you need
+to specify a `content-type` header.
+
+Supported values for the `content` argument are:
+* a `bytes` instance such as `b'abc'`
+* a `str` instance, which is converted to `bytes` using the `utf-8` encoding
+* an `Iterable` that produces `bytes`
+* an `AsyncIterable` that produces `bytes`
+* an `IO[bytes]` object, which is converted to an `Iterable[bytes]` if it isn't one already.
+
+When the SDK can infer the total length of the bytes stream (e.g. when attached to a file),
+the request will be uploaded as one stream, with a `content-length` header indicating the length.
+
+Otherwise, the content is sent in chunks (using `"Transfer-Encoding": "chunked"`),
+looping over the iterable or over buffered reads from the stream.
+
+#### Examples
+Using a bytes string:
+```python
+response = await waylay_client.demo.gadgets.combatulate_trinket(
+ '23456',
+ content=b'{"mode":"traditional","decoration":"minimal"}',
+ headers={'content-type' : 'application/json'}
+)
+```
+Using an iterator with `bytes` chunks:
+```python
+def generate_chunks():
+ yield b'{"mode":'
+ yield b'"traditional",'
+ yield b'"decoration":"minimal"'
+ yield b'}'
+response = await waylay_client.demo.gadgets.combatulate_trinket(
+ '23456',
+ content=generate_chunks(),
+ headers={'content-type' : 'application/json'}
+)
+```
+
+From file, assuming the server supports xml requests:
+```python
+with open('~/combatulation_requests/example_23456.xml', 'rb') as xml_file:
+ response = await waylay_client.demo.gadgets.combatulate_trinket(
+ '23456',
+ content=xml_file,
+ headers={'content-type' : 'text/xml'}
+ )
+```
+
+### Multipart file request argument `files` (and `data`)
+The `files` argument triggers the composition of a `multipart/form-data` request.
+```python
+files: FilesRequest
+data: DataRequest
+```
+with types
+```python
+FilesRequest = Mapping[str, FileTypes] | Sequence[Tuple[str, FileTypes]]
+DataRequest = Optional[Mapping[str, bytes | PrimitiveData]]
+
+FileTypes = FileContent | FileNameContent | FileNameContentType | FileNameContentTypeHeaders
+
+# the raw content as bytes (stream)
+FileContent = Union[IO[bytes], bytes, str]
+# a file name and content
+FileNameContent = Tuple[Optional[str], FileContent]
+# a file name, content and mediatype string
+FileNameContentType = Tuple[Optional[str], FileContent, Optional[str]]
+# a file name, content, mediatype and additional headers
+FileNameContentTypeHeaders = Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]]
+
+PrimitiveData = str | int | float | bool | None
+```
+
+When the REST operation supports a multipart file request (content type `multipart/form-data`),
+use the `files` named argument to construct such request.
+Each entry in the `files` argument ends up as one part of a multipart request, using the specified
+part name.
+
+You can provide the raw bytes (stream) only,
+or optionally specify the `filename`, `content-type` or additional `headers` for each part.
+
+When a `data` argument is specified, its entries are added as additional non-file parts.
+
+#### Example
+The following examples assume that the server supports `multipart/form-data` requests.
+```python
+response = await waylay_client.demo.gadgets.combatulate_trinket(
+ '43466',
+ files={'background': open('~/images/deepblue.png', 'rb')},
+ data={'mode':'traditional', 'decoration':'minimal'}
+)
+```
+
+This will send the data as `multipart/form-data`, with three parts: `background`, `mode` and `decoration`.
+
+```python
+response = await waylay_client.demo.gadgets.combatulate_trinket(
+ '43466',
+ files={
+ 'background': ('deepblue.png', open('~/images/deepblue.png', 'rb'), 'image/png')
+ },
+ data={'mode':'traditional', 'decoration':'minimal'}
+)
+```
+sends the same data, but adds a filename and content type to the `background` part.
+
+
+
+
+### Url-encoded form data `data`
+The `data` argument triggers the composition of an `application/x-www-form-urlencoded` html form request.
+
+```python
+data: Optional[Mapping[str, bytes | PrimitiveData]]
+```
+
+For operations that use url-encoded form data (content type `application/x-www-form-urlencoded`),
+use the `data` named argument (without a `files` argument).
+
+In that case, the http client will use that content type and encode the request body accordingly.
+
+This type of operation is normally meant to support [simple html forms](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/POST#example).
+
+#### Example
+The following example assumes that the server supports `application/x-www-form-urlencoded` requests.
+
+```python
+response = await waylay_client.demo.gadgets.combatulate_trinket(
+ '43466',
+ data={'mode':'traditional', 'decoration':'minimal'}
+)
+```
+This will send the data with content type `application/x-www-form-urlencoded`, as if an html form with
+inputs `mode` and `decoration` had submitted the request.
+
+## Request `headers` argument
+
+TODO
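+
+A minimal sketch, requesting a `text/csv` rendering via the `accept` header (as in the example request at the top, and assuming the server supports it):
+
+```python
+response = await waylay_client.demo.gadgets.combatulate_trinket(
+ '23456',
+ json=CombatulationRecipe(mode='traditional', decoration='minimal'),
+ # ask for a csv rendering of the response
+ headers={'accept': 'text/csv'},
+)
+```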
+
+## Response rendering arguments
+
+### Render a raw http response: `raw_response`
+TODO
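+
+A minimal sketch; with `raw_response=True` the operation returns the raw http `Response` object (the `status_code` attribute below is an assumption about that response class):
+
+```python
+resp = await waylay_client.demo.gadgets.combatulate_trinket(
+ '23456', raw_response=True
+)
+# inspect the raw response; attribute names depend on the http client's Response class
+print(resp.status_code)
+```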
+
+### Select a part of the response: `select_path`
+TODO
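+
+A minimal sketch, assuming the operation returns a json `dict` with a (purely illustrative) `result` key:
+
+```python
+result = await waylay_client.demo.gadgets.combatulate_trinket(
+ '23456',
+ # only return the value under the (illustrative) 'result' key of the json response
+ select_path='result',
+)
+```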
+
+### Parse the 2XX response as an instance of type: `response_type`
+TODO
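+
+A minimal sketch; `CombatulationResult` is an assumed (illustrative) model class into which the 2XX response is parsed:
+
+```python
+result = await waylay_client.demo.gadgets.combatulate_trinket(
+ '23456',
+ # parse the json response body as a CombatulationResult instance
+ response_type=CombatulationResult,
+)
+```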
+
+## Http client arguments
+
+TODO
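+
+As in the example request at the top, additional named arguments such as `timeout` are passed through to the http client; a minimal sketch:
+
+```python
+# allow up to 60 seconds for this request
+response = await waylay_client.demo.gadgets.combatulate_trinket(
+ '23456', timeout=60.0
+)
+```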
diff --git a/docs/QueriesListResponse.md b/docs/QueriesListResponse.md
new file mode 100644
index 0000000..5ebd7a7
--- /dev/null
+++ b/docs/QueriesListResponse.md
@@ -0,0 +1,36 @@
+# QueriesListResponse
+
+Listing of named queries, with paging links.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**messages** | [**List[Message]**](Message.md) | | [optional]
+**queries** | [**List[QueryListItem]**](QueryListItem.md) | One page of matching query definitions. |
+**count** | **int** | Number of query definitions returned in the current response. |
+**offset** | **int** | Offset in the full listing (skipped definitions). |
+**limit** | **int** | Maximal number of query definitions returned in one response. |
+**total_count** | **int** | Total number of query definitions matching the filter. | [optional]
+**links** | [**QueryListHALLinks**](QueryListHALLinks.md) | |
+
+## Example
+
+```python
+from waylay.services.queries.models.queries_list_response import QueriesListResponse
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of QueriesListResponse from a JSON string
+queries_list_response_instance = QueriesListResponse.from_json(json)
+# print the JSON string representation of the object
+print(queries_list_response_instance.to_json())
+
+# convert the object into a dict
+queries_list_response_dict = queries_list_response_instance.to_dict()
+# create an instance of QueriesListResponse from a dict
+queries_list_response_from_dict = QueriesListResponse.from_dict(queries_list_response_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/QueryDefinition.md b/docs/QueryDefinition.md
new file mode 100644
index 0000000..e84c17a
--- /dev/null
+++ b/docs/QueryDefinition.md
@@ -0,0 +1,42 @@
+# QueryDefinition
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**meta** | **object** | User metadata for the query definition. | [optional]
+**query** | [**QueryInput**](QueryInput.md) | | [optional]
+**resource** | **str** | Default resource for the series in the query. | [optional]
+**metric** | **str** | Default metric for the series in the query. | [optional]
+**aggregation** | [**DefaultAggregation**](DefaultAggregation.md) | | [optional]
+**interpolation** | [**DefaultInterpolation**](DefaultInterpolation.md) | | [optional]
+**freq** | [**GroupingInterval**](GroupingInterval.md) | | [optional]
+**var_from** | [**TimeWindowFrom**](TimeWindowFrom.md) | | [optional]
+**until** | [**TimeWindowUntil**](TimeWindowUntil.md) | | [optional]
+**window** | [**Window**](Window.md) | | [optional]
+**periods** | **int** | The size of the time window in number of `freq` units. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers. | [optional]
+**align** | [**Alignment**](Alignment.md) | | [optional]
+**data** | [**List[SeriesSpec]**](SeriesSpec.md) | List of series specifications. When not specified, a single default series specification is assumed (`[{}]`, using the default `metric`, `resource`, ...). | [optional]
+**render** | [**Render**](Render.md) | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.query_definition import QueryDefinition
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of QueryDefinition from a JSON string
+query_definition_instance = QueryDefinition.from_json(json)
+# print the JSON string representation of the object
+print(query_definition_instance.to_json())
+
+# convert the object into a dict
+query_definition_dict = query_definition_instance.to_dict()
+# create an instance of QueryDefinition from a dict
+query_definition_from_dict = QueryDefinition.from_dict(query_definition_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/QueryEntityInput.md b/docs/QueryEntityInput.md
new file mode 100644
index 0000000..2670f25
--- /dev/null
+++ b/docs/QueryEntityInput.md
@@ -0,0 +1,32 @@
+# QueryEntityInput
+
+Input data to create a query definition.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**name** | **str** | Name of the stored query definition. |
+**meta** | **object** | User metadata for the query definition. | [optional]
+**query** | [**QueryInput**](QueryInput.md) | |
+
+## Example
+
+```python
+from waylay.services.queries.models.query_entity_input import QueryEntityInput
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of QueryEntityInput from a JSON string
+query_entity_input_instance = QueryEntityInput.from_json(json)
+# print the JSON string representation of the object
+print(query_entity_input_instance.to_json())
+
+# convert the object into a dict
+query_entity_input_dict = query_entity_input_instance.to_dict()
+# create an instance of QueryEntityInput from a dict
+query_entity_input_from_dict = QueryEntityInput.from_dict(query_entity_input_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/QueryExecutionMessage.md b/docs/QueryExecutionMessage.md
new file mode 100644
index 0000000..60a1a6c
--- /dev/null
+++ b/docs/QueryExecutionMessage.md
@@ -0,0 +1,36 @@
+# QueryExecutionMessage
+
+A message object that informs or warns about a query execution issue.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**message** | **str** | A human readable message. |
+**level** | [**QueryExecutionMessageLevel**](QueryExecutionMessageLevel.md) | |
+**timestamp** | **datetime** | |
+**action** | **str** | The request action that caused this message. |
+**category** | **str** | The subsystem that issued this message. |
+**properties** | [**MessageArguments**](MessageArguments.md) | | [optional]
+**exception** | [**CauseException**](CauseException.md) | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.query_execution_message import QueryExecutionMessage
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of QueryExecutionMessage from a JSON string
+query_execution_message_instance = QueryExecutionMessage.from_json(json)
+# print the JSON string representation of the object
+print(query_execution_message_instance.to_json())
+
+# convert the object into a dict
+query_execution_message_dict = query_execution_message_instance.to_dict()
+# create an instance of QueryExecutionMessage from a dict
+query_execution_message_from_dict = QueryExecutionMessage.from_dict(query_execution_message_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/QueryExecutionMessageLevel.md b/docs/QueryExecutionMessageLevel.md
new file mode 100644
index 0000000..39d2ffd
--- /dev/null
+++ b/docs/QueryExecutionMessageLevel.md
@@ -0,0 +1,11 @@
+# QueryExecutionMessageLevel
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/QueryHALLinks.md b/docs/QueryHALLinks.md
new file mode 100644
index 0000000..b6cca40
--- /dev/null
+++ b/docs/QueryHALLinks.md
@@ -0,0 +1,31 @@
+# QueryHALLinks
+
+HAL Links for a query entity.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**var_self** | [**HALLink**](HALLink.md) | |
+**execute** | [**HALLink**](HALLink.md) | |
+
+## Example
+
+```python
+from waylay.services.queries.models.query_hal_links import QueryHALLinks
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of QueryHALLinks from a JSON string
+query_hal_links_instance = QueryHALLinks.from_json(json)
+# print the JSON string representation of the object
+print(query_hal_links_instance.to_json())
+
+# convert the object into a dict
+query_hal_links_dict = query_hal_links_instance.to_dict()
+# create an instance of QueryHALLinks from a dict
+query_hal_links_from_dict = QueryHALLinks.from_dict(query_hal_links_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/QueryInput.md b/docs/QueryInput.md
new file mode 100644
index 0000000..a7ee406
--- /dev/null
+++ b/docs/QueryInput.md
@@ -0,0 +1,41 @@
+# QueryInput
+
+Query definition for a Waylay analytics query. See also [api docs](https://docs.waylay.io/#/api/query/?id=data-query-json-representation).
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**resource** | **str** | Default resource for the series in the query. | [optional]
+**metric** | **str** | Default metric for the series in the query. | [optional]
+**aggregation** | [**DefaultAggregation**](DefaultAggregation.md) | | [optional]
+**interpolation** | [**DefaultInterpolation**](DefaultInterpolation.md) | | [optional]
+**freq** | [**GroupingInterval**](GroupingInterval.md) | | [optional]
+**var_from** | [**TimeWindowFrom**](TimeWindowFrom.md) | | [optional]
+**until** | [**TimeWindowUntil**](TimeWindowUntil.md) | | [optional]
+**window** | [**Window**](Window.md) | | [optional]
+**periods** | **int** | The size of the time window in number of `freq` units. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers. | [optional]
+**align** | [**Alignment**](Alignment.md) | | [optional]
+**data** | [**List[SeriesSpec]**](SeriesSpec.md) | List of series specifications. When not specified, a single default series specification is assumed (`[{}]`, using the default `metric`, `resource`, ...). | [optional]
+**render** | [**Render**](Render.md) | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.query_input import QueryInput
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of QueryInput from a JSON string
+query_input_instance = QueryInput.from_json(json)
+# print the JSON string representation of the object
+print(query_input_instance.to_json())
+
+# convert the object into a dict
+query_input_dict = query_input_instance.to_dict()
+# create an instance of QueryInput from a dict
+query_input_from_dict = QueryInput.from_dict(query_input_dict)
+```
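+
+A filled-in sketch of the same round trip, using a few of the documented properties (all field values are illustrative assumptions):
+
+```python
+from waylay.services.queries.models.query_input import QueryInput
+
+# illustrative query: a daily window of 15-minute averages for one series
+json = """{
+ "resource": "device-0017",
+ "metric": "temperature",
+ "aggregation": "mean",
+ "freq": "PT15M",
+ "window": "P1D",
+ "data": [{}]
+}"""
+query_input_instance = QueryInput.from_json(json)
+print(query_input_instance.to_json())
+```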
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/QueryListHALLinks.md b/docs/QueryListHALLinks.md
new file mode 100644
index 0000000..8f92b46
--- /dev/null
+++ b/docs/QueryListHALLinks.md
@@ -0,0 +1,34 @@
+# QueryListHALLinks
+
+HAL Links for a query entity.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**var_self** | [**HALLink**](HALLink.md) | |
+**first** | [**HALLink**](HALLink.md) | | [optional]
+**prev** | [**HALLink**](HALLink.md) | | [optional]
+**next** | [**HALLink**](HALLink.md) | | [optional]
+**last** | [**HALLink**](HALLink.md) | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.query_list_hal_links import QueryListHALLinks
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of QueryListHALLinks from a JSON string
+query_list_hal_links_instance = QueryListHALLinks.from_json(json)
+# print the JSON string representation of the object
+print(query_list_hal_links_instance.to_json())
+
+# convert the object into a dict
+query_list_hal_links_dict = query_list_hal_links_instance.to_dict()
+# create an instance of QueryListHALLinks from a dict
+query_list_hal_links_from_dict = QueryListHALLinks.from_dict(query_list_hal_links_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/QueryListItem.md b/docs/QueryListItem.md
new file mode 100644
index 0000000..b518de0
--- /dev/null
+++ b/docs/QueryListItem.md
@@ -0,0 +1,33 @@
+# QueryListItem
+
+Listing of a query definition item.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**links** | [**QueryHALLinks**](QueryHALLinks.md) | |
+**attrs** | **object** | System provided metadata for the query definition. |
+**name** | **str** | Name of the stored query definition. |
+**meta** | **object** | User metadata for the query definition. | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.query_list_item import QueryListItem
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of QueryListItem from a JSON string
+query_list_item_instance = QueryListItem.from_json(json)
+# print the JSON string representation of the object
+print(query_list_item_instance.to_json())
+
+# convert the object into a dict
+query_list_item_dict = query_list_item_instance.to_dict()
+# create an instance of QueryListItem from a dict
+query_list_item_from_dict = QueryListItem.from_dict(query_list_item_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/QueryOutput.md b/docs/QueryOutput.md
new file mode 100644
index 0000000..8284693
--- /dev/null
+++ b/docs/QueryOutput.md
@@ -0,0 +1,41 @@
+# QueryOutput
+
+Query definition for a Waylay analytics query. See also [api docs](https://docs.waylay.io/#/api/query/?id=data-query-json-representation).
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**resource** | **str** | Default resource for the series in the query. | [optional]
+**metric** | **str** | Default metric for the series in the query. | [optional]
+**aggregation** | [**DefaultAggregation**](DefaultAggregation.md) | | [optional]
+**interpolation** | [**DefaultInterpolation**](DefaultInterpolation.md) | | [optional]
+**freq** | [**GroupingInterval**](GroupingInterval.md) | | [optional]
+**var_from** | [**TimeWindowFrom**](TimeWindowFrom.md) | | [optional]
+**until** | [**TimeWindowUntil**](TimeWindowUntil.md) | | [optional]
+**window** | [**Window**](Window.md) | | [optional]
+**periods** | **int** | The size of the time window in number of `freq` units. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers. | [optional]
+**align** | [**Alignment**](Alignment.md) | | [optional]
+**data** | [**List[SeriesSpec]**](SeriesSpec.md) | List of series specifications. When not specified, a single default series specification is assumed (`[{}]`, using the default `metric`, `resource`, ...). | [optional]
+**render** | [**Render**](Render.md) | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.query_output import QueryOutput
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of QueryOutput from a JSON string
+query_output_instance = QueryOutput.from_json(json)
+# print the JSON string representation of the object
+print(query_output_instance.to_json())
+
+# convert the object into a dict
+query_output_dict = query_output_instance.to_dict()
+# create an instance of QueryOutput from a dict
+query_output_from_dict = QueryOutput.from_dict(query_output_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/QueryResponse.md b/docs/QueryResponse.md
new file mode 100644
index 0000000..f61784f
--- /dev/null
+++ b/docs/QueryResponse.md
@@ -0,0 +1,35 @@
+# QueryResponse
+
+Represents a single named query.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**links** | [**QueryHALLinks**](QueryHALLinks.md) | |
+**attrs** | **object** | System provided metadata for the query definition. |
+**name** | **str** | Name of the stored query definition. |
+**meta** | **object** | User metadata for the query definition. | [optional]
+**query** | [**QueryOutput**](QueryOutput.md) | |
+**messages** | [**List[Message]**](Message.md) | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.query_response import QueryResponse
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of QueryResponse from a JSON string
+query_response_instance = QueryResponse.from_json(json)
+# print the JSON string representation of the object
+print(query_response_instance.to_json())
+
+# convert the object into a dict
+query_response_dict = query_response_instance.to_dict()
+# create an instance of QueryResponse from a dict
+query_response_from_dict = QueryResponse.from_dict(query_response_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/QueryResult.md b/docs/QueryResult.md
new file mode 100644
index 0000000..122776a
--- /dev/null
+++ b/docs/QueryResult.md
@@ -0,0 +1,32 @@
+# QueryResult
+
+A json data response. Uses the format specified by the `render` options of the request (defaults to `COMPACT_WS`).
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**data** | [**List[ResponseDataSet]**](ResponseDataSet.md) | A list of data sets, each with their own time axis. There will be one dataset for each `role` specified in the query (by default a single `input` role). The data is represented according to the `render` options in the query (default `COMPACT_WS`). |
+**query** | [**QueryInput**](QueryInput.md) | |
+**messages** | [**List[QueryExecutionMessage]**](QueryExecutionMessage.md) | |
+
+## Example
+
+```python
+from waylay.services.queries.models.query_result import QueryResult
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of QueryResult from a JSON string
+query_result_instance = QueryResult.from_json(json)
+# print the JSON string representation of the object
+print(query_result_instance.to_json())
+
+# convert the object into a dict
+query_result_dict = query_result_instance.to_dict()
+# create an instance of QueryResult from a dict
+query_result_from_dict = QueryResult.from_dict(query_result_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/QueryUpdateInput.md b/docs/QueryUpdateInput.md
new file mode 100644
index 0000000..df65031
--- /dev/null
+++ b/docs/QueryUpdateInput.md
@@ -0,0 +1,31 @@
+# QueryUpdateInput
+
+Input data to update a query definition.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**meta** | **object** | User metadata for the query definition. | [optional]
+**query** | [**QueryInput**](QueryInput.md) | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.query_update_input import QueryUpdateInput
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of QueryUpdateInput from a JSON string
+query_update_input_instance = QueryUpdateInput.from_json(json)
+# print the JSON string representation of the object
+print(query_update_input_instance.to_json())
+
+# convert the object into a dict
+query_update_input_dict = query_update_input_instance.to_dict()
+# create an instance of QueryUpdateInput from a dict
+query_update_input_from_dict = QueryUpdateInput.from_dict(query_update_input_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/Render.md b/docs/Render.md
new file mode 100644
index 0000000..b6a762c
--- /dev/null
+++ b/docs/Render.md
@@ -0,0 +1,42 @@
+# Render
+
+Configures the representation of data sets returned by the query API.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**mode** | [**RenderMode**](RenderMode.md) | | [optional]
+**roll_up** | **bool** | move attributes that are identical for all rows (or columns) up to a table attribute. Levels enumerated in 'hierarchical' are excluded. | [optional]
+**hierarchical** | [**Hierarchical**](Hierarchical.md) | | [optional]
+**value_key** | **str** | if set, use this key in the value object to report data values | [optional]
+**show_levels** | **bool** | if set, report the levels used in the data values (either hierarchical or flat) | [optional]
+**iso_timestamp** | **bool** | if set, render timestamps in a row or column index with both epoch and iso representations | [optional]
+**row_key** | **str** | if set, use this key as name of the row-dimension for single-dimensional rows | [optional]
+**column_key** | **str** | if set, use this key as name of the column-dimension for single-dimensional columns | [optional]
+**header_array** | [**HeaderArrayOption**](HeaderArrayOption.md) | | [optional]
+**data_axis** | [**DataAxisOption**](DataAxisOption.md) | | [optional]
+**key_seperator** | **str** | character used to concatenate multi-key columns or rows when required | [optional]
+**key_skip_empty** | **bool** | skip empty values in concatenating multi-key column or row headers | [optional]
+**include_window_spec** | **bool** | if set, include window specification in render modes that support it | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.render import Render
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of Render from a JSON string
+render_instance = Render.from_json(json)
+# print the JSON string representation of the object
+print(render_instance.to_json())
+
+# convert the object into a dict
+render_dict = render_instance.to_dict()
+# create an instance of Render from a dict
+render_from_dict = Render.from_dict(render_dict)
+```
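+
+A filled-in sketch of the same round trip, setting the documented `iso_timestamp` option (compare the `render.iso_timestamp=true` note in [RowHeader](RowHeader.md)):
+
+```python
+from waylay.services.queries.models.render import Render
+
+# request iso-rendered timestamps in addition to epoch values
+render_instance = Render.from_json('{"iso_timestamp": true}')
+print(render_instance.to_json())
+```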
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/Render1.md b/docs/Render1.md
new file mode 100644
index 0000000..c347a7a
--- /dev/null
+++ b/docs/Render1.md
@@ -0,0 +1,41 @@
+# Render1
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**mode** | [**RenderMode**](RenderMode.md) | | [optional]
+**roll_up** | **bool** | move attributes that are identical for all rows (or columns) up to a table attribute. Levels enumerated in 'hierarchical' are excluded. | [optional]
+**hierarchical** | [**Hierarchical**](Hierarchical.md) | | [optional]
+**value_key** | **str** | if set, use this key in the value object to report data values | [optional]
+**show_levels** | **bool** | if set, report the levels used in the data values (either hierarchical or flat) | [optional]
+**iso_timestamp** | **bool** | if set, render timestamps in a row or column index with both epoch and iso representations | [optional]
+**row_key** | **str** | if set, use this key as name of the row-dimension for single-dimensional rows | [optional]
+**column_key** | **str** | if set, use this key as name of the column-dimension for single-dimensional columns | [optional]
+**header_array** | [**HeaderArrayOption**](HeaderArrayOption.md) | | [optional]
+**data_axis** | [**DataAxisOption**](DataAxisOption.md) | | [optional]
+**key_seperator** | **str** | character used to concatenate multi-key columns or rows when required | [optional]
+**key_skip_empty** | **bool** | skip empty values in concatenating multi-key column or row headers | [optional]
+**include_window_spec** | **bool** | if set, include window specification in render modes that support it | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.render1 import Render1
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of Render1 from a JSON string
+render1_instance = Render1.from_json(json)
+# print the JSON string representation of the object
+print(render1_instance.to_json())
+
+# convert the object into a dict
+render1_dict = render1_instance.to_dict()
+# create an instance of Render1 from a dict
+render1_from_dict = Render1.from_dict(render1_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RenderMode.md b/docs/RenderMode.md
new file mode 100644
index 0000000..bc5ac43
--- /dev/null
+++ b/docs/RenderMode.md
@@ -0,0 +1,29 @@
+# RenderMode
+
+Render mode configuration keys.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.render_mode import RenderMode
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of RenderMode from a JSON string
+render_mode_instance = RenderMode.from_json(json)
+# print the JSON string representation of the object
+print(render_mode_instance.to_json())
+
+# convert the object into a dict
+render_mode_dict = render_mode_instance.to_dict()
+# create an instance of RenderMode from a dict
+render_mode_from_dict = RenderMode.from_dict(render_mode_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RenderModeOneOf.md b/docs/RenderModeOneOf.md
new file mode 100644
index 0000000..ccc2f18
--- /dev/null
+++ b/docs/RenderModeOneOf.md
@@ -0,0 +1,12 @@
+# RenderModeOneOf
+
+Render rows of timestamp and values. Show column headers. Includes an iso timestamp. ###### options - `iso_timestamp`: `True` - `header_array`: `row` - `roll_up`: `False` - `data_axis`: `column`
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RenderModeOneOf1.md b/docs/RenderModeOneOf1.md
new file mode 100644
index 0000000..0d8f830
--- /dev/null
+++ b/docs/RenderModeOneOf1.md
@@ -0,0 +1,12 @@
+# RenderModeOneOf1
+
+Render rows of timestamp and values. Show column headers. ###### options - `iso_timestamp`: `False` - `header_array`: `row` - `roll_up`: `False` - `data_axis`: `column`
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RenderModeOneOf2.md b/docs/RenderModeOneOf2.md
new file mode 100644
index 0000000..1be600f
--- /dev/null
+++ b/docs/RenderModeOneOf2.md
@@ -0,0 +1,12 @@
+# RenderModeOneOf2
+
+Render rows of timestamp and values. Show column headers. Show the time window attributes. ###### options - `iso_timestamp`: `False` - `header_array`: `row` - `roll_up`: `False` - `data_axis`: `column` - `include_window_spec`: `True`
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RenderModeOneOf3.md b/docs/RenderModeOneOf3.md
new file mode 100644
index 0000000..d616c02
--- /dev/null
+++ b/docs/RenderModeOneOf3.md
@@ -0,0 +1,12 @@
+# RenderModeOneOf3
+
+Render timestamps and each series (column) as a values array. Show column headers. ###### options - `iso_timestamp`: `False` - `header_array`: `row` - `data_axis`: `row` - `roll_up`: `True` - `include_window_spec`: `True`
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RenderModeOneOf4.md b/docs/RenderModeOneOf4.md
new file mode 100644
index 0000000..24aa212
--- /dev/null
+++ b/docs/RenderModeOneOf4.md
@@ -0,0 +1,12 @@
+# RenderModeOneOf4
+
+Renders row index in `rows`, and each series as a values array. The series are prefixed by their series attributes.The `rows` index is prefixed by the labels for these attributes. ###### options - `iso_timestamp`: `True` - `header_array`: `column` - `roll_up`: `False` - `data_axis`: `row`
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RenderModeOneOf5.md b/docs/RenderModeOneOf5.md
new file mode 100644
index 0000000..2facf2d
--- /dev/null
+++ b/docs/RenderModeOneOf5.md
@@ -0,0 +1,12 @@
+# RenderModeOneOf5
+
+Render an object for each observation. Uses flattened keys. ###### options - `iso_timestamp`: `True` - `hierarchical`: `False` - `show_levels`: `True` - `roll_up`: `False`
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RenderModeOneOf6.md b/docs/RenderModeOneOf6.md
new file mode 100644
index 0000000..698dd38
--- /dev/null
+++ b/docs/RenderModeOneOf6.md
@@ -0,0 +1,12 @@
+# RenderModeOneOf6
+
+Render an hierarchical object for each observation. Shows an iso timestamp. ###### options - `iso_timestamp`: `True` - `hierarchical`: `True` - `show_levels`: `True` - `roll_up`: `True`
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RenderModeOneOf7.md b/docs/RenderModeOneOf7.md
new file mode 100644
index 0000000..f162c67
--- /dev/null
+++ b/docs/RenderModeOneOf7.md
@@ -0,0 +1,12 @@
+# RenderModeOneOf7
+
+Render an object with metric keys for each observation. Shows an iso timestamp. ###### options - `iso_timestamp`: `True` - `hierarchical`: `['metric']` - `show_levels`: `False` - `roll_up`: `True` - `key_skip_empty`: `True`
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RenderModeOneOf8.md b/docs/RenderModeOneOf8.md
new file mode 100644
index 0000000..382a718
--- /dev/null
+++ b/docs/RenderModeOneOf8.md
@@ -0,0 +1,12 @@
+# RenderModeOneOf8
+
+Render in an object format compatible with the `/data/v1/events` upload. ###### options - `iso_timestamp`: `False` - `hierarchical`: `False` - `show_levels`: `False` - `roll_up`: `True`
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RenderModeOneOf9.md b/docs/RenderModeOneOf9.md
new file mode 100644
index 0000000..c3f18b8
--- /dev/null
+++ b/docs/RenderModeOneOf9.md
@@ -0,0 +1,12 @@
+# RenderModeOneOf9
+
+Render in csv format with row headers. ###### options - `iso_timestamp`: `False`
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/ResponseDataSet.md b/docs/ResponseDataSet.md
new file mode 100644
index 0000000..ebc7a45
--- /dev/null
+++ b/docs/ResponseDataSet.md
@@ -0,0 +1,35 @@
+# ResponseDataSet
+
+Result timeseries data set, with one time dimension.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**attributes** | [**DataSetAttributes**](DataSetAttributes.md) | | [optional]
+**window_spec** | [**DataSetWindow**](DataSetWindow.md) | | [optional]
+**data_axis** | [**ColumnDataSetDataAxis**](ColumnDataSetDataAxis.md) | | [optional] [default to ColumnDataSetDataAxis.ROW]
+**columns** | [**List[ColumnHeadersInner]**](ColumnHeadersInner.md) | Header attributes for the column data. The initial string-valued headers (normally a single `timestamp`) indicate that the corresponding column contains row index data (i.e. timestamps). The remaining object-valued column headers identify and describe the actual series data. |
+**data** | [**List[ObjectData]**](ObjectData.md) | |
+**rows** | [**List[RowHeadersInner]**](RowHeadersInner.md) | Header attributes for the index data. The initial string-valued headers (normally `resource`, `metric`, `aggregation`) indicate that the corresponding row contains series attributes. The remaining object-valued row headers contain the index data. |
+
+## Example
+
+```python
+from waylay.services.queries.models.response_data_set import ResponseDataSet
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of ResponseDataSet from a JSON string
+response_data_set_instance = ResponseDataSet.from_json(json)
+# print the JSON string representation of the object
+print(response_data_set_instance.to_json())
+
+# convert the object into a dict
+response_data_set_dict = response_data_set_instance.to_dict()
+# create an instance of ResponseDataSet from a dict
+response_data_set_from_dict = ResponseDataSet.from_dict(response_data_set_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RowDataSet.md b/docs/RowDataSet.md
new file mode 100644
index 0000000..fd835e2
--- /dev/null
+++ b/docs/RowDataSet.md
@@ -0,0 +1,34 @@
+# RowDataSet
+
+Row-oriented dataset. Timeseries data layout with a column header and a data row per timestamp. Result for render options `data_axis=column` and `header_array=row`.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**attributes** | [**DataSetAttributes**](DataSetAttributes.md) | | [optional]
+**window_spec** | [**DataSetWindow**](DataSetWindow.md) | | [optional]
+**data_axis** | [**RowDataSetDataAxis**](RowDataSetDataAxis.md) | | [optional] [default to RowDataSetDataAxis.COLUMN]
+**columns** | [**List[ColumnHeadersInner]**](ColumnHeadersInner.md) | Header attributes for the column data. The initial string-valued headers (normally a single `timestamp`) indicate that the corresponding column contains row index data (i.e. timestamps). The remaining object-valued column headers identify and describe the actual series data. |
+**data** | **List[List[Datum]]** | |
+
+## Example
+
+```python
+from waylay.services.queries.models.row_data_set import RowDataSet
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of RowDataSet from a JSON string
+row_data_set_instance = RowDataSet.from_json(json)
+# print the JSON string representation of the object
+print(row_data_set_instance.to_json())
+
+# convert the object into a dict
+row_data_set_dict = row_data_set_instance.to_dict()
+# create an instance of RowDataSet from a dict
+row_data_set_from_dict = RowDataSet.from_dict(row_data_set_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RowDataSetDataAxis.md b/docs/RowDataSetDataAxis.md
new file mode 100644
index 0000000..f91f0d2
--- /dev/null
+++ b/docs/RowDataSetDataAxis.md
@@ -0,0 +1,11 @@
+# RowDataSetDataAxis
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RowHeader.md b/docs/RowHeader.md
new file mode 100644
index 0000000..8bc5660
--- /dev/null
+++ b/docs/RowHeader.md
@@ -0,0 +1,31 @@
+# RowHeader
+
+Index entry attributes. Attributes for a timestamp index entry.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**timestamp** | **int** | Unix epoch milliseconds timestamp. |
+**timestamp_iso** | **datetime** | ISO8601 rendering of the timestamp, present when `render.iso_timestamp=true` | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.row_header import RowHeader
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of RowHeader from a JSON string
+row_header_instance = RowHeader.from_json(json)
+# print the JSON string representation of the object
+print(row_header_instance.to_json())
+
+# convert the object into a dict
+row_header_dict = row_header_instance.to_dict()
+# create an instance of RowHeader from a dict
+row_header_from_dict = RowHeader.from_dict(row_header_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/RowHeadersInner.md b/docs/RowHeadersInner.md
new file mode 100644
index 0000000..798e419
--- /dev/null
+++ b/docs/RowHeadersInner.md
@@ -0,0 +1,30 @@
+# RowHeadersInner
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**timestamp** | **int** | Unix epoch milliseconds timestamp. |
+**timestamp_iso** | **datetime** | ISO8601 rendering of the timestamp, present when `render.iso_timestamp=true` | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.row_headers_inner import RowHeadersInner
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of RowHeadersInner from a JSON string
+row_headers_inner_instance = RowHeadersInner.from_json(json)
+# print the JSON string representation of the object
+print(row_headers_inner_instance.to_json())
+
+# convert the object into a dict
+row_headers_inner_dict = row_headers_inner_instance.to_dict()
+# create an instance of RowHeadersInner from a dict
+row_headers_inner_from_dict = RowHeadersInner.from_dict(row_headers_inner_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/SeriesDataSet.md b/docs/SeriesDataSet.md
new file mode 100644
index 0000000..c1e6e70
--- /dev/null
+++ b/docs/SeriesDataSet.md
@@ -0,0 +1,34 @@
+# SeriesDataSet
+
+Column-oriented dataset. Timeseries data layout with a column header and a separate data array for the time index and each series. Result for render options `data_axis=row` and `header_array=row`.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**attributes** | [**DataSetAttributes**](DataSetAttributes.md) | | [optional]
+**window_spec** | [**DataSetWindow**](DataSetWindow.md) | | [optional]
+**data_axis** | [**ColumnDataSetDataAxis**](ColumnDataSetDataAxis.md) | | [optional] [default to ColumnDataSetDataAxis.ROW]
+**columns** | [**List[ColumnHeadersInner]**](ColumnHeadersInner.md) | Header attributes for the column data. The initial string-valued headers (normally a single `timestamp`) indicate that the corresponding column contains row index data (i.e. timestamps). The remaining object-valued column headers identify and describe the actual series data. |
+**data** | **List[List[Datum]]** | |
+
+## Example
+
+```python
+from waylay.services.queries.models.series_data_set import SeriesDataSet
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of SeriesDataSet from a JSON string
+series_data_set_instance = SeriesDataSet.from_json(json)
+# print the JSON string representation of the object
+print(series_data_set_instance.to_json())
+
+# convert the object into a dict
+series_data_set_dict = series_data_set_instance.to_dict()
+# create an instance of SeriesDataSet from a dict
+series_data_set_from_dict = SeriesDataSet.from_dict(series_data_set_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/SeriesSpec.md b/docs/SeriesSpec.md
new file mode 100644
index 0000000..44448cb
--- /dev/null
+++ b/docs/SeriesSpec.md
@@ -0,0 +1,34 @@
+# SeriesSpec
+
+Query specification for a single series.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**name** | **str** | Optional alias name for the series. This name is used when exporting the dataset to CSV format. | [optional]
+**resource** | **str** | Resource id for the series, required unless it is specified as a query default. | [optional]
+**metric** | **str** | Metric name for the series, required unless it is specified as a query default. | [optional]
+**aggregration** | [**AggregationMethod**](AggregationMethod.md) | | [optional]
+**interpolation** | [**Interpolation**](Interpolation.md) | | [optional]
+
+## Example
+
+```python
+from waylay.services.queries.models.series_spec import SeriesSpec
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of SeriesSpec from a JSON string
+series_spec_instance = SeriesSpec.from_json(json)
+# print the JSON string representation of the object
+print(series_spec_instance.to_json())
+
+# convert the object into a dict
+series_spec_dict = series_spec_instance.to_dict()
+# create an instance of SeriesSpec from a dict
+series_spec_from_dict = SeriesSpec.from_dict(series_spec_dict)
+```
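+
+A hedged, concrete variant of the `TODO` placeholder above; the identifiers are illustrative and only use properties listed in the table:
+
+```python
+from waylay.services.queries.models.series_spec import SeriesSpec
+
+# A single series specification: an alias name, a resource id and a metric name.
+json = '''{
+  "name": "outside-temperature",
+  "resource": "06e267b9-8714-4c58-ac53-df6a291b59dc",
+  "metric": "temperature"
+}'''
+series_spec_instance = SeriesSpec.from_json(json)
+print(series_spec_instance.to_json())
+```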
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/StatusApi.md b/docs/StatusApi.md
new file mode 100644
index 0000000..9c03208
--- /dev/null
+++ b/docs/StatusApi.md
@@ -0,0 +1,71 @@
+# waylay.services.queries.StatusApi
+
+All URIs are relative to *https://api.waylay.io*
+
+Method | HTTP request | Description
+------------- | ------------- | -------------
+[**get**](StatusApi.md#get) | **GET** /queries/v1/queries/v1 | Get Version And Health
+
+# **get**
+> get(
+> headers
+> ) -> Dict[str, str]
+
+Get Version And Health
+
+Get the version and health status for waylay-query.
+
+### Example
+
+```python
+from pprint import pprint
+
+# Import the waylay-client from the waylay-sdk-core package
+from waylay.sdk.client import WaylayClient
+from waylay.sdk.api.api_exceptions import ApiError
+
+# Initialize a waylay client instance
+waylay_client = WaylayClient.from_profile()
+
+# Note that the typed model classes for responses/parameters/... are only available when `waylay-sdk-queries-types` is installed
+try:
+ # Get Version And Health
+ # calls `GET /queries/v1/queries/v1`
+ api_response = await waylay_client.queries.status.get(
+ )
+ print("The response of queries.status.get:\n")
+ pprint(api_response)
+except ApiError as e:
+ print("Exception when calling queries.status.get: %s\n" % e)
+```
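+
+Continuing the example above: the default call returns a plain `Dict[str, str]`. A minimal sketch of inspecting it; the individual keys are not part of the documented contract, so the loop makes no assumption about them:
+
+```python
+# Hedged sketch: print whatever version/health entries the service reports.
+status = await waylay_client.queries.status.get()
+for key, value in status.items():
+    print(f"{key}: {value}")
+```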
+
+### Endpoint
+```
+GET /queries/v1/queries/v1
+```
+### Parameters
+
+This endpoint does not need any parameter.
+
+Name | Type | Description | Notes
+------------- | ------------- | ------------- | -------------
+**headers** | [HeaderTypes](Operation.md#req_headers) | request headers | |
+
+### Return type
+
+Selected path param | Raw response param | Return Type | Description | Links
+------------------- | ------------------ | ------------ | ----------- | -----
+Literal[""] _(default)_ | False _(default)_ | **`Dict[str, str]`** | |
+str | False _(default)_ | **`Any`** | If any other string value for the selected path is provided, the exact type of the response will only be known at runtime. |
+/ | True | `Response` | The raw http response object. |
+
+### HTTP request headers
+
+ - **Content-Type**: Not defined
+ - **Accept**: application/json
+
+### HTTP response details
+
+| Status code | Description | Response headers |
+|-------------|-------------|------------------|
+**200** | Successful Response | - |
+
+[[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
+
diff --git a/docs/TimeWindowFrom.md b/docs/TimeWindowFrom.md
new file mode 100644
index 0000000..b21f340
--- /dev/null
+++ b/docs/TimeWindowFrom.md
@@ -0,0 +1,29 @@
+# TimeWindowFrom
+
+The start of the time window for which results will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.time_window_from import TimeWindowFrom
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of TimeWindowFrom from a JSON string
+time_window_from_instance = TimeWindowFrom.from_json(json)
+# print the JSON string representation of the object
+print(time_window_from_instance.to_json())
+
+# convert the object into a dict
+time_window_from_dict = time_window_from_instance.to_dict()
+# create an instance of TimeWindowFrom from a dict
+time_window_from_from_dict = TimeWindowFrom.from_dict(time_window_from_dict)
+```
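+
+A `TimeWindowFrom` value can take any of the time-line forms documented in the OpenAPI description: an ISO8601 timestamp, unix epoch milliseconds, or an ISO8601 period before _now_. A hedged sketch, assuming the generated `oneOf` wrapper parses the raw JSON of each variant (values are illustrative):
+
+```python
+from waylay.services.queries.models.time_window_from import TimeWindowFrom
+
+# Each of the accepted "time line" variants:
+from_iso = TimeWindowFrom.from_json('"2018-03-21T12:23:00+01:00"')  # ISO8601 timestamp
+from_epoch = TimeWindowFrom.from_json('1534836422284')              # unix epoch milliseconds
+from_period = TimeWindowFrom.from_json('"PT3H15M"')                 # ISO8601 period before now
+```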
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/TimeWindowUntil.md b/docs/TimeWindowUntil.md
new file mode 100644
index 0000000..a3e9cd2
--- /dev/null
+++ b/docs/TimeWindowUntil.md
@@ -0,0 +1,29 @@
+# TimeWindowUntil
+
+The end (not-inclusive) of the time window for which results will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.time_window_until import TimeWindowUntil
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of TimeWindowUntil from a JSON string
+time_window_until_instance = TimeWindowUntil.from_json(json)
+# print the JSON string representation of the object
+print(time_window_until_instance.to_json())
+
+# convert the object into a dict
+time_window_until_dict = time_window_until_instance.to_dict()
+# create an instance of TimeWindowUntil from a dict
+time_window_until_from_dict = TimeWindowUntil.from_dict(time_window_until_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/ValidationError.md b/docs/ValidationError.md
new file mode 100644
index 0000000..48c605c
--- /dev/null
+++ b/docs/ValidationError.md
@@ -0,0 +1,31 @@
+# ValidationError
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+**loc** | [**List[LocationInner]**](LocationInner.md) | |
+**msg** | **str** | |
+**type** | **str** | |
+
+## Example
+
+```python
+from waylay.services.queries.models.validation_error import ValidationError
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of ValidationError from a JSON string
+validation_error_instance = ValidationError.from_json(json)
+# print the JSON string representation of the object
+print(validation_error_instance.to_json())
+
+# convert the object into a dict
+validation_error_dict = validation_error_instance.to_dict()
+# create an instance of ValidationError from a dict
+validation_error_from_dict = ValidationError.from_dict(validation_error_dict)
+```
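+
+A hedged, concrete variant of the `TODO` placeholder above, shaped like a typical request-validation entry (the field values are illustrative):
+
+```python
+from waylay.services.queries.models.validation_error import ValidationError
+
+# One validation failure: the offending location, a message and an error type.
+json = '{"loc": ["query", "from"], "msg": "invalid datetime format", "type": "value_error"}'
+validation_error_instance = ValidationError.from_json(json)
+print(validation_error_instance.msg)
+```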
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/Window.md b/docs/Window.md
new file mode 100644
index 0000000..f11e4da
--- /dev/null
+++ b/docs/Window.md
@@ -0,0 +1,29 @@
+# Window
+
+The absolute size of the time window for which results will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.window import Window
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of Window from a JSON string
+window_instance = Window.from_json(json)
+# print the JSON string representation of the object
+print(window_instance.to_json())
+
+# convert the object into a dict
+window_dict = window_instance.to_dict()
+# create an instance of Window from a dict
+window_from_dict = Window.from_dict(window_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/docs/WindowOverride.md b/docs/WindowOverride.md
new file mode 100644
index 0000000..b5fe7ca
--- /dev/null
+++ b/docs/WindowOverride.md
@@ -0,0 +1,28 @@
+# WindowOverride
+
+
+## Properties
+
+Name | Type | Description | Notes
+------------ | ------------- | ------------- | -------------
+
+## Example
+
+```python
+from waylay.services.queries.models.window_override import WindowOverride
+
+# TODO update the JSON string below
+json = "{}"
+# create an instance of WindowOverride from a JSON string
+window_override_instance = WindowOverride.from_json(json)
+# print the JSON string representation of the object
+print(window_override_instance.to_json())
+
+# convert the object into a dict
+window_override_dict = window_override_instance.to_dict()
+# create an instance of WindowOverride from a dict
+window_override_from_dict = WindowOverride.from_dict(window_override_dict)
+```
+[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
+
+
diff --git a/openapi/queries.openapi.yaml b/openapi/queries.openapi.yaml
new file mode 100644
index 0000000..de5d5c1
--- /dev/null
+++ b/openapi/queries.openapi.yaml
@@ -0,0 +1,5244 @@
+openapi: 3.1.0
+info:
+ title: 'Waylay Query: timeseries queries (v1 protocol)'
+ description: '
+
+ Execute and store queries on the Waylay timeseries.
+
+
+ Protocol version: v1.'
+ version: 0.5.0
+paths:
+ /queries/v1/queries/v1:
+ get:
+ tags:
+ - Status
+ summary: Get Version And Health
+ description: Get the version and health status for waylay-query.
+ operationId: get_version_and_health_queries_v1_get
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ additionalProperties:
+ type: string
+ type: object
+ title: Response Get Version And Health Queries V1 Get
+ security: &id001
+ - waylayApiKeySecret: []
+ /queries/v1/queries/v1/data:
+ post:
+ tags:
+ - Execute
+ summary: Execute Query
+ description: 'Execute a timeseries query.
+
+
+ Executes the timeseries query specified in the request body,
+
+ after applying any overrides from the url parameters.'
+ operationId: execute_query_queries_v1_data_post
+ parameters:
+ - name: resource
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Resource
+ description: Default Resource Override.
+ examples:
+ - 13efb488-75ac-4dac-828a-d49c5c2ebbfc
+ description: Default Resource Override.
+ - name: metric
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Metric
+ description: Default Metric Override.
+ examples:
+ - temperature
+ - uptime
+ description: Default Metric Override.
+ - name: aggregation
+ in: query
+ required: false
+ schema:
+ title: Aggregation Override.
+ $ref: '#/components/schemas/AggregationMethod'
+ - name: interpolation
+ in: query
+ required: false
+ schema:
+ anyOf:
+ - $ref: '#/components/schemas/InterpolationMethod'
+ title: Interpolation Override.
+ - $ref: '#/components/schemas/InterpolationSpec'
+ title: Interpolation Override.
+ title: Interpolation
+ - name: freq
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Grouping Interval Override
+ description: Override for the `freq` query attribute.
+ oneOf:
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: 'ISO8601 period '
+ description: A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ - title: Inferred Frequency
+ type: string
+ description: When `inferred` is specified, the frequency of aggregation
+ will be inferred from the main/first time series. This can be used to
+ regularize the time series
+ const: inferred
+ description: Override for the `freq` query attribute.
+ - name: from
+ in: query
+ required: false
+ schema:
+ type: string
+ title: From Override.
+ oneOf:
+ - title: ISO8601 absolute timestamp
+ type: string
+ format: date-time
+ example: '2018-03-21T12:23:00+01:00'
+ description: A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ - type: integer
+ title: UNIX epoch milliseconds
+ minimum: 0
+ description: Absolute timestamp milliseconds in unix epoch since 1970-01-01.
+ example: 1534836422284
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: ISO8601 Period Before Now
+ description: Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.
+ - name: until
+ in: query
+ required: false
+ schema:
+ type: string
+        title: Until Override.
+ oneOf:
+ - title: ISO8601 absolute timestamp
+ type: string
+ format: date-time
+ example: '2018-03-21T12:23:00+01:00'
+ description: A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ - type: integer
+ title: UNIX epoch milliseconds
+ minimum: 0
+ description: Absolute timestamp milliseconds in unix epoch since 1970-01-01.
+ example: 1534836422284
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: ISO8601 Period Before Now
+ description: Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.
+ - name: window
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Window Override.
+ oneOf:
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: 'ISO8601 period '
+ description: A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ - name: periods
+ in: query
+ required: false
+ schema:
+ type: integer
+ title: Periods Override.
+ - name: render
+ in: query
+ required: false
+ schema:
+ anyOf:
+ - $ref: '#/components/schemas/_RenderMode'
+ title: Render Mode
+ - $ref: '#/components/schemas/Render'
+ title: Render Override.
+ title: Render
+ - name: accept
+ in: header
+ required: false
+ schema:
+ anyOf:
+ - type: string
+ - type: 'null'
+ title: Accept header
+ description: Use a 'text/csv' accept header to get CSV formatted results.
+ description: Use a 'text/csv' accept header to get CSV formatted results.
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Query-Input'
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueryResult'
+ examples:
+ default:
+ summary: Query response example for `?render=DEFAULT`
+ value:
+ data:
+ - columns:
+ - timestamp
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: temperature
+ aggregation: max
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: humidity
+ aggregation: mean
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: temperature
+ aggregation: max
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: humidity
+ aggregation: mean
+ data:
+ - - 1717977600000
+ - 12.67
+ - 80.0
+ - 12.78
+ - 83.125
+ - - 1718006400000
+ - 13.74
+ - 88.0
+ - 13.06
+ - 80.875
+ - - 1718035200000
+ - null
+ - null
+ - 13.35
+ - 78.5
+ - - 1718064000000
+ - 7.49
+ - 88.0
+ - 13.0
+ - 81.875
+ - - 1718092800000
+ - null
+ - null
+ - 14.84
+ - 62.375
+ - - 1718121600000
+ - null
+ - null
+ - 14.59
+ - 76.5
+ - - 1718150400000
+ - 9.34
+ - 90.0
+ - 13.08
+ - 85.375
+ - - 1718179200000
+ - null
+ - null
+ - 15.41
+ - 61.75
+ - - 1718208000000
+ - null
+ - null
+ - 15.1
+ - 74.0
+ - - 1718236800000
+ - 9.03
+ - 90.0
+ - 14.99
+ - 82.875
+ - - 1718265600000
+ - null
+ - null
+ - 18.64
+ - 57.5
+ - - 1718294400000
+ - null
+ - null
+ - 18.35
+ - 62.5
+ - - 1718323200000
+ - 12.98
+ - 80.0
+ - 13.02
+ - 81.625
+ - - 1718352000000
+ - null
+ - null
+ - 18.19
+ - 81.5
+ - - 1718380800000
+ - null
+ - null
+ - 17.75
+ - 88.875
+ - - 1718409600000
+ - 11.8
+ - 88.0
+ - 13.4
+ - 87.25
+ - - 1718438400000
+ - null
+ - null
+ - 16.53
+ - 64.5
+ - - 1718467200000
+ - null
+ - null
+ - 14.69
+ - 84.875
+ - - 1718496000000
+ - 12.12
+ - 89.0
+ - 13.77
+ - 87.875
+ - - 1718524800000
+ - null
+ - null
+ - 19.27
+ - 73.125
+ - - 1718553600000
+ - null
+ - null
+ - 19.12
+ - 69.75
+ data_axis: column
+ attributes:
+ role: input
+ window_spec:
+ from: 1717977600000
+ until: 1718582400000
+ window: P7D
+ freq: PT8H
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:00:20.886997+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:00:20.887995+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ hier_dict:
+ summary: Query response example for `?render=HIER_DICT`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 80.0
+ temperature:
+ max: 12.67
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 83.125
+ temperature:
+ max: 12.78
+ - role: input
+ timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 88.0
+ temperature:
+ max: 13.74
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 80.875
+ temperature:
+ max: 13.06
+ - role: input
+ timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 78.5
+ temperature:
+ max: 13.35
+ - role: input
+ timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 88.0
+ temperature:
+ max: 7.49
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 81.875
+ temperature:
+ max: 13.0
+ - role: input
+ timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 62.375
+ temperature:
+ max: 14.84
+ - role: input
+ timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 76.5
+ temperature:
+ max: 14.59
+ - role: input
+ timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 90.0
+ temperature:
+ max: 9.34
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 85.375
+ temperature:
+ max: 13.08
+ - role: input
+ timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 61.75
+ temperature:
+ max: 15.41
+ - role: input
+ timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 74.0
+ temperature:
+ max: 15.1
+ - role: input
+ timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 90.0
+ temperature:
+ max: 9.03
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 82.875
+ temperature:
+ max: 14.99
+ - role: input
+ timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 57.5
+ temperature:
+ max: 18.64
+ - role: input
+ timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 62.5
+ temperature:
+ max: 18.35
+ - role: input
+ timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 80.0
+ temperature:
+ max: 12.98
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 81.625
+ temperature:
+ max: 13.02
+ - role: input
+ timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 81.5
+ temperature:
+ max: 18.19
+ - role: input
+ timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 88.875
+ temperature:
+ max: 17.75
+ - role: input
+ timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 88.0
+ temperature:
+ max: 11.8
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 87.25
+ temperature:
+ max: 13.4
+ - role: input
+ timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 64.5
+ temperature:
+ max: 16.53
+ - role: input
+ timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 84.875
+ temperature:
+ max: 14.69
+ - role: input
+ timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 89.0
+ temperature:
+ max: 12.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 87.875
+ temperature:
+ max: 13.77
+ - role: input
+ timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 73.125
+ temperature:
+ max: 19.27
+ - role: input
+ timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 69.75
+ temperature:
+ max: 19.12
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T12:57:58.698912+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T12:57:58.699998+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: HIER_DICT
+ upload:
+ summary: Query response example for `?render=UPLOAD`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.67
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 12.78
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 83.125
+ - role: input
+ timestamp: 1718006400000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 13.74
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.06
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 80.875
+ - role: input
+ timestamp: 1718035200000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 78.5
+ - role: input
+ timestamp: 1718064000000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 7.49
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.875
+ - role: input
+ timestamp: 1718092800000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.84
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.375
+ - role: input
+ timestamp: 1718121600000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.59
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 76.5
+ - role: input
+ timestamp: 1718150400000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.34
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.08
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 85.375
+ - role: input
+ timestamp: 1718179200000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.41
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 61.75
+ - role: input
+ timestamp: 1718208000000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.1
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 74.0
+ - role: input
+ timestamp: 1718236800000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.03
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.99
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 82.875
+ - role: input
+ timestamp: 1718265600000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.64
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 57.5
+ - role: input
+ timestamp: 1718294400000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.5
+ - role: input
+ timestamp: 1718323200000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.98
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.02
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.625
+ - role: input
+ timestamp: 1718352000000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.19
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.5
+ - role: input
+ timestamp: 1718380800000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 17.75
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 88.875
+ - role: input
+ timestamp: 1718409600000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 11.8
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.4
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.25
+ - role: input
+ timestamp: 1718438400000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 16.53
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 64.5
+ - role: input
+ timestamp: 1718467200000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.69
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 84.875
+ - role: input
+ timestamp: 1718496000000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.12
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 89.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.77
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.875
+ - role: input
+ timestamp: 1718524800000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.27
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 73.125
+ - role: input
+ timestamp: 1718553600000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 69.75
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:03:24.128684+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:03:24.129391+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: UPLOAD
+ flat_dict:
+ summary: Query response example for `?render=FLAT_DICT`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.67
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 12.78
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 83.125
+ - role: input
+ timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 13.74
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.06
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 80.875
+ - role: input
+ timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 78.5
+ - role: input
+ timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 7.49
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.875
+ - role: input
+ timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.84
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.375
+ - role: input
+ timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.59
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 76.5
+ - role: input
+ timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.34
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.08
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 85.375
+ - role: input
+ timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.41
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 61.75
+ - role: input
+ timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.1
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 74.0
+ - role: input
+ timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.03
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.99
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 82.875
+ - role: input
+ timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.64
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 57.5
+ - role: input
+ timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.5
+ - role: input
+ timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.98
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.02
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.625
+ - role: input
+ timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.19
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.5
+ - role: input
+ timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 17.75
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 88.875
+ - role: input
+ timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 11.8
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.4
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.25
+ - role: input
+ timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 16.53
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 64.5
+ - role: input
+ timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.69
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 84.875
+ - role: input
+ timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.12
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 89.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.77
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.875
+ - role: input
+ timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.27
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 73.125
+ - role: input
+ timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 69.75
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T12:59:32.689972+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T12:59:32.691573+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: FLAT_DICT
+ metric_flat_dict:
+ summary: Query response example for `?render=METRIC_FLAT_DICT`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 80.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 83.125
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 12.67
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 12.78
+ - role: input
+ timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 80.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 13.74
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.06
+ - role: input
+ timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 78.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.35
+ - role: input
+ timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 81.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 7.49
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.0
+ - role: input
+ timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 62.375
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.84
+ - role: input
+ timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 76.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.59
+ - role: input
+ timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 90.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 85.375
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 9.34
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.08
+ - role: input
+ timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 61.75
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 15.41
+ - role: input
+ timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 74.0
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 15.1
+ - role: input
+ timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 90.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 82.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 9.03
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.99
+ - role: input
+ timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 57.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 18.64
+ - role: input
+ timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 62.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 18.35
+ - role: input
+ timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 80.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 81.625
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 12.98
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.02
+ - role: input
+ timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 81.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 18.19
+ - role: input
+ timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 88.875
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 17.75
+ - role: input
+ timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 87.25
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 11.8
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.4
+ - role: input
+ timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 64.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 16.53
+ - role: input
+ timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 84.875
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.69
+ - role: input
+ timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 89.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 87.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 12.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.77
+ - role: input
+ timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 73.125
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 19.27
+ - role: input
+ timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 69.75
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 19.12
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T15:37:41.460083+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T15:37:41.461241+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: METRIC_FLAT_DICT
+ header_column:
+ summary: Query response example for `?render=HEADER_COLUMN`
+ value:
+ data:
+ - rows:
+ - resource
+ - metric
+ - aggregation
+ - timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ - timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ - timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ - timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ - timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ - timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ - timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ - timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ - timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ - timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ - timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ - timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ - timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ - timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ - timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ - timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ - timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ - timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ - timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ - timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ - timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ data:
+ - - 06e267b9-8714-4c58-ac53-df6a291b59dc
+ - temperature
+ - max
+ - 12.67
+ - 13.74
+ - null
+ - 7.49
+ - null
+ - null
+ - 9.34
+ - null
+ - null
+ - 9.03
+ - null
+ - null
+ - 12.98
+ - null
+ - null
+ - 11.8
+ - null
+ - null
+ - 12.12
+ - null
+ - null
+ - - 06e267b9-8714-4c58-ac53-df6a291b59dc
+ - humidity
+ - mean
+ - 80.0
+ - 88.0
+ - null
+ - 88.0
+ - null
+ - null
+ - 90.0
+ - null
+ - null
+ - 90.0
+ - null
+ - null
+ - 80.0
+ - null
+ - null
+ - 88.0
+ - null
+ - null
+ - 89.0
+ - null
+ - null
+ - - 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ - temperature
+ - max
+ - 12.78
+ - 13.06
+ - 13.35
+ - 13.0
+ - 14.84
+ - 14.59
+ - 13.08
+ - 15.41
+ - 15.1
+ - 14.99
+ - 18.64
+ - 18.35
+ - 13.02
+ - 18.19
+ - 17.75
+ - 13.4
+ - 16.53
+ - 14.69
+ - 13.77
+ - 19.27
+ - 19.12
+ - - 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ - humidity
+ - mean
+ - 83.125
+ - 80.875
+ - 78.5
+ - 81.875
+ - 62.375
+ - 76.5
+ - 85.375
+ - 61.75
+ - 74.0
+ - 82.875
+ - 57.5
+ - 62.5
+ - 81.625
+ - 81.5
+ - 88.875
+ - 87.25
+ - 64.5
+ - 84.875
+ - 87.875
+ - 73.125
+ - 69.75
+ data_axis: row
+ attributes:
+ role: input
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:02:54.733676+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:02:54.734610+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: HEADER_COLUMN
+ series:
+ summary: Query response example for `?render=SERIES`
+ value:
+ data:
+ - columns:
+ - timestamp
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: temperature
+ aggregation: max
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: humidity
+ aggregation: mean
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: temperature
+ aggregation: max
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: humidity
+ aggregation: mean
+ data:
+ - - 1717977600000
+ - 1718006400000
+ - 1718035200000
+ - 1718064000000
+ - 1718092800000
+ - 1718121600000
+ - 1718150400000
+ - 1718179200000
+ - 1718208000000
+ - 1718236800000
+ - 1718265600000
+ - 1718294400000
+ - 1718323200000
+ - 1718352000000
+ - 1718380800000
+ - 1718409600000
+ - 1718438400000
+ - 1718467200000
+ - 1718496000000
+ - 1718524800000
+ - 1718553600000
+ - - 12.67
+ - 13.74
+ - null
+ - 7.49
+ - null
+ - null
+ - 9.34
+ - null
+ - null
+ - 9.03
+ - null
+ - null
+ - 12.98
+ - null
+ - null
+ - 11.8
+ - null
+ - null
+ - 12.12
+ - null
+ - null
+ - - 80.0
+ - 88.0
+ - null
+ - 88.0
+ - null
+ - null
+ - 90.0
+ - null
+ - null
+ - 90.0
+ - null
+ - null
+ - 80.0
+ - null
+ - null
+ - 88.0
+ - null
+ - null
+ - 89.0
+ - null
+ - null
+ - - 12.78
+ - 13.06
+ - 13.35
+ - 13.0
+ - 14.84
+ - 14.59
+ - 13.08
+ - 15.41
+ - 15.1
+ - 14.99
+ - 18.64
+ - 18.35
+ - 13.02
+ - 18.19
+ - 17.75
+ - 13.4
+ - 16.53
+ - 14.69
+ - 13.77
+ - 19.27
+ - 19.12
+ - - 83.125
+ - 80.875
+ - 78.5
+ - 81.875
+ - 62.375
+ - 76.5
+ - 85.375
+ - 61.75
+ - 74.0
+ - 82.875
+ - 57.5
+ - 62.5
+ - 81.625
+ - 81.5
+ - 88.875
+ - 87.25
+ - 64.5
+ - 84.875
+ - 87.875
+ - 73.125
+ - 69.75
+ data_axis: row
+ attributes:
+ role: input
+ window_spec:
+ from: 1717977600000
+ until: 1718582400000
+ window: P7D
+ freq: PT8H
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:01:39.240450+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:01:39.241481+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: SERIES
+ text/csv:
+ schema:
+ title: CSV timeseries data response.
+ description: 'A CSV representation of the query result,
+
+ - the first `timestamp` column contains event timestamp in iso
+ format
+
+ - remaining columns contain the (aggregated or non-aggregated) series
+ values, using a `{resource_id}/{metric}` or `{resource_id}/{metric}/{aggregation}` header.'
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security: *id001
+ /queries/v1/queries/v1/data/{query_name}:
+ get:
+ tags:
+ - Execute
+ summary: Execute Named Query
+ description: 'Execute a named timeseries query.
+
+
+ Retrieves a stored query definition by name,
+
+ applies overrides from the url parameters, and executes it.'
+ operationId: executeByName_query_queries_v1_data__query_name__get
+ parameters:
+ - name: query_name
+ in: path
+ required: true
+ schema:
+ type: string
+ title: Query Name
+ - name: resource
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Resource
+ description: Default Resource Override.
+ examples:
+ - 13efb488-75ac-4dac-828a-d49c5c2ebbfc
+ description: Default Resource Override.
+ - name: metric
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Metric
+ description: Default Metric Override.
+ examples:
+ - temperature
+ - uptime
+ description: Default Metric Override.
+ - name: aggregation
+ in: query
+ required: false
+ schema:
+ title: Aggregation Override.
+ $ref: '#/components/schemas/AggregationMethod'
+ - name: interpolation
+ in: query
+ required: false
+ schema:
+ anyOf:
+ - $ref: '#/components/schemas/InterpolationMethod'
+ title: Interpolation Override.
+ - $ref: '#/components/schemas/InterpolationSpec'
+ title: Interpolation Override.
+ title: Interpolation
+ - name: freq
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Grouping Interval Override
+ description: Override for the `freq` query attribute.
+ oneOf:
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: 'ISO8601 period '
+ description: A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ - title: Inferred Frequency
+ type: string
+ description: When `inferred` is specified, the frequency of aggregation
+ will be inferred from the main/first time series. This can be used to
+ regularize the time series
+ const: inferred
+ description: Override for the `freq` query attribute.
+ - name: from
+ in: query
+ required: false
+ schema:
+ type: string
+ title: From Override.
+ oneOf:
+ - title: ISO8601 absolute timestamp
+ type: string
+ format: date-time
+ example: '2018-03-21T12:23:00+01:00'
+ description: A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ - type: integer
+ title: UNIX epoch milliseconds
+ minimum: 0
+ description: Absolute timestamp milliseconds in unix epoch since 1970-01-01.
+ example: 1534836422284
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: ISO8601 Period Before Now
+ description: Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.
+ - name: until
+ in: query
+ required: false
+ schema:
+ type: string
+        title: Until Override.
+ oneOf:
+ - title: ISO8601 absolute timestamp
+ type: string
+ format: date-time
+ example: '2018-03-21T12:23:00+01:00'
+ description: A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ - type: integer
+ title: UNIX epoch milliseconds
+ minimum: 0
+ description: Absolute timestamp milliseconds in unix epoch since 1970-01-01.
+ example: 1534836422284
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: ISO8601 Period Before Now
+ description: Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.
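+      # Illustrative `from`/`until` override values (sketch), one per documented form:
+      #   from=2018-03-21T12:23:00+01:00   (ISO8601 date-time)
+      #   from=1534836422284               (UNIX epoch milliseconds)
+      #   from=P7D                         (ISO8601 period before now)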
+ - name: window
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Window Override.
+ oneOf:
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: 'ISO8601 period '
+ description: A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ - name: periods
+ in: query
+ required: false
+ schema:
+ type: integer
+ title: Periods Override.
+ - name: render
+ in: query
+ required: false
+ schema:
+ anyOf:
+ - $ref: '#/components/schemas/_RenderMode'
+ title: Render Mode
+ - $ref: '#/components/schemas/Render'
+ title: Render Override.
+ title: Render
+ - name: accept
+ in: header
+ required: false
+ schema:
+ anyOf:
+ - type: string
+ - type: 'null'
+ title: Accept header
+ description: Use a 'text/csv' accept header to get CSV formatted results.
+ description: Use a 'text/csv' accept header to get CSV formatted results.
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueryResult'
+ examples:
+ default:
+ summary: Query response example for `?render=DEFAULT`
+ value:
+ data:
+ - columns:
+ - timestamp
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: temperature
+ aggregation: max
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: humidity
+ aggregation: mean
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: temperature
+ aggregation: max
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: humidity
+ aggregation: mean
+ data:
+ - - 1717977600000
+ - 12.67
+ - 80.0
+ - 12.78
+ - 83.125
+ - - 1718006400000
+ - 13.74
+ - 88.0
+ - 13.06
+ - 80.875
+ - - 1718035200000
+ - null
+ - null
+ - 13.35
+ - 78.5
+ - - 1718064000000
+ - 7.49
+ - 88.0
+ - 13.0
+ - 81.875
+ - - 1718092800000
+ - null
+ - null
+ - 14.84
+ - 62.375
+ - - 1718121600000
+ - null
+ - null
+ - 14.59
+ - 76.5
+ - - 1718150400000
+ - 9.34
+ - 90.0
+ - 13.08
+ - 85.375
+ - - 1718179200000
+ - null
+ - null
+ - 15.41
+ - 61.75
+ - - 1718208000000
+ - null
+ - null
+ - 15.1
+ - 74.0
+ - - 1718236800000
+ - 9.03
+ - 90.0
+ - 14.99
+ - 82.875
+ - - 1718265600000
+ - null
+ - null
+ - 18.64
+ - 57.5
+ - - 1718294400000
+ - null
+ - null
+ - 18.35
+ - 62.5
+ - - 1718323200000
+ - 12.98
+ - 80.0
+ - 13.02
+ - 81.625
+ - - 1718352000000
+ - null
+ - null
+ - 18.19
+ - 81.5
+ - - 1718380800000
+ - null
+ - null
+ - 17.75
+ - 88.875
+ - - 1718409600000
+ - 11.8
+ - 88.0
+ - 13.4
+ - 87.25
+ - - 1718438400000
+ - null
+ - null
+ - 16.53
+ - 64.5
+ - - 1718467200000
+ - null
+ - null
+ - 14.69
+ - 84.875
+ - - 1718496000000
+ - 12.12
+ - 89.0
+ - 13.77
+ - 87.875
+ - - 1718524800000
+ - null
+ - null
+ - 19.27
+ - 73.125
+ - - 1718553600000
+ - null
+ - null
+ - 19.12
+ - 69.75
+ data_axis: column
+ attributes:
+ role: input
+ window_spec:
+ from: 1717977600000
+ until: 1718582400000
+ window: P7D
+ freq: PT8H
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:00:20.886997+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:00:20.887995+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ hier_dict:
+ summary: Query response example for `?render=HIER_DICT`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 80.0
+ temperature:
+ max: 12.67
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 83.125
+ temperature:
+ max: 12.78
+ - role: input
+ timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 88.0
+ temperature:
+ max: 13.74
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 80.875
+ temperature:
+ max: 13.06
+ - role: input
+ timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 78.5
+ temperature:
+ max: 13.35
+ - role: input
+ timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 88.0
+ temperature:
+ max: 7.49
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 81.875
+ temperature:
+ max: 13.0
+ - role: input
+ timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 62.375
+ temperature:
+ max: 14.84
+ - role: input
+ timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 76.5
+ temperature:
+ max: 14.59
+ - role: input
+ timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 90.0
+ temperature:
+ max: 9.34
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 85.375
+ temperature:
+ max: 13.08
+ - role: input
+ timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 61.75
+ temperature:
+ max: 15.41
+ - role: input
+ timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 74.0
+ temperature:
+ max: 15.1
+ - role: input
+ timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 90.0
+ temperature:
+ max: 9.03
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 82.875
+ temperature:
+ max: 14.99
+ - role: input
+ timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 57.5
+ temperature:
+ max: 18.64
+ - role: input
+ timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 62.5
+ temperature:
+ max: 18.35
+ - role: input
+ timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 80.0
+ temperature:
+ max: 12.98
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 81.625
+ temperature:
+ max: 13.02
+ - role: input
+ timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 81.5
+ temperature:
+ max: 18.19
+ - role: input
+ timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 88.875
+ temperature:
+ max: 17.75
+ - role: input
+ timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 88.0
+ temperature:
+ max: 11.8
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 87.25
+ temperature:
+ max: 13.4
+ - role: input
+ timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 64.5
+ temperature:
+ max: 16.53
+ - role: input
+ timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 84.875
+ temperature:
+ max: 14.69
+ - role: input
+ timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 89.0
+ temperature:
+ max: 12.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 87.875
+ temperature:
+ max: 13.77
+ - role: input
+ timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 73.125
+ temperature:
+ max: 19.27
+ - role: input
+ timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 69.75
+ temperature:
+ max: 19.12
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T12:57:58.698912+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T12:57:58.699998+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: HIER_DICT
+ upload:
+ summary: Query response example for `?render=UPLOAD`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.67
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 12.78
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 83.125
+ - role: input
+ timestamp: 1718006400000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 13.74
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.06
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 80.875
+ - role: input
+ timestamp: 1718035200000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 78.5
+ - role: input
+ timestamp: 1718064000000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 7.49
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.875
+ - role: input
+ timestamp: 1718092800000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.84
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.375
+ - role: input
+ timestamp: 1718121600000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.59
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 76.5
+ - role: input
+ timestamp: 1718150400000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.34
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.08
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 85.375
+ - role: input
+ timestamp: 1718179200000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.41
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 61.75
+ - role: input
+ timestamp: 1718208000000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.1
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 74.0
+ - role: input
+ timestamp: 1718236800000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.03
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.99
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 82.875
+ - role: input
+ timestamp: 1718265600000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.64
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 57.5
+ - role: input
+ timestamp: 1718294400000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.5
+ - role: input
+ timestamp: 1718323200000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.98
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.02
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.625
+ - role: input
+ timestamp: 1718352000000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.19
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.5
+ - role: input
+ timestamp: 1718380800000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 17.75
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 88.875
+ - role: input
+ timestamp: 1718409600000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 11.8
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.4
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.25
+ - role: input
+ timestamp: 1718438400000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 16.53
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 64.5
+ - role: input
+ timestamp: 1718467200000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.69
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 84.875
+ - role: input
+ timestamp: 1718496000000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.12
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 89.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.77
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.875
+ - role: input
+ timestamp: 1718524800000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.27
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 73.125
+ - role: input
+ timestamp: 1718553600000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 69.75
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:03:24.128684+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:03:24.129391+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: UPLOAD
+ flat_dict:
+ summary: Query response example for `?render=FLAT_DICT`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.67
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 12.78
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 83.125
+ - role: input
+ timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 13.74
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.06
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 80.875
+ - role: input
+ timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 78.5
+ - role: input
+ timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 7.49
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.875
+ - role: input
+ timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.84
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.375
+ - role: input
+ timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.59
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 76.5
+ - role: input
+ timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.34
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.08
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 85.375
+ - role: input
+ timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.41
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 61.75
+ - role: input
+ timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.1
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 74.0
+ - role: input
+ timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.03
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.99
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 82.875
+ - role: input
+ timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.64
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 57.5
+ - role: input
+ timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.5
+ - role: input
+ timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.98
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.02
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.625
+ - role: input
+ timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.19
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.5
+ - role: input
+ timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 17.75
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 88.875
+ - role: input
+ timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 11.8
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.4
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.25
+ - role: input
+ timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 16.53
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 64.5
+ - role: input
+ timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.69
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 84.875
+ - role: input
+ timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.12
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 89.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.77
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.875
+ - role: input
+ timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.27
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 73.125
+ - role: input
+ timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 69.75
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T12:59:32.689972+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T12:59:32.691573+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: FLAT_DICT
+ metric_flat_dict:
+ summary: Query response example for `?render=METRIC_FLAT_DICT`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 80.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 83.125
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 12.67
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 12.78
+ - role: input
+ timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 80.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 13.74
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.06
+ - role: input
+ timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 78.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.35
+ - role: input
+ timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 81.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 7.49
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.0
+ - role: input
+ timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 62.375
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.84
+ - role: input
+ timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 76.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.59
+ - role: input
+ timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 90.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 85.375
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 9.34
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.08
+ - role: input
+ timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 61.75
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 15.41
+ - role: input
+ timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 74.0
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 15.1
+ - role: input
+ timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 90.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 82.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 9.03
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.99
+ - role: input
+ timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 57.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 18.64
+ - role: input
+ timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 62.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 18.35
+ - role: input
+ timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 80.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 81.625
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 12.98
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.02
+ - role: input
+ timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 81.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 18.19
+ - role: input
+ timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 88.875
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 17.75
+ - role: input
+ timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 88.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 87.25
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 11.8
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.4
+ - role: input
+ timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 64.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 16.53
+ - role: input
+ timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 84.875
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.69
+ - role: input
+ timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 89.0
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 87.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 12.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.77
+ - role: input
+ timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 73.125
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 19.27
+ - role: input
+ timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 69.75
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 19.12
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T15:37:41.460083+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T15:37:41.461241+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: METRIC_FLAT_DICT
+ header_column:
+ summary: Query response example for `?render=HEADER_COLUMN`
+ value:
+ data:
+ - rows:
+ - resource
+ - metric
+ - aggregation
+ - timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ - timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ - timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ - timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ - timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ - timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ - timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ - timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ - timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ - timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ - timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ - timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ - timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ - timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ - timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ - timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ - timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ - timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ - timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ - timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ - timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ data:
+ - - 06e267b9-8714-4c58-ac53-df6a291b59dc
+ - temperature
+ - max
+ - 12.67
+ - 13.74
+ - null
+ - 7.49
+ - null
+ - null
+ - 9.34
+ - null
+ - null
+ - 9.03
+ - null
+ - null
+ - 12.98
+ - null
+ - null
+ - 11.8
+ - null
+ - null
+ - 12.12
+ - null
+ - null
+ - - 06e267b9-8714-4c58-ac53-df6a291b59dc
+ - humidity
+ - mean
+ - 80.0
+ - 88.0
+ - null
+ - 88.0
+ - null
+ - null
+ - 90.0
+ - null
+ - null
+ - 90.0
+ - null
+ - null
+ - 80.0
+ - null
+ - null
+ - 88.0
+ - null
+ - null
+ - 89.0
+ - null
+ - null
+ - - 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ - temperature
+ - max
+ - 12.78
+ - 13.06
+ - 13.35
+ - 13.0
+ - 14.84
+ - 14.59
+ - 13.08
+ - 15.41
+ - 15.1
+ - 14.99
+ - 18.64
+ - 18.35
+ - 13.02
+ - 18.19
+ - 17.75
+ - 13.4
+ - 16.53
+ - 14.69
+ - 13.77
+ - 19.27
+ - 19.12
+ - - 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ - humidity
+ - mean
+ - 83.125
+ - 80.875
+ - 78.5
+ - 81.875
+ - 62.375
+ - 76.5
+ - 85.375
+ - 61.75
+ - 74.0
+ - 82.875
+ - 57.5
+ - 62.5
+ - 81.625
+ - 81.5
+ - 88.875
+ - 87.25
+ - 64.5
+ - 84.875
+ - 87.875
+ - 73.125
+ - 69.75
+ data_axis: row
+ attributes:
+ role: input
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:02:54.733676+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:02:54.734610+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: HEADER_COLUMN
+ series:
+ summary: Query response example for `?render=SERIES`
+ value:
+ data:
+ - columns:
+ - timestamp
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: temperature
+ aggregation: max
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: humidity
+ aggregation: mean
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: temperature
+ aggregation: max
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: humidity
+ aggregation: mean
+ data:
+ - - 1717977600000
+ - 1718006400000
+ - 1718035200000
+ - 1718064000000
+ - 1718092800000
+ - 1718121600000
+ - 1718150400000
+ - 1718179200000
+ - 1718208000000
+ - 1718236800000
+ - 1718265600000
+ - 1718294400000
+ - 1718323200000
+ - 1718352000000
+ - 1718380800000
+ - 1718409600000
+ - 1718438400000
+ - 1718467200000
+ - 1718496000000
+ - 1718524800000
+ - 1718553600000
+ - - 12.67
+ - 13.74
+ - null
+ - 7.49
+ - null
+ - null
+ - 9.34
+ - null
+ - null
+ - 9.03
+ - null
+ - null
+ - 12.98
+ - null
+ - null
+ - 11.8
+ - null
+ - null
+ - 12.12
+ - null
+ - null
+ - - 80.0
+ - 88.0
+ - null
+ - 88.0
+ - null
+ - null
+ - 90.0
+ - null
+ - null
+ - 90.0
+ - null
+ - null
+ - 80.0
+ - null
+ - null
+ - 88.0
+ - null
+ - null
+ - 89.0
+ - null
+ - null
+ - - 12.78
+ - 13.06
+ - 13.35
+ - 13.0
+ - 14.84
+ - 14.59
+ - 13.08
+ - 15.41
+ - 15.1
+ - 14.99
+ - 18.64
+ - 18.35
+ - 13.02
+ - 18.19
+ - 17.75
+ - 13.4
+ - 16.53
+ - 14.69
+ - 13.77
+ - 19.27
+ - 19.12
+ - - 83.125
+ - 80.875
+ - 78.5
+ - 81.875
+ - 62.375
+ - 76.5
+ - 85.375
+ - 61.75
+ - 74.0
+ - 82.875
+ - 57.5
+ - 62.5
+ - 81.625
+ - 81.5
+ - 88.875
+ - 87.25
+ - 64.5
+ - 84.875
+ - 87.875
+ - 73.125
+ - 69.75
+ data_axis: row
+ attributes:
+ role: input
+ window_spec:
+ from: 1717977600000
+ until: 1718582400000
+ window: P7D
+ freq: PT8H
+ messages:
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'temperature',
+ 'max'), aggregated on freq PT8H contains 26 null values. These
+ will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:01:39.240450+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: Series ('06e267b9-8714-4c58-ac53-df6a291b59dc', 'humidity',
+ 'mean'), aggregated on freq PT8H contains 26 null values.
+ These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:01:39.241481+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: SERIES
+ text/csv:
+ schema:
+ title: CSV timeseries data response.
+ description: 'A CSV representation of the query result,
+
+                  - the first `timestamp` column contains the event timestamp in ISO
+                  format
+
+ - remaining columns contain the (aggregated or non-aggregated) series
+ values, using a `{resource_id}/{metric}` or `{resource_id}/{metric}/{aggregation}` header.'
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security: *id001
+ /queries/v1/queries/v1/query:
+ get:
+ tags:
+ - Manage
+ summary: List Queries
+ description: List named queries.
+ operationId: list_queries_queries_v1_query_get
+ parameters:
+ - name: q
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Filter condition
+        description: The QDSL filter condition for the stored queries. Note that
+            this value needs to be escaped when passed as a URL parameter.
+ default: ''
+      description: The QDSL filter condition for the stored queries. Note that this
+          value needs to be escaped when passed as a URL parameter.
+ examples:
+ resource:
+ summary: resource
+ description: Filter queries that reference a given resource id.
+ value: resource:APL4995
+ user:
+ summary: current user
+ description: Filter queries on the user that created or updated the item.
+ value: user:@me
+ created:
+ summary: created after
+ description: Filter on a creation timestamp interval.
+ value: created:2020-02-20/
+ modified:
+ summary: last modified more than 2 days ago
+ description: Filter on a modification timestamp interval.
+ value: modified:/now-P2D
+ meta_contains:
+ summary: metadata contains
+ description: Filter on a string contained in a metadata property.
+ value: 'meta.comments:contains(''{ "topic" : "general" }'''
+ meta_like:
+ summary: metadata matches
+ description: Filter on a pattern in a metadata property.
+ value: meta.description:like(*http*waylay.io*)
+ tag:
+ summary: tag combination
+ description: Filter queries on tag combinations.
+ value: tag:stable,demo,release_15 tag:demo,release_16 -tag:dummy
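+      # Illustrative escaped usage (sketch): the `resource:APL4995` filter above is passed
+      # URL-encoded, e.g. `?q=resource%3AAPL4995&limit=10`.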
+ - name: limit
+ in: query
+ required: false
+ schema:
+ type: integer
+ maximum: 100
+ title: Page size
+        description: Maximum number of items returned in one response.
+        default: 10
+      description: Maximum number of items returned in one response.
+ - name: offset
+ in: query
+ required: false
+ schema:
+ type: integer
+ title: Page offset
+        description: Number of items to skip before listing results in the response
+            page.
+        default: 0
+      description: Number of items to skip before listing results in the response
+          page.
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueriesListResponse'
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security: *id001
+ post:
+ tags:
+ - Manage
+ summary: Post Query
+ description: Create a new named query.
+ operationId: create_query_queries_v1_query_post
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueryEntityInput'
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueryResponse'
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security: *id001
+ /queries/v1/queries/v1/query/{query_name}:
+ get:
+ tags:
+ - Manage
+ summary: Get Query
+ description: Get the definition of a named query.
+ operationId: get_query_queries_v1_query__query_name__get
+ parameters:
+ - name: query_name
+ in: path
+ required: true
+ schema:
+ type: string
+ title: Query Name
+ description: Name of the stored query.
+ description: Name of the stored query.
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueryResponse'
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security: *id001
+ put:
+ tags:
+ - Manage
+ summary: Update Query
+ description: Create or update a named query definition.
+ operationId: update_query_queries_v1_query__query_name__put
+ parameters:
+ - name: query_name
+ in: path
+ required: true
+ schema:
+ type: string
+ title: Query Name
+ description: Name of the stored query.
+ description: Name of the stored query.
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ anyOf:
+ - $ref: '#/components/schemas/QueryUpdateInput'
+ - $ref: '#/components/schemas/Query-Input'
+ title: Query Definition
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueryResponse'
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security: *id001
+ delete:
+ tags:
+ - Manage
+ summary: Remove Query
+ description: Remove definition of a named query.
+ operationId: remove_query_queries_v1_query__query_name__delete
+ parameters:
+ - name: query_name
+ in: path
+ required: true
+ schema:
+ type: string
+ title: Query Name
+ description: Name of the stored query.
+ description: Name of the stored query.
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/DeleteResponse'
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security: *id001
+components:
+ schemas:
+ AggregationMethod:
+ type: string
+ title: Aggregation Method.
+ oneOf:
+ - const: first
+ title: First
+ description: Use the first value (in time) to represent all data for the sample
+ interval.
+ - const: last
+ title: Last
+ description: Use the last value (in time) to represent all data for the sample
+ interval.
+ - const: mean
+ title: Mean
+ description: 'Aggregate data by the mean value: The sum of values divided
+ by number of observations.'
+ - const: median
+ title: Median
+      description: 'Aggregate data by the median value: the middle value when the
+          observations are ordered, or the average of the two middle values when
+          their count is even.'
+ - const: sum
+ title: Sum
+ description: The sum of all values summarizes the data for the sample interval.
+ - const: count
+ title: Count
+ description: Use the count of observations in the sample interval.
+ - const: std
+ title: Standard Deviation
+ description: Use the standard deviation of all observations in the sample
+ interval.
+ - const: max
+ title: Maximum
+ description: Use the maximum of all values in the sample interval.
+ - const: min
+ title: Minimum
+ description: Use the minimum of all values in the sample interval.
+    - title: Percentile
+ description: Aggregate data by the p-th percentile, where p is a number between
+ 0 and 1.
+      pattern: ^percentile\(((1(\.0*)?)|(0\.[0-9]*))\)$
+ example: percentile(0.02)
+ AlignAt:
+ type: string
+ enum:
+ - grid
+ - boundary
+ - from
+ - until
+ title: AlignAt
+    description: "Possible values for `align.at`.\n\n* 'grid' Align to a fixed grid\
+      \ (possibly using timezone information)\n* 'from' Align at the `from` boundary\n\
+      * 'until' Align at the `until` boundary\n* 'boundary' Align at the `from` boundary\
+      \ if specified,\n  otherwise the `until` boundary.\n\nWhen not specified,\
+      \ 'grid' is used."
+ AlignShift:
+ type: string
+ enum:
+ - backward
+ - forward
+ - wrap
+ title: AlignShift
+ description: "Possible values for `align.shift`.\n\n* 'backward': keep the window\
+ \ size of the original interval specification,\n shifting back.\n* 'forward':\
+ \ keep the window size of the original interval specification,\n shifting\
+ \ forward.\n* 'wrap': enlarge the window size to include all of the original\
+ \ interval.\n\nWhen not specified, 'backward' is used."
+ Alignment:
+ properties:
+ at:
+ $ref: '#/components/schemas/AlignAt'
+ title: Align At
+ description: Method used to align the aggregation grid. The default value
+ is system-dependent (normally `grid`)
+ shift:
+ $ref: '#/components/schemas/AlignShift'
+ title: Align Shift
+ description: '
+
+ Specifies in what direction the query window is shifted
+
+ to match the alignment specification.
+
+ When not specified, defaults are:
+
+ - `backward` when only the `from` boundary is specified.
+
+ - `forward` when only the `until` boundary is specified.
+
+ - `wrap` otherwise (_none_ or _both_ boundaries specified).
+
+ '
+ freq:
+ oneOf:
+ - type: string
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: 'ISO8601 period '
+ description: A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ - type: string
+ const: inferred
+ title: Inferred Frequency
+ description: When `inferred` is specified, the frequency of aggregation
+ will be inferred from the main/first time series. This can be used to
+ regularize the time series
+ type: string
+ title: Alignment Grid interval.
+        description: "\nDefines the grid used to align the aggregation window.\n\
+          The window will align at whole-unit multiples of this interval.\n\nFor\
+          \ intervals like `P1D` that are timezone-dependent, use the\n`align.timezone`\
+          \ to fix the absolute timestamp of the grid boundaries.\n\nIf not specified,\
+          \ defaults to the `freq` aggregation interval.\n"
+ timezone:
+ oneOf:
+ - type: string
+ title: Timezone Identifier
+ description: '[ICANN timezone identifier](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones)'
+ - type: string
+          pattern: (\+|-)\d\d:\d\d
+ title: UTC Offset
+ description: '[UTC offset](https://en.wikipedia.org/wiki/UTC_offset)'
+ type: string
+ title: Alignment Timezone.
+ description: '
+
+ The timezone to use when shifting boundaries, especially
+
+ at day granularity.
+
+ Also affects the rendering of timestamps when
+
+ `render.iso_timestamp` is enabled.
+
+
+ When not specified, the `UTC` timezone is used.
+
+ '
+ additionalProperties: true
+ type: object
+ title: Alignment
+ description: 'Aggregation Alignment Options.
+
+
+ Specifies how the aggregation grid is aligned.'
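+    # Illustrative alignment fragment in a query definition (sketch; property names as
+    # defined above, values are examples only):
+    #   align:
+    #     at: grid
+    #     freq: P1D
+    #     timezone: Europe/Brussels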
+ CauseException:
+ properties:
+ type:
+ type: string
+ title: Exception Type
+ message:
+ type: string
+ title: Exception Message
+ stacktrace:
+ items:
+ type: string
+ type: array
+ title: Stack Trace
+ additionalProperties: true
+ type: object
+ required:
+ - type
+ - message
+ - stacktrace
+ title: CauseException
+ description: Describes the exception that caused a message.
+ ColumnDataSet:
+ properties:
+ attributes:
+ $ref: '#/components/schemas/DataSetAttributes'
+ window_spec:
+ $ref: '#/components/schemas/DataSetWindow'
+ data_axis:
+ type: string
+ enum:
+ - row
+ const: row
+ title: Data Axis
+ default: row
+ rows:
+ oneOf:
+ - prefixItems:
+ - const: resource
+ title: Index label for the resource attribute.
+ - const: metric
+ title: Index label for the metric attribute.
+ - prefixItems:
+ - const: resource
+ title: Index label for the resource attribute.
+ - const: metric
+ title: Index label for the metric attribute.
+ - const: aggregation
+ title: Index label for the aggregation attribute.
+ items:
+ anyOf:
+ - $ref: '#/components/schemas/ColumnIndexRowHeader'
+ - $ref: '#/components/schemas/RowHeader'
+ type: array
+ title: Row Headers
+ description: 'Header Attributes for the index data.
+
+
+          The initial string-valued headers (normally `resource`, `metric`, `aggregation`)
+          indicate that the row contains series attributes.
+
+
+ The remaining object-valued row headers contain the index data.'
+ data:
+ items:
+ items:
+ $ref: '#/components/schemas/Datum'
+ type: array
+ type: array
+ title: Series
+ description: All metric observation values for a single series. Prefixed
+ by the series attributes.
+ additionalProperties: true
+ type: object
+ required:
+ - rows
+ - data
+ title: ColumnDataSet
+ description: 'Column-oriented dataset with rows header.
+
+
+ Timeseries data layout with a rows header containing
+
+ the index data.
+
+ The data array contains series data prefixed by series attributes.
+
+        The `rows` index is prefixed by the names of these series attributes.
+
+ Result for render options `data_axis=row` and `header_array=column`.'
+ ColumnHeader:
+ properties:
+ resource:
+ type: string
+ title: Series resource id
+ metric:
+ type: string
+ title: Series metric
+ aggregation:
+ type: string
+ title: Aggregation applied to the series.
+ additionalProperties: true
+ type: object
+ required:
+ - resource
+ - metric
+ title: ColumnHeader
+ description: 'Column attributes.
+
+
+ Attributes that identify and describe the data in this column.'
+ ColumnIndexRowHeader:
+ type: string
+ title: Series Attribute Label
+ description: Label for a series attribute
+ examples:
+ - resource
+ - metric
+ - aggregation
+ DataAxisOption:
+ type: string
+ enum:
+ - row
+ - column
+ title: DataAxisOption
+ description: Allowed values for the render.data_axis option.
+ DataSetAttributes:
+ properties:
+ role:
+ $ref: '#/components/schemas/Role'
+ additionalProperties: true
+ type: object
+ title: DataSetAttributes
+ description: 'Data Set Attributes.
+
+
+ Data attributes that apply to all data in this set.'
+ DataSetWindow:
+ properties:
+ until:
+ type: integer
+ title: Time Axis End
+ description: Exclusive higher bound of the time axis in unix epoch milliseconds.
+ window:
+ type: string
+ format: period
+ title: Time Axis Length
+ description: Time axis length as ISO8601 period.
+ freq:
+ type: string
+ format: period
+ title: Frequency
+ description: Time axis aggregation interval as an ISO8601 period .
+ additionalProperties: true
+ type: object
+ required:
+ - until
+ - window
+ - freq
+ title: DataSetWindow
+ description: 'Data Window.
+
+
+ Statistics of the time axis of a data set.
+
+        Present with render option `include_window_spec=true`.'
+ Datum:
+ oneOf:
+ - type: number
+ - type: string
+ - type: boolean
+ - type: 'null'
+ title: Value
+ description: 'A single metric value for a timeseries.
+
+
+ A null value indicates that no (aggregated/interpolated) value exists for
+ the corresponding timestamp.'
+ DeleteResponse:
+ properties:
+ messages:
+ items:
+ $ref: '#/components/schemas/Message'
+ type: array
+ title: Messages
+ _links:
+ additionalProperties:
+ anyOf:
+ - $ref: '#/components/schemas/HALLink'
+ - items:
+ $ref: '#/components/schemas/HALLink'
+ type: array
+ type: object
+ title: ' Links'
+ description: HAL links, indexed by link relation.
+ _embeddings:
+ additionalProperties:
+ anyOf:
+ - $ref: '#/components/schemas/HALEmbedding'
+ - items:
+ $ref: '#/components/schemas/HALEmbedding'
+ type: array
+ type: object
+ title: ' Embeddings'
+ description: Hal embeddings, indexed by relation.
+ additionalProperties: true
+ type: object
+ title: DeleteResponse
+ description: Confirmation of a delete request.
+ HALEmbedding:
+ properties: {}
+ additionalProperties: true
+ type: object
+ title: HALEmbedding
+ description: Any embedded representation in a HAL response.
+ HALLink:
+ properties:
+ href:
+ type: string
+ title: Link URL
+ description: Target url for this link.
+ type:
+ type: string
+ title: Link type
+ description: Type of the resource referenced by this link.
+ method:
+ $ref: '#/components/schemas/HALLinkMethod'
+ title: Link method
+ description: Http method required to resolve the link.
+ additionalProperties: true
+ type: object
+ required:
+ - href
+ title: HALLink
+ description: A link target in a HAL response.
+ HALLinkMethod:
+ type: string
+ enum:
+ - GET
+ - POST
+ - PUT
+ - DELETE
+ - PATCH
+ title: HALLinkMethod
+ description: An http method that can be specified in a HAL link.
+ HALLinkRole:
+ type: string
+ enum:
+ - self
+ - first
+ - prev
+ - next
+ - last
+ - execute
+ title: HALLinkRole
+ description: Supported link and embedding roles in HAL representations.
+ HTTPValidationError:
+ properties:
+ detail:
+ items:
+ $ref: '#/components/schemas/ValidationError'
+ type: array
+ title: Detail
+ type: object
+ title: HTTPValidationError
+ HeaderArrayOption:
+ type: string
+ enum:
+ - row
+ - column
+ title: HeaderArrayOption
+ description: Allowed values for the render.header_array option.
+ InterpolationMethod:
+ type: string
+ enum:
+ - pad
+ - fixed
+ - backfill
+ - linear
+ - zero
+ - slinear
+ - quadratic
+ - cubic
+ - polynomial
+ - spline
+ - from_derivatives
+ - pchip
+ - akima
+ title: InterpolationMethod
+ description: Interpolation algorithm specifier.
+ InterpolationSpec:
+ properties:
+ method:
+ oneOf:
+ - const: pad
+ title: pad
+ description: Interpolate with the value of the first observed point. This
+ method also extrapolates.
+ - const: fixed
+ title: fixed
+ description: Interpolate with a fixed, user-specified value. This method
+ also extrapolates.
+ - const: backfill
+ title: backfill
+ description: Same as pad, but using the last observed value. This method
+ also extrapolates.
+ - const: linear
+ title: linear
+ description: Linearly go from the first observed value of the gap to the
+ last observed one. This method also extrapolates.
+ - const: nearest
+ title: nearest
+ description: Use the value that is closest in time.
+ - const: zero
+ title: zero
+ description: Interpolate with a spline function of order 0, which is a
+ piecewise polynomial.
+ - const: slinear
+ title: slinear
+ description: Interpolate with a spline function of order 1, which is a
+ piecewise polynomial.
+ - const: quadratic
+ title: quadratic
+ description: Interpolate with a spline function of order 2, which is a
+ piecewise polynomial.
+ - const: cubic
+ title: cubic
+ description: Interpolate with a spline function of order 3, which is a
+ piecewise polynomial.
+ - const: polynomial
+ title: polynomial
+ description: Interpolate with a polynomial of the lowest possible degree
+ passing through the data points.
+ - const: spline
+ title: spline
+ description: Interpolate with a spline function of a user-specified order.
+ - const: from_derivatives
+ title: from_derivatives
+ description: Interpolate with the derivative of order 1.
+ - const: pchip
+ title: pchip
+ description: Interpolate with a piecewise cubic spline function.
+ - const: akima
+ title: akima
+ description: Interpolate with a non-smoothing spline of order 2, called
+ Akima interpolation.
+ type: string
+ title: Interpolation method
+ value:
+ type: integer
+ title: Interpolation parameter
+ description: Optional parameter value for the interpolation method (see
+ method description).
+ order:
+ type: integer
+ title: Interpolation order
+ description: Optional order parameter for the interpolation method (see
+ method description).
+ additionalProperties: true
+ type: object
+ required:
+ - method
+ title: InterpolationSpec
+ description: "Defines whether, and how to treat missing values.\n\nThis can\
+ \ occur in two circumstances when aggregating (setting a sample frequency):\n\
+ * missing values: if there are missing (or invalid) values stored for\na given\
+ \ freq-interval,\n\"interpolation\" specifies how to compute these.\n* down-sampling:\
+ \ when the specified freq is smaller than the series\u2019\nactual frequency.\n\
+ \"interpolation\" specifies how to compute intermediate values."
+ Message:
+ properties:
+ code:
+ anyOf:
+ - type: string
+ - type: 'null'
+ title: Code
+ message:
+ type: string
+ title: Message
+ level:
+ type: string
+ enum:
+ - debug
+ - info
+ - warning
+ - error
+ - fatal
+ title: Level
+ default: info
+ args:
+ anyOf:
+ - type: object
+ - type: 'null'
+ title: Args
+ type: object
+ required:
+ - message
+ title: Message
+ description: Individual (info/warning/error) message in a response.
+ MessageProperties:
+ properties:
+ resource:
+ type: string
+ title: Series resource id
+ metric:
+ type: string
+ title: Series metric
+ additionalProperties: true
+ type: object
+ title: MessageProperties
+ description: Additional message arguments.
+ ObjectData:
+ properties:
+ timestamp:
+ $ref: '#/components/schemas/Timestamp'
+ timestamp_iso:
+ $ref: '#/components/schemas/TimestampIso'
+ role:
+ $ref: '#/components/schemas/Role'
+ resource:
+ type: string
+ title: Resource
+ description: Series resource id, if applicable for all values.
+ metric:
+ type: string
+ title: Metric
+ description: Series metric, if applicable for all values.
+ aggregation:
+ type: string
+ title: Aggregation
+ description: Series aggregation, if applicable for all values.
+ levels:
+ items:
+ type: string
+ type: array
+ title: Hierarchical Levels
+ description: 'Attribute level names used to key the values for this observation.
+
+
+ Levels that are flattened have a dot-separated key.
+
+
+ If all observations have the same attribute for a level, that level might
+ be omitted.'
+ examples:
+ - - resource
+ - metric
+ - aggregation
+ - - resource.metric
+ additionalProperties:
+ oneOf:
+ - type: object
+ title: Hierarchical Data
+ description: Values for the series whose attributes correspond with the
+ key. Keyed by sub-levels.
+ - $ref: '#/components/schemas/Datum'
+ description: Series value that corresponds with a (flattened) attribute
+ key.
+ title: Data
+ type: object
+ required:
+ - timestamp
+ title: ObjectData
+ description: Result data for a timestamp in object format.
+ ObjectDataSet:
+ properties:
+ attributes:
+ $ref: '#/components/schemas/DataSetAttributes'
+ window_spec:
+ $ref: '#/components/schemas/DataSetWindow'
+ data:
+ items:
+ $ref: '#/components/schemas/ObjectData'
+ type: array
+ title: Data
+ additionalProperties: true
+ type: object
+ required:
+ - data
+ title: ObjectDataSet
+ description: "Data result in object format.\n\nResult item when render option\
+ \ `render.header_array` is not set.\n\nThe data values are keyed by their\
+ \ attributes (`resource`, `metric`, `aggregation`),\naccording to the render\
+ \ options:\n* _hierarchical_: for each level, a sub-object is created\n (e.g.\
+ \ `render.mode=hier_dict`)\n* _flattened_: the attributes are '.'-separated\
+ \ concatenation\n of the attributes (e.g. `render.mode=flat_dict`)\n* _mixed_:\
+ \ (e.g. `render.mode=metric_flat_dict`) a single level\n (e.g. `metric`)\
+ \ is used as main key, any remaining levels\n (`resource`,`aggregation`)\
+ \ are indicated with a flattened subkey.\n\nWhen `render.roll_up=true`, the\
+ \ attribute levels that are the same for all series are\nnot used as key,\
+ \ but reported as a data or table attribute."
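+ # Illustrative sketch, abridged from the HIER_DICT and FLAT_DICT response
+ # examples below (`R1` stands in for a resource id): the same observation keyed
+ # hierarchically versus flattened.
+ #   hierarchical (render.mode=hier_dict):
+ #     {"timestamp": 1717977600000, "R1": {"temperature": {"max": 12.67}}}
+ #   flattened (render.mode=flat_dict):
+ #     {"timestamp": 1717977600000, "R1.temperature.max": 12.67}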
+ QueriesListResponse:
+ properties:
+ messages:
+ items:
+ $ref: '#/components/schemas/Message'
+ type: array
+ title: Messages
+ queries:
+ items:
+ $ref: '#/components/schemas/QueryListItem'
+ type: array
+ title: Query item list
+ description: One page of matching query definitions.
+ count:
+ type: integer
+ title: Current page size
+ description: Number of query definitions returned in the current response.
+ offset:
+ type: integer
+ title: Page offset
+ description: Offset in the full listing (skipped definitions).
+ limit:
+ type: integer
+ title: Page size limit
+ description: Maximal number of query definitions returned in one response.
+ total_count:
+ type: integer
+ title: Total count
+ description: Total number of query definitions matching the filter.
+ _links:
+ $ref: '#/components/schemas/QueryListHALLinks'
+ additionalProperties: true
+ type: object
+ required:
+ - queries
+ - count
+ - offset
+ - limit
+ - _links
+ title: QueriesListResponse
+ description: Listing of named queries, with paging links.
+ Query-Input:
+ properties:
+ resource:
+ type: string
+ title: Default Resource
+ description: Default resource for the series in the query.
+ metric:
+ type: string
+ title: Default Metric
+ description: Default metric for the series in the query.
+ aggregation:
+ anyOf:
+ - description: Aggregation method for a series in the query.
+ $ref: '#/components/schemas/AggregationMethod'
+ - items:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ - type: 'null'
+ type: array
+ title: Aggregations
+ description: Aggregation methods, leading to separate series.
+ - additionalProperties:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ - items:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ - type: 'null'
+ type: array
+ title: Aggregations
+ description: Aggregation methods, leading to separate series.
+ - type: 'null'
+ type: object
+ title: Aggregation by Resource or Metric
+ description: Aggregation methods specified per resource or metric.
+ - additionalProperties:
+ anyOf:
+ - additionalProperties:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ - items:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ - type: 'null'
+ type: array
+ title: Aggregations
+ description: Aggregation methods, leading to separate series.
+ - type: 'null'
+ type: object
+ title: Aggregation by Resource or Metric
+ description: Aggregation methods specified per resource or metric.
+ - type: 'null'
+ type: object
+ title: Aggregation by Resource and Metric
+ description: Aggregation methods specified per resource and metric.
+ - type: 'null'
+ title: Default Aggregation
+ description: Default aggregation method(s) for the series in the query.
+ interpolation:
+ anyOf:
+ - $ref: '#/components/schemas/InterpolationMethod'
+ title: Interpolation Method
+ - $ref: '#/components/schemas/InterpolationSpec'
+ title: Interpolation specification.
+ title: Default Interpolation
+ description: Default interpolation method for the series (if aggregated).
+ freq:
+ oneOf:
+ - type: string
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: 'ISO8601 period '
+ description: A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ - type: string
+ const: inferred
+ title: Inferred Frequency
+ description: When `inferred` is specified, the frequency of aggregation
+ will be inferred from the main/first time series. This can be used to
+ regularize the time series.
+ type: string
+ title: Grouping interval
+ description: Interval used to aggregate or regularize data. One of the [time
+ line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.
+ from:
+ oneOf:
+ - type: string
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ format: date-time
+ title: ISO8601 absolute timestamp
+ description: A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)
+ example: '2018-03-21T12:23:00+01:00'
+ - type: integer
+ minimum: 0.0
+ title: UNIX epoch milliseconds
+ description: Absolute timestamp milliseconds in unix epoch since 1970-01-01.
+ example: 1534836422284
+ - type: string
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: ISO8601 Period Before Now
+ description: Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.
+ example: PT3H15M
+ title: Time Window From
+ description: The start of the time window for which results will be returned.
+ One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.
+ until:
+ oneOf:
+ - type: string
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ format: date-time
+ title: ISO8601 absolute timestamp
+ description: A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)
+ example: '2018-03-21T12:23:00+01:00'
+ - type: integer
+ minimum: 0.0
+ title: UNIX epoch milliseconds
+ description: Absolute timestamp milliseconds in unix epoch since 1970-01-01.
+ example: 1534836422284
+ - type: string
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: ISO8601 Period Before Now
+ description: Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.
+ example: PT3H15M
+ title: Time Window Until
+ description: The end (exclusive) of the time window for which results
+ will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.
+ window:
+ oneOf:
+ - type: string
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: 'ISO8601 period '
+ description: A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ type: string
+ title: Window
+ description: The absolute size of the time window for which results will
+ be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties)
+ specifiers.
+ periods:
+ type: integer
+ title: Periods
+ description: The size of the time window in number of `freq` units. One
+ of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties)
+ specifiers.
+ align:
+ $ref: '#/components/schemas/Alignment'
+ data:
+ items:
+ $ref: '#/components/schemas/SeriesSpec'
+ type: array
+ title: Series specifications
+ description: List of series specifications. When not specified, a single
+ default series specification is assumed (`[{}]`, using the default `metric`, `resource`,
+ ...).
+ render:
+ $ref: '#/components/schemas/Render'
+ additionalProperties: true
+ type: object
+ title: Query
+ description: 'Query definition for a Waylay analytics query.
+
+
+ See also [api docs](https://docs.waylay.io/#/api/query/?id=data-query-json-representation).'
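+ # Illustrative sketch, not part of the generated schema: a minimal query body
+ # combining query-level defaults with one extra series (values are hypothetical).
+ #   resource: 13efb488-75ac-4dac-828a-d49c5c2ebbfc
+ #   metric: temperature
+ #   aggregation: max
+ #   freq: PT8H
+ #   from: P7D        # ISO8601 period before now
+ #   data:
+ #     - metric: humidity
+ #       aggregation: mean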
+ Query-Output:
+ properties:
+ resource:
+ type: string
+ title: Default Resource
+ description: Default resource for the series in the query.
+ metric:
+ type: string
+ title: Default Metric
+ description: Default metric for the series in the query.
+ aggregation:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ - items:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ - type: 'null'
+ type: array
+ title: Aggregations
+ description: Aggregation methods, leading to separate series.
+ - additionalProperties:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ - items:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ - type: 'null'
+ type: array
+ title: Aggregations
+ description: Aggregation methods, leading to separate series.
+ - type: 'null'
+ type: object
+ title: Aggregation by Resource or Metric
+ description: Aggregation methods specified per resource or metric.
+ - additionalProperties:
+ anyOf:
+ - additionalProperties:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ - items:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ - type: 'null'
+ type: array
+ title: Aggregations
+ description: Aggregation methods, leading to separate series.
+ - type: 'null'
+ type: object
+ title: Aggregation by Resource or Metric
+ description: Aggregation methods specified per resource or metric.
+ - type: 'null'
+ type: object
+ title: Aggregation by Resource and Metric
+ description: Aggregation methods specified per resource and metric.
+ - type: 'null'
+ title: Default Aggregation
+ description: Default aggregation method(s) for the series in the query.
+ interpolation:
+ anyOf:
+ - $ref: '#/components/schemas/InterpolationMethod'
+ title: Interpolation Method
+ - $ref: '#/components/schemas/InterpolationSpec'
+ title: Interpolation specification.
+ title: Default Interpolation
+ description: Default interpolation method for the series (if aggregated).
+ freq:
+ oneOf:
+ - type: string
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: 'ISO8601 period '
+ description: A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ - type: string
+ const: inferred
+ title: Inferred Frequency
+ description: When `inferred` is specified, the frequency of aggregation
+ will be inferred from the main/first time series. This can be used to
+ regularize the time series.
+ type: string
+ title: Grouping interval
+ description: Interval used to aggregate or regularize data. One of the [time
+ line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.
+ from:
+ oneOf:
+ - type: string
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ format: date-time
+ title: ISO8601 absolute timestamp
+ description: A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)
+ example: '2018-03-21T12:23:00+01:00'
+ - type: integer
+ minimum: 0.0
+ title: UNIX epoch milliseconds
+ description: Absolute timestamp milliseconds in unix epoch since 1970-01-01.
+ example: 1534836422284
+ - type: string
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: ISO8601 Period Before Now
+ description: Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.
+ example: PT3H15M
+ title: Time Window From
+ description: The start of the time window for which results will be returned.
+ One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.
+ until:
+ oneOf:
+ - type: string
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ format: date-time
+ title: ISO8601 absolute timestamp
+ description: A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)
+ example: '2018-03-21T12:23:00+01:00'
+ - type: integer
+ minimum: 0.0
+ title: UNIX epoch milliseconds
+ description: Absolute timestamp milliseconds in unix epoch since 1970-01-01.
+ example: 1534836422284
+ - type: string
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: ISO8601 Period Before Now
+ description: Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.
+ example: PT3H15M
+ title: Time Window Until
+ description: The end (exclusive) of the time window for which results
+ will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.
+ window:
+ oneOf:
+ - type: string
+ pattern: ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: 'ISO8601 period '
+ description: A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ type: string
+ title: Window
+ description: The absolute size of the time window for which results will
+ be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties)
+ specifiers.
+ periods:
+ type: integer
+ title: Periods
+ description: The size of the time window in number of `freq` units. One
+ of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties)
+ specifiers.
+ align:
+ $ref: '#/components/schemas/Alignment'
+ data:
+ items:
+ $ref: '#/components/schemas/SeriesSpec'
+ type: array
+ title: Series specifications
+ description: List of series specifications. When not specified, a single
+ default series specification is assumed (`[{}]`, using the default `metric`, `resource`,
+ ...).
+ render:
+ $ref: '#/components/schemas/Render'
+ additionalProperties: true
+ type: object
+ title: Query
+ description: 'Query definition for a Waylay analytics query.
+
+
+ See also [api docs](https://docs.waylay.io/#/api/query/?id=data-query-json-representation).'
+ QueryEntityInput:
+ properties:
+ name:
+ type: string
+ title: Query name
+ description: Name of the stored query definition.
+ meta:
+ type: object
+ title: Query metadata
+ description: User metadata for the query definition.
+ query:
+ $ref: '#/components/schemas/Query-Input'
+ additionalProperties: true
+ type: object
+ required:
+ - name
+ - query
+ title: QueryEntityInput
+ description: Input data to create a query definition.
+ QueryExecutionMessage:
+ properties:
+ message:
+ type: string
+ title: Message
+ description: A human-readable message.
+ level:
+ type: string
+ enum:
+ - debug
+ - info
+ - warning
+ - error
+ title: Severity Level
+ timestamp:
+ type: string
+ format: date-time
+ title: Timestamp
+ action:
+ type: string
+ title: Action
+ description: The request action that caused this message.
+ category:
+ type: string
+ title: Message Category
+ description: The subsystem that issued this message.
+ examples:
+ - data
+ - broker
+ - opt-cfg
+ properties:
+ anyOf:
+ - type: string
+ - $ref: '#/components/schemas/MessageProperties'
+ title: Message Arguments
+ exception:
+ $ref: '#/components/schemas/CauseException'
+ title: ''
+ description: ''
+ additionalProperties: true
+ type: object
+ required:
+ - message
+ - level
+ - timestamp
+ - action
+ - category
+ title: Message Object
+ description: A message object that informs or warns about a query execution
+ issue.
+ QueryHALLinks:
+ properties:
+ self:
+ $ref: '#/components/schemas/HALLink'
+ title: Self link
+ description: Link to the query definition.
+ execute:
+ $ref: '#/components/schemas/HALLink'
+ title: Execute link
+ description: Link to the query execution.
+ additionalProperties: true
+ type: object
+ required:
+ - self
+ - execute
+ title: QueryHALLinks
+ description: HAL Links for a query entity.
+ QueryListHALLinks:
+ properties:
+ self:
+ $ref: '#/components/schemas/HALLink'
+ title: Self link
+ description: Link to the current page of results.
+ first:
+ $ref: '#/components/schemas/HALLink'
+ title: First page link
+ description: Link to the first page of results.
+ prev:
+ $ref: '#/components/schemas/HALLink'
+ title: Previous page link
+ description: Link to the previous page of results.
+ next:
+ $ref: '#/components/schemas/HALLink'
+ title: Next page link
+ description: Link to the next page of results.
+ last:
+ $ref: '#/components/schemas/HALLink'
+ title: Last page link
+ description: Link to the last page of results.
+ additionalProperties: true
+ type: object
+ required:
+ - self
+ title: QueryListHALLinks
+ description: HAL links for a query listing.
+ QueryListItem:
+ properties:
+ _links:
+ $ref: '#/components/schemas/QueryHALLinks'
+ attrs:
+ type: object
+ title: Query attributes
+ description: System provided metadata for the query definition.
+ name:
+ type: string
+ title: Query name
+ description: Name of the stored query definition.
+ meta:
+ type: object
+ title: Query metadata
+ description: User metadata for the query definition.
+ additionalProperties: true
+ type: object
+ required:
+ - _links
+ - attrs
+ - name
+ title: QueryListItem
+ description: Listing of a query definition item.
+ QueryResponse:
+ properties:
+ _links:
+ $ref: '#/components/schemas/QueryHALLinks'
+ attrs:
+ type: object
+ title: Query attributes
+ description: System provided metadata for the query definition.
+ name:
+ type: string
+ title: Query name
+ description: Name of the stored query definition.
+ meta:
+ type: object
+ title: Query metadata
+ description: User metadata for the query definition.
+ query:
+ $ref: '#/components/schemas/Query-Output'
+ messages:
+ items:
+ $ref: '#/components/schemas/Message'
+ type: array
+ title: Messages
+ additionalProperties: true
+ type: object
+ required:
+ - _links
+ - attrs
+ - name
+ - query
+ title: QueryResponse
+ description: Represents a single named query.
+ QueryResult:
+ properties:
+ data:
+ items:
+ anyOf:
+ - $ref: '#/components/schemas/RowDataSet'
+ - $ref: '#/components/schemas/SeriesDataSet'
+ - $ref: '#/components/schemas/ColumnDataSet'
+ - $ref: '#/components/schemas/ObjectDataSet'
+ title: Response Data Set
+ description: Result timeseries data set, with one time dimension.
+ type: array
+ title: Response Data Sets
+ description: 'A list of data sets, each with their own time axis. There
+ will be one dataset for each `role` specified in the query (by default
+ a single `input` role).
+
+
+ The data is represented according to the `render` options in the query
+ (default `COMPACT_WS`).'
+ query:
+ $ref: '#/components/schemas/Query-Input'
+ description: The query that led to this result.
+ messages:
+ items:
+ $ref: '#/components/schemas/QueryExecutionMessage'
+ type: array
+ title: Messages and Warnings
+ additionalProperties: true
+ type: object
+ required:
+ - data
+ - query
+ - messages
+ title: Result of a timeseries query.
+ description: 'A JSON data response.
+
+
+ Uses the format as specified by the `render` options of the request (defaults
+ to `COMPACT_WS`).'
+ examples:
+ - data: []
+ query:
+ resource: R
+ metric: temperature
+ messages: []
+ QueryUpdateInput:
+ properties:
+ meta:
+ type: object
+ title: Query metadata
+ description: User metadata for the query definition.
+ query:
+ $ref: '#/components/schemas/Query-Input'
+ title: Query definition
+ additionalProperties: true
+ type: object
+ title: QueryUpdateInput
+ description: Input data to update a query definition.
+ Render:
+ properties:
+ mode:
+ $ref: '#/components/schemas/_RenderMode'
+ title: Named configuration of render options.
+ description: A render mode combines a number of render options under a single
+ name. Each option can still be overridden by an explicit value.
+ default: COMPACT_WS
+ roll_up:
+ type: boolean
+ title: Roll Up
+ description: move up attributes on rows (or columns) that are the same for all
+ rows (or columns) to a table attribute. Levels enumerated
+ in 'hierarchical' are excluded.
+ hierarchical:
+ anyOf:
+ - type: boolean
+ - items:
+ type: string
+ type: array
+ title: Hierarchical
+ description: if true, use hierarchical objects to represent multiple row
+ (or column) dimensions, otherwise multi-keys get concatenated with a dot-delimiter.
+ If the value is a list, only these levels are kept as separate levels,
+ while remaining levels get concatenated keys
+ value_key:
+ type: string
+ title: Value Key
+ description: if set, use this key in the value object to report data values
+ show_levels:
+ type: boolean
+ title: Show Levels
+ description: if set, report the levels used in the data values (either hierarchical
+ or flat)
+ iso_timestamp:
+ type: boolean
+ title: Iso Timestamp
+ description: if set, render timestamps in a row or column index with both
+ epoch and iso representations
+ row_key:
+ type: string
+ title: Row Key
+ description: if set, use this key as name of the row-dimension for single-dimensional
+ rows
+ column_key:
+ type: string
+ title: Column Key
+ description: if set, use this key as name of the column-dimension for single-dimensional
+ columns
+ header_array:
+ $ref: '#/components/schemas/HeaderArrayOption'
+ description: 'if set, report data as a header and an array.'
+ data_axis:
+ $ref: '#/components/schemas/DataAxisOption'
+ description: orientation of the tabular data as an array of arrays
+ key_seperator:
+ type: string
+ title: Key Separator
+ description: character used to concatenate multi-key columns or rows when
+ required
+ key_skip_empty:
+ type: boolean
+ title: Key Skip Empty
+ description: skip empty values in concatenating multi-key column or row
+ headers
+ include_window_spec:
+ type: boolean
+ title: Include Window Spec
+ description: if set, include window specification in render modes that support
+ it
+ additionalProperties: true
+ type: object
+ title: Render
+ description: Configures the representation of data sets returned by the query
+ API.
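+ # Illustrative sketch: overriding individual options on top of a named render
+ # mode, as described above (field names taken from this schema).
+ #   render:
+ #     mode: COMPACT_WS
+ #     iso_timestamp: true
+ #     roll_up: true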
+ Role:
+ type: string
+ title: Data set role
+ description: The role of series specification that was used to compile this
+ data set.
+ examples:
+ - input
+ - exogenous
+ - predictions
+ RowDataSet:
+ properties:
+ attributes:
+ $ref: '#/components/schemas/DataSetAttributes'
+ window_spec:
+ $ref: '#/components/schemas/DataSetWindow'
+ data_axis:
+ type: string
+ enum:
+ - column
+ const: column
+ title: Data Axis
+ default: column
+ columns:
+ prefixItems:
+ - const: timestamp
+ title: Unix epoch milliseconds timestamp.
+ items:
+ anyOf:
+ - $ref: '#/components/schemas/RowIndexColumnHeader'
+ - $ref: '#/components/schemas/ColumnHeader'
+ type: array
+ title: Column Headers
+ description: 'Header Attributes for the column data.
+
+
+ The initial string-valued headers (normally a single `timestamp`) indicate
+ that the corresponding column contains row index data (i.e. timestamps).
+
+
+ The remaining object-valued column headers identify and describe the actual
+ series data.'
+ data:
+ items:
+ prefixItems:
+ - $ref: '#/components/schemas/Timestamp'
+ items:
+ $ref: '#/components/schemas/Datum'
+ type: array
+ title: Observation
+ description: Row index data (timestamp), and a value for each of the series.
+ type: array
+ title: Data
+ additionalProperties: true
+ type: object
+ required:
+ - columns
+ - data
+ title: RowDataSet
+ description: 'Row-oriented dataset.
+
+
+ Timeseries data layout with a column header and a data row per timestamp.
+
+ Result for render options `data_axis=column` and `header_array=row`.'
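+ # Illustrative sketch, abridged from the `?render=DEFAULT` example response
+ # below (`R1` stands in for a resource id): one data row per timestamp.
+ #   columns: [timestamp, {resource: R1, metric: temperature, aggregation: max}]
+ #   data:
+ #     - [1717977600000, 12.67]
+ #     - [1718006400000, 13.74]
+ #   data_axis: column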
+ RowHeader:
+ properties:
+ timestamp:
+ $ref: '#/components/schemas/Timestamp'
+ timestamp_iso:
+ $ref: '#/components/schemas/TimestampIso'
+ additionalProperties: true
+ type: object
+ required:
+ - timestamp
+ title: RowHeader
+ description: 'Index entry attributes.
+
+
+ Attributes for a timestamp index entry.'
+ RowIndexColumnHeader:
+ type: string
+ title: Row Index Column Header
+ description: 'Header for a column containing a (representation of) the row index
+ value.
+
+ These headers precede the header attributes for row data.'
+ examples:
+ - timestamp
+ - timestamp_iso
+ SeriesDataSet:
+ properties:
+ attributes:
+ $ref: '#/components/schemas/DataSetAttributes'
+ window_spec:
+ $ref: '#/components/schemas/DataSetWindow'
+ data_axis:
+ type: string
+ enum:
+ - row
+ const: row
+ title: Data Axis
+ default: row
+ columns:
+ prefixItems:
+ - const: timestamp
+ title: Unix epoch milliseconds timestamp.
+ items:
+ anyOf:
+ - $ref: '#/components/schemas/RowIndexColumnHeader'
+ - $ref: '#/components/schemas/ColumnHeader'
+ type: array
+ title: Column Headers
+ description: 'Header Attributes for the column data.
+
+
+ The initial string-valued headers (normally a single `timestamp`) indicate
+ that the corresponding column contains row index data (i.e. timestamps).
+
+
+ The remaining object-valued column headers identify and describe the actual
+ series data.'
+ data:
+ prefixItems:
+ - items:
+ $ref: '#/components/schemas/Timestamp'
+ type: array
+ title: Timestamp Index
+ description: The timestamp index for this result data.
+ items:
+ items:
+ $ref: '#/components/schemas/Datum'
+ type: array
+ title: Series
+ description: All metric observation values for a single series.
+ type: array
+ title: Data
+ additionalProperties: true
+ type: object
+ required:
+ - columns
+ - data
+ title: SeriesDataSet
+ description: 'Column-oriented dataset.
+
+
+ Timeseries data layout with a column header
+
+ and a separate data array for the time index and each series.
+
+ Result for render options `data_axis=row` and `header_array=row`.'
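+ # Illustrative sketch, not taken from an example in this spec: the same data as
+ # the RowDataSet sketch above, but with one array for the timestamp index and
+ # one array per series.
+ #   columns: [timestamp, {resource: R1, metric: temperature, aggregation: max}]
+ #   data:
+ #     - [1717977600000, 1718006400000]   # timestamp index
+ #     - [12.67, 13.74]                   # series values
+ #   data_axis: row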
+ SeriesSpec:
+ properties:
+ name:
+ type: string
+ title: Name
+ description: Optional alias name for the series. This name is used when
+ exporting the dataset to CSV format.
+ examples:
+ - demoQuery
+ resource:
+ type: string
+ title: Resource
+ description: Resource id for the series, required unless it is specified
+ as a query default.
+ examples:
+ - 13efb488-75ac-4dac-828a-d49c5c2ebbfc
+ metric:
+ type: string
+ title: Metric
+ description: Metric name for the series, required unless it is specified
+ as a query default.
+ examples:
+ - temperature
+ - uptime
+ aggregration:
+ $ref: '#/components/schemas/AggregationMethod'
+ interpolation:
+ anyOf:
+ - $ref: '#/components/schemas/InterpolationMethod'
+ title: Interpolation Method
+ - $ref: '#/components/schemas/InterpolationSpec'
+ title: Interpolation specification.
+ title: Interpolation
+ additionalProperties: true
+ type: object
+ title: SeriesSpec
+ description: Query specification for a single series.
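+ # Illustrative sketch, not part of the generated schema: a series specification
+ # as it appears in the `data` array of a query (values are hypothetical).
+ #   - name: demoQuery
+ #     resource: 13efb488-75ac-4dac-828a-d49c5c2ebbfc
+ #     metric: temperature
+ #     interpolation: pad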
+ Timestamp:
+ type: integer
+ title: Timestamp
+ description: Unix epoch milliseconds timestamp.
+ TimestampIso:
+ type: string
+ format: date-time
+ title: ISO8601 timestamp
+ description: ISO8601 rendering of the timestamp, present when `render.iso_timestamp=true`.
+ ValidationError:
+ properties:
+ loc:
+ items:
+ anyOf:
+ - type: string
+ - type: integer
+ type: array
+ title: Location
+ msg:
+ type: string
+ title: Message
+ type:
+ type: string
+ title: Error Type
+ type: object
+ required:
+ - loc
+ - msg
+ - type
+ title: ValidationError
+ _RenderMode:
+ type: string
+ title: Render Mode
+ oneOf:
+ - const: HEADER_ROW
+ description: 'Render rows of timestamp and values. Show column headers. Includes
+ an iso timestamp.
+
+
+ ###### options
+
+ - `iso_timestamp`: `True`
+
+ - `header_array`: `row`
+
+ - `roll_up`: `False`
+
+ - `data_axis`: `column`'
+ type: string
+ - const: COMPACT
+ description: 'Render rows of timestamp and values. Show column headers.
+
+
+ ###### options
+
+ - `iso_timestamp`: `False`
+
+ - `header_array`: `row`
+
+ - `roll_up`: `False`
+
+ - `data_axis`: `column`'
+ type: string
+ - const: COMPACT_WS
+ description: 'Render rows of timestamp and values. Show column headers. Show
+ the time window attributes.
+
+
+ ###### options
+
+ - `iso_timestamp`: `False`
+
+ - `header_array`: `row`
+
+ - `roll_up`: `False`
+
+ - `data_axis`: `column`
+
+ - `include_window_spec`: `True`'
+ type: string
+ - const: SERIES
+ description: 'Render timestamps and each series (column) as a values array.
+ Show column headers.
+
+
+ ###### options
+
+ - `iso_timestamp`: `False`
+
+ - `header_array`: `row`
+
+ - `data_axis`: `row`
+
+ - `roll_up`: `True`
+
+ - `include_window_spec`: `True`'
+ type: string
+ - const: HEADER_COLUMN
+ description: 'Renders row index in `rows`, and each series as a values array.
+
+
+ The series are prefixed by their series attributes. The `rows` index is prefixed
+ by the labels for these attributes.
+
+
+ ###### options
+
+ - `iso_timestamp`: `True`
+
+ - `header_array`: `column`
+
+ - `roll_up`: `False`
+
+ - `data_axis`: `row`'
+ type: string
+ - const: FLAT_DICT
+ description: 'Render an object for each observation. Uses flattened keys.
+
+
+ ###### options
+
+ - `iso_timestamp`: `True`
+
+ - `hierarchical`: `False`
+
+ - `show_levels`: `True`
+
+ - `roll_up`: `False`'
+ type: string
+ - const: HIER_DICT
+ description: 'Render a hierarchical object for each observation. Shows an
+ iso timestamp.
+
+
+ ###### options
+
+ - `iso_timestamp`: `True`
+
+ - `hierarchical`: `True`
+
+ - `show_levels`: `True`
+
+ - `roll_up`: `True`'
+ type: string
+ - const: METRIC_FLAT_DICT
+ description: 'Render an object with metric keys for each observation. Shows
+ an iso timestamp.
+
+
+ ###### options
+
+ - `iso_timestamp`: `True`
+
+ - `hierarchical`: `[''metric'']`
+
+ - `show_levels`: `False`
+
+ - `roll_up`: `True`
+
+ - `key_skip_empty`: `True`'
+ type: string
+ - const: UPLOAD
+ description: 'Render in an object format compatible with the `/data/v1/events`
+ upload.
+
+
+ ###### options
+
+ - `iso_timestamp`: `False`
+
+ - `hierarchical`: `False`
+
+ - `show_levels`: `False`
+
+ - `roll_up`: `True`'
+ type: string
+ - const: CSV
+ description: 'Render in csv format with row headers.
+
+
+ ###### options
+
+ - `iso_timestamp`: `False`'
+ type: string
+ description: Render mode configuration keys.
+ securitySchemes:
+ waylayApiKeySecret:
+ type: http
+ description: Waylay apiKey/apiSecret basic authentication. All endpoints also
+ support Waylay JWT Bearer authentication.
+ scheme: basic
+tags:
+- name: Execute
+ description: Execute a named or ad-hoc query.
+- name: Manage
+ description: List, read, create, update and remove queries that are stored by name.
+- name: Status
+ description: Inspect the technical status of the waylay-query service.
+servers:
+- url: https://api.waylay.io
+ description: Waylay enterprise gateway
diff --git a/openapi/queries.transformed.openapi.yaml b/openapi/queries.transformed.openapi.yaml
new file mode 100644
index 0000000..4b979df
--- /dev/null
+++ b/openapi/queries.transformed.openapi.yaml
@@ -0,0 +1,5501 @@
+openapi: 3.0.3
+info:
+ title: 'Waylay Query: timeseries queries (v1 protocol)'
+ description: |-
+
+ Execute and store queries on the Waylay timeseries.
+
+ Protocol version: v1.
+ version: 0.5.0
+paths:
+ /queries/v1/queries/v1:
+ get:
+ tags:
+ - Status
+ summary: Get Version And Health
+ description: Get the version and health status for waylay-query.
+ operationId: get_version_and_health_queries_v1_get
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ additionalProperties:
+ type: string
+ type: object
+ title: Response Get Version And Health Queries V1 Get
+ security:
+ - waylayApiKeySecret: []
+ x-py-method: get
+ x-consumes-multipart: false
+ x-consumes-urlencoded: false
+ x-consumes-json: false
+ x-consumes-other: false
+ /queries/v1/queries/v1/data:
+ post:
+ tags:
+ - Execute
+ summary: Execute Query
+ description: |-
+ Execute a timeseries query.
+
+ Executes the timeseries query specified in the request body,
+ after applying any overrides from the URL parameters.
+ operationId: execute_query_queries_v1_data_post
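+ # Illustrative usage sketch, not part of the generated spec: query parameters
+ # override the corresponding attributes of the JSON body, e.g. (hypothetical values)
+ #   POST /queries/v1/queries/v1/data?resource=13efb488-75ac-4dac-828a-d49c5c2ebbfc&freq=PT1H
+ #   body: {"metric": "temperature", "from": "P1D"}
+ # runs the body query with the overridden default `resource` and `freq`.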
+ parameters:
+ - name: resource
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Resource
+ description: Default Resource Override.
+ example: 13efb488-75ac-4dac-828a-d49c5c2ebbfc
+ description: Default Resource Override.
+ x-example: '''13efb488-75ac-4dac-828a-d49c5c2ebbfc'''
+ x-showExample: true
+ - name: metric
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Metric
+ description: Default Metric Override.
+ example: temperature
+ description: Default Metric Override.
+ x-example: '''temperature'''
+ x-showExample: true
+ - name: aggregation
+ in: query
+ required: false
+ schema:
+ title: Aggregation Override.
+ $ref: '#/components/schemas/AggregationMethod'
+ x-showExample: false
+ - name: interpolation
+ in: query
+ required: false
+ schema:
+ anyOf:
+ - $ref: '#/components/schemas/InterpolationMethod'
+ title: Interpolation Override.
+ - $ref: '#/components/schemas/InterpolationSpec'
+ title: Interpolation Override.
+ title: Interpolation
+ x-showExample: false
+ - name: freq
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Grouping Interval Override
+ description: Override for the `freq` query attribute.
+ oneOf:
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: 'ISO8601 period '
+ description: >-
+ A period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ - type: string
+ description: >-
+ When `inferred` is specified, the frequency of aggregation
+ will be inferred from the main/first time series. This can be
+ used to regularize the time series.
+ enum:
+ - inferred
+ description: Override for the `freq` query attribute.
+ x-showExample: false
+ - name: from
+ in: query
+ required: false
+ schema:
+ type: string
+ title: From Override.
+ oneOf:
+ - title: ISO8601 absolute timestamp
+ type: string
+ format: date-time
+ example: '2018-03-21T12:23:00+01:00'
+ description: >-
+ A date or date-time in
+ [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is
+ assumed (`+00:00`)
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ - type: integer
+ title: UNIX epoch milliseconds
+ minimum: 0
+ description: >-
+ Absolute timestamp milliseconds in unix epoch since
+ 1970-01-01.
+ example: 1534836422284
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: ISO8601 Period Before Now
+ description: >-
+ Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ x-showExample: false
+ - name: until
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Until Override.
+ oneOf:
+ - title: ISO8601 absolute timestamp
+ type: string
+ format: date-time
+ example: '2018-03-21T12:23:00+01:00'
+ description: >-
+ A date or date-time in
+ [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is
+ assumed (`+00:00`)
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ - type: integer
+ title: UNIX epoch milliseconds
+ minimum: 0
+ description: >-
+ Absolute timestamp milliseconds in unix epoch since
+ 1970-01-01.
+ example: 1534836422284
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: ISO8601 Period Before Now
+ description: >-
+ Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ x-showExample: false
+ - name: window
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Window Override.
+ oneOf:
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: 'ISO8601 period '
+ description: >-
+ A period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ x-showExample: false
+ - name: periods
+ in: query
+ required: false
+ schema:
+ type: integer
+ title: Periods Override.
+ x-showExample: false
+ - name: render
+ in: query
+ required: false
+ schema:
+ anyOf:
+ - $ref: '#/components/schemas/_RenderMode'
+ title: Render Mode
+ - $ref: '#/components/schemas/Render'
+ title: Render Override.
+ title: Render
+ x-showExample: false
+ - name: accept
+ in: header
+ required: false
+ schema:
+ anyOf:
+ - type: string
+ nullable: true
+ title: Accept header
+ description: Use a 'text/csv' accept header to get CSV formatted results.
+ description: Use a 'text/csv' accept header to get CSV formatted results.
+ x-showExample: false
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/Query-Input'
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueryResult'
+ examples:
+ default:
+ summary: Query response example for `?render=DEFAULT`
+ value:
+ data:
+ - columns:
+ - timestamp
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: temperature
+ aggregation: max
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: humidity
+ aggregation: mean
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: temperature
+ aggregation: max
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: humidity
+ aggregation: mean
+ data:
+ - - 1717977600000
+ - 12.67
+ - 80
+ - 12.78
+ - 83.125
+ - - 1718006400000
+ - 13.74
+ - 88
+ - 13.06
+ - 80.875
+ - - 1718035200000
+ - null
+ - null
+ - 13.35
+ - 78.5
+ - - 1718064000000
+ - 7.49
+ - 88
+ - 13
+ - 81.875
+ - - 1718092800000
+ - null
+ - null
+ - 14.84
+ - 62.375
+ - - 1718121600000
+ - null
+ - null
+ - 14.59
+ - 76.5
+ - - 1718150400000
+ - 9.34
+ - 90
+ - 13.08
+ - 85.375
+ - - 1718179200000
+ - null
+ - null
+ - 15.41
+ - 61.75
+ - - 1718208000000
+ - null
+ - null
+ - 15.1
+ - 74
+ - - 1718236800000
+ - 9.03
+ - 90
+ - 14.99
+ - 82.875
+ - - 1718265600000
+ - null
+ - null
+ - 18.64
+ - 57.5
+ - - 1718294400000
+ - null
+ - null
+ - 18.35
+ - 62.5
+ - - 1718323200000
+ - 12.98
+ - 80
+ - 13.02
+ - 81.625
+ - - 1718352000000
+ - null
+ - null
+ - 18.19
+ - 81.5
+ - - 1718380800000
+ - null
+ - null
+ - 17.75
+ - 88.875
+ - - 1718409600000
+ - 11.8
+ - 88
+ - 13.4
+ - 87.25
+ - - 1718438400000
+ - null
+ - null
+ - 16.53
+ - 64.5
+ - - 1718467200000
+ - null
+ - null
+ - 14.69
+ - 84.875
+ - - 1718496000000
+ - 12.12
+ - 89
+ - 13.77
+ - 87.875
+ - - 1718524800000
+ - null
+ - null
+ - 19.27
+ - 73.125
+ - - 1718553600000
+ - null
+ - null
+ - 19.12
+ - 69.75
+ data_axis: column
+ attributes:
+ role: input
+ window_spec:
+ from: 1717977600000
+ until: 1718582400000
+ window: P7D
+ freq: PT8H
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T13:00:20.886997+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:00:20.887995+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ hier_dict:
+ summary: Query response example for `?render=HIER_DICT`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 80
+ temperature:
+ max: 12.67
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 83.125
+ temperature:
+ max: 12.78
+ - role: input
+ timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 88
+ temperature:
+ max: 13.74
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 80.875
+ temperature:
+ max: 13.06
+ - role: input
+ timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 78.5
+ temperature:
+ max: 13.35
+ - role: input
+ timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 88
+ temperature:
+ max: 7.49
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 81.875
+ temperature:
+ max: 13
+ - role: input
+ timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 62.375
+ temperature:
+ max: 14.84
+ - role: input
+ timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 76.5
+ temperature:
+ max: 14.59
+ - role: input
+ timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 90
+ temperature:
+ max: 9.34
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 85.375
+ temperature:
+ max: 13.08
+ - role: input
+ timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 61.75
+ temperature:
+ max: 15.41
+ - role: input
+ timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 74
+ temperature:
+ max: 15.1
+ - role: input
+ timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 90
+ temperature:
+ max: 9.03
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 82.875
+ temperature:
+ max: 14.99
+ - role: input
+ timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 57.5
+ temperature:
+ max: 18.64
+ - role: input
+ timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 62.5
+ temperature:
+ max: 18.35
+ - role: input
+ timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 80
+ temperature:
+ max: 12.98
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 81.625
+ temperature:
+ max: 13.02
+ - role: input
+ timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 81.5
+ temperature:
+ max: 18.19
+ - role: input
+ timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 88.875
+ temperature:
+ max: 17.75
+ - role: input
+ timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 88
+ temperature:
+ max: 11.8
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 87.25
+ temperature:
+ max: 13.4
+ - role: input
+ timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 64.5
+ temperature:
+ max: 16.53
+ - role: input
+ timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 84.875
+ temperature:
+ max: 14.69
+ - role: input
+ timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 89
+ temperature:
+ max: 12.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 87.875
+ temperature:
+ max: 13.77
+ - role: input
+ timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 73.125
+ temperature:
+ max: 19.27
+ - role: input
+ timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 69.75
+ temperature:
+ max: 19.12
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T12:57:58.698912+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T12:57:58.699998+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: HIER_DICT
+ upload:
+ summary: Query response example for `?render=UPLOAD`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.67
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 12.78
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 83.125
+ - role: input
+ timestamp: 1718006400000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 13.74
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.06
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 80.875
+ - role: input
+ timestamp: 1718035200000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 78.5
+ - role: input
+ timestamp: 1718064000000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 7.49
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.875
+ - role: input
+ timestamp: 1718092800000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.84
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.375
+ - role: input
+ timestamp: 1718121600000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.59
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 76.5
+ - role: input
+ timestamp: 1718150400000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.34
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.08
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 85.375
+ - role: input
+ timestamp: 1718179200000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.41
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 61.75
+ - role: input
+ timestamp: 1718208000000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.1
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 74
+ - role: input
+ timestamp: 1718236800000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.03
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.99
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 82.875
+ - role: input
+ timestamp: 1718265600000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.64
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 57.5
+ - role: input
+ timestamp: 1718294400000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.5
+ - role: input
+ timestamp: 1718323200000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.98
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.02
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.625
+ - role: input
+ timestamp: 1718352000000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.19
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.5
+ - role: input
+ timestamp: 1718380800000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 17.75
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 88.875
+ - role: input
+ timestamp: 1718409600000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 11.8
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.4
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.25
+ - role: input
+ timestamp: 1718438400000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 16.53
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 64.5
+ - role: input
+ timestamp: 1718467200000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.69
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 84.875
+ - role: input
+ timestamp: 1718496000000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.12
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 89
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.77
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.875
+ - role: input
+ timestamp: 1718524800000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.27
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 73.125
+ - role: input
+ timestamp: 1718553600000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 69.75
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T13:03:24.128684+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:03:24.129391+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: UPLOAD
+ flat_dict:
+ summary: Query response example for `?render=FLAT_DICT`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.67
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 12.78
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 83.125
+ - role: input
+ timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 13.74
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.06
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 80.875
+ - role: input
+ timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 78.5
+ - role: input
+ timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 7.49
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.875
+ - role: input
+ timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.84
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.375
+ - role: input
+ timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.59
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 76.5
+ - role: input
+ timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.34
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.08
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 85.375
+ - role: input
+ timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.41
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 61.75
+ - role: input
+ timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.1
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 74
+ - role: input
+ timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.03
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.99
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 82.875
+ - role: input
+ timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.64
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 57.5
+ - role: input
+ timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.5
+ - role: input
+ timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.98
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.02
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.625
+ - role: input
+ timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.19
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.5
+ - role: input
+ timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 17.75
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 88.875
+ - role: input
+ timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 11.8
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.4
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.25
+ - role: input
+ timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 16.53
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 64.5
+ - role: input
+ timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.69
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 84.875
+ - role: input
+ timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.12
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 89
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.77
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.875
+ - role: input
+ timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.27
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 73.125
+ - role: input
+ timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 69.75
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T12:59:32.689972+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T12:59:32.691573+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: FLAT_DICT
+ metric_flat_dict:
+ summary: Query response example for `?render=METRIC_FLAT_DICT`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 80
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 83.125
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 12.67
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 12.78
+ - role: input
+ timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 80.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 13.74
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.06
+ - role: input
+ timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 78.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.35
+ - role: input
+ timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 81.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 7.49
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13
+ - role: input
+ timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 62.375
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.84
+ - role: input
+ timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 76.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.59
+ - role: input
+ timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 90
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 85.375
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 9.34
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.08
+ - role: input
+ timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 61.75
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 15.41
+ - role: input
+ timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 74
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 15.1
+ - role: input
+ timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 90
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 82.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 9.03
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.99
+ - role: input
+ timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 57.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 18.64
+ - role: input
+ timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 62.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 18.35
+ - role: input
+ timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 80
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 81.625
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 12.98
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.02
+ - role: input
+ timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 81.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 18.19
+ - role: input
+ timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 88.875
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 17.75
+ - role: input
+ timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 87.25
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 11.8
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.4
+ - role: input
+ timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 64.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 16.53
+ - role: input
+ timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 84.875
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.69
+ - role: input
+ timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 89
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 87.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 12.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.77
+ - role: input
+ timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 73.125
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 19.27
+ - role: input
+ timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 69.75
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 19.12
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T15:37:41.460083+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T15:37:41.461241+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: METRIC_FLAT_DICT
+ header_column:
+ summary: Query response example for `?render=HEADER_COLUMN`
+ value:
+ data:
+ - rows:
+ - resource
+ - metric
+ - aggregation
+ - timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ - timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ - timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ - timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ - timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ - timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ - timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ - timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ - timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ - timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ - timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ - timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ - timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ - timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ - timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ - timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ - timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ - timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ - timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ - timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ - timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ data:
+ - - 06e267b9-8714-4c58-ac53-df6a291b59dc
+ - temperature
+ - max
+ - 12.67
+ - 13.74
+ - null
+ - 7.49
+ - null
+ - null
+ - 9.34
+ - null
+ - null
+ - 9.03
+ - null
+ - null
+ - 12.98
+ - null
+ - null
+ - 11.8
+ - null
+ - null
+ - 12.12
+ - null
+ - null
+ - - 06e267b9-8714-4c58-ac53-df6a291b59dc
+ - humidity
+ - mean
+ - 80
+ - 88
+ - null
+ - 88
+ - null
+ - null
+ - 90
+ - null
+ - null
+ - 90
+ - null
+ - null
+ - 80
+ - null
+ - null
+ - 88
+ - null
+ - null
+ - 89
+ - null
+ - null
+ - - 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ - temperature
+ - max
+ - 12.78
+ - 13.06
+ - 13.35
+ - 13
+ - 14.84
+ - 14.59
+ - 13.08
+ - 15.41
+ - 15.1
+ - 14.99
+ - 18.64
+ - 18.35
+ - 13.02
+ - 18.19
+ - 17.75
+ - 13.4
+ - 16.53
+ - 14.69
+ - 13.77
+ - 19.27
+ - 19.12
+ - - 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ - humidity
+ - mean
+ - 83.125
+ - 80.875
+ - 78.5
+ - 81.875
+ - 62.375
+ - 76.5
+ - 85.375
+ - 61.75
+ - 74
+ - 82.875
+ - 57.5
+ - 62.5
+ - 81.625
+ - 81.5
+ - 88.875
+ - 87.25
+ - 64.5
+ - 84.875
+ - 87.875
+ - 73.125
+ - 69.75
+ data_axis: row
+ attributes:
+ role: input
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T13:02:54.733676+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:02:54.734610+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: HEADER_COLUMN
+ series:
+ summary: Query response example for `?render=SERIES`
+ value:
+ data:
+ - columns:
+ - timestamp
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: temperature
+ aggregation: max
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: humidity
+ aggregation: mean
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: temperature
+ aggregation: max
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: humidity
+ aggregation: mean
+ data:
+ - - 1717977600000
+ - 1718006400000
+ - 1718035200000
+ - 1718064000000
+ - 1718092800000
+ - 1718121600000
+ - 1718150400000
+ - 1718179200000
+ - 1718208000000
+ - 1718236800000
+ - 1718265600000
+ - 1718294400000
+ - 1718323200000
+ - 1718352000000
+ - 1718380800000
+ - 1718409600000
+ - 1718438400000
+ - 1718467200000
+ - 1718496000000
+ - 1718524800000
+ - 1718553600000
+ - - 12.67
+ - 13.74
+ - null
+ - 7.49
+ - null
+ - null
+ - 9.34
+ - null
+ - null
+ - 9.03
+ - null
+ - null
+ - 12.98
+ - null
+ - null
+ - 11.8
+ - null
+ - null
+ - 12.12
+ - null
+ - null
+ - - 80
+ - 88
+ - null
+ - 88
+ - null
+ - null
+ - 90
+ - null
+ - null
+ - 90
+ - null
+ - null
+ - 80
+ - null
+ - null
+ - 88
+ - null
+ - null
+ - 89
+ - null
+ - null
+ - - 12.78
+ - 13.06
+ - 13.35
+ - 13
+ - 14.84
+ - 14.59
+ - 13.08
+ - 15.41
+ - 15.1
+ - 14.99
+ - 18.64
+ - 18.35
+ - 13.02
+ - 18.19
+ - 17.75
+ - 13.4
+ - 16.53
+ - 14.69
+ - 13.77
+ - 19.27
+ - 19.12
+ - - 83.125
+ - 80.875
+ - 78.5
+ - 81.875
+ - 62.375
+ - 76.5
+ - 85.375
+ - 61.75
+ - 74
+ - 82.875
+ - 57.5
+ - 62.5
+ - 81.625
+ - 81.5
+ - 88.875
+ - 87.25
+ - 64.5
+ - 84.875
+ - 87.875
+ - 73.125
+ - 69.75
+ data_axis: row
+ attributes:
+ role: input
+ window_spec:
+ from: 1717977600000
+ until: 1718582400000
+ window: P7D
+ freq: PT8H
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T13:01:39.240450+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:01:39.241481+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: SERIES
+ text/csv:
+ schema:
+ title: CSV timeseries data response.
+ description: >-
+                    A CSV representation of the query result:
+
+                    - the first `timestamp` column contains the event timestamp in
+                    ISO format
+
+                    - the remaining columns contain the (aggregated or
+                    non-aggregated) series values, using a
+                    `{resource_id}/{metric}` or
+                    `{resource_id}/{metric}/{aggregation}` header.
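+              # Illustrative sketch (not part of the schema): using the series from the
+              # JSON examples above, the CSV layout described here would start as follows
+              # (header abbreviated to the first two series; the remaining series add
+              # further `{resource_id}/{metric}/{aggregation}` columns):
+              #
+              #   timestamp,06e267b9-8714-4c58-ac53-df6a291b59dc/temperature/max,06e267b9-8714-4c58-ac53-df6a291b59dc/humidity/mean
+              #   2024-06-10T00:00:00+00:00,12.67,80
+              #   2024-06-10T08:00:00+00:00,13.74,88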
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security:
+ - waylayApiKeySecret: []
+ x-py-method: execute
+ x-consumes-multipart: false
+ x-consumes-urlencoded: false
+ x-consumes-json: true
+ x-consumes-other: false
+  /queries/v1/data/{query_name}:
+ get:
+ tags:
+ - Execute
+ summary: Execute Named Query
+ description: |-
+ Execute a named timeseries query.
+
+ Retrieves a stored query definition by name,
+        applies overrides from the URL parameters, and executes it.
+ operationId: executeByName_query_queries_v1_data__query_name__get
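+      # Illustrative sketch (not part of the specification): executing the stored query
+      # used in the response examples below, with window and rendering overrides passed
+      # as URL parameters. The gateway host is a placeholder and authentication is
+      # assumed to be configured separately.
+      #
+      #   curl "https://<gateway>/queries/v1/data/weatherHistoryDailyAndHourly?from=P7D&freq=PT8H&render=FLAT_DICT"
+      #
+      # Here `from=P7D` uses the "ISO8601 Period Before Now" form (7 days before now),
+      # `freq=PT8H` overrides the grouping interval, and `render=FLAT_DICT` selects the
+      # flat dictionary rendering shown in the examples.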
+ parameters:
+ - name: query_name
+ in: path
+ required: true
+ schema:
+ type: string
+ title: Query Name
+ x-showExample: false
+ - name: resource
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Resource
+ description: Default Resource Override.
+ example: 13efb488-75ac-4dac-828a-d49c5c2ebbfc
+ description: Default Resource Override.
+ x-example: '''13efb488-75ac-4dac-828a-d49c5c2ebbfc'''
+ x-showExample: true
+ - name: metric
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Metric
+ description: Default Metric Override.
+ example: temperature
+ description: Default Metric Override.
+ x-example: '''temperature'''
+ x-showExample: true
+ - name: aggregation
+ in: query
+ required: false
+ schema:
+ title: Aggregation Override.
+ $ref: '#/components/schemas/AggregationMethod'
+ x-showExample: false
+ - name: interpolation
+ in: query
+ required: false
+ schema:
+ anyOf:
+ - $ref: '#/components/schemas/InterpolationMethod'
+ title: Interpolation Override.
+ - $ref: '#/components/schemas/InterpolationSpec'
+ title: Interpolation Override.
+ title: Interpolation
+ x-showExample: false
+ - name: freq
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Grouping Interval Override
+ description: Override for the `freq` query attribute.
+ oneOf:
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: 'ISO8601 period '
+ description: >-
+ A period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ - type: string
+ description: >-
+ When `inferred` is specified, the frequency of aggregation
+ will be inferred from the main/first time series. This can be
+                  used to regularize the time series.
+ enum:
+ - inferred
+ description: Override for the `freq` query attribute.
+ x-showExample: false
+ - name: from
+ in: query
+ required: false
+ schema:
+ type: string
+ title: From Override.
+ oneOf:
+ - title: ISO8601 absolute timestamp
+ type: string
+ format: date-time
+ example: '2018-03-21T12:23:00+01:00'
+ description: >-
+ A date or date-time in
+ [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is
+ assumed (`+00:00`)
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ - type: integer
+ title: UNIX epoch milliseconds
+ minimum: 0
+ description: >-
+ Absolute timestamp milliseconds in unix epoch since
+ 1970-01-01.
+ example: 1534836422284
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: ISO8601 Period Before Now
+ description: >-
+ Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ x-showExample: false
+ - name: until
+ in: query
+ required: false
+ schema:
+ type: string
+            title: Until Override.
+ oneOf:
+ - title: ISO8601 absolute timestamp
+ type: string
+ format: date-time
+ example: '2018-03-21T12:23:00+01:00'
+ description: >-
+ A date or date-time in
+ [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is
+ assumed (`+00:00`)
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ - type: integer
+ title: UNIX epoch milliseconds
+ minimum: 0
+ description: >-
+ Absolute timestamp milliseconds in unix epoch since
+ 1970-01-01.
+ example: 1534836422284
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: ISO8601 Period Before Now
+ description: >-
+ Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ x-showExample: false
+ - name: window
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Window Override.
+ oneOf:
+ - type: string
+ format: period
+ example: PT3H15M
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ title: 'ISO8601 period '
+ description: >-
+ A period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ x-showExample: false
+ - name: periods
+ in: query
+ required: false
+ schema:
+ type: integer
+ title: Periods Override.
+ x-showExample: false
+ - name: render
+ in: query
+ required: false
+ schema:
+ anyOf:
+ - $ref: '#/components/schemas/_RenderMode'
+ title: Render Mode
+ - $ref: '#/components/schemas/Render'
+ title: Render Override.
+ title: Render
+ x-showExample: false
+ - name: accept
+ in: header
+ required: false
+ schema:
+ anyOf:
+ - type: string
+ nullable: true
+ title: Accept header
+ description: Use a 'text/csv' accept header to get CSV formatted results.
+ description: Use a 'text/csv' accept header to get CSV formatted results.
+ x-showExample: false
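+      # Illustrative sketch (not part of the specification): as described for the
+      # `accept` parameter above, sending a 'text/csv' Accept header returns the query
+      # result as CSV rather than JSON (gateway host is a placeholder):
+      #
+      #   curl -H "Accept: text/csv" "https://<gateway>/queries/v1/data/weatherHistoryDailyAndHourly"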
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueryResult'
+ examples:
+ default:
+ summary: Query response example for `?render=DEFAULT`
+ value:
+ data:
+ - columns:
+ - timestamp
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: temperature
+ aggregation: max
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: humidity
+ aggregation: mean
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: temperature
+ aggregation: max
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: humidity
+ aggregation: mean
+ data:
+ - - 1717977600000
+ - 12.67
+ - 80
+ - 12.78
+ - 83.125
+ - - 1718006400000
+ - 13.74
+ - 88
+ - 13.06
+ - 80.875
+ - - 1718035200000
+ - null
+ - null
+ - 13.35
+ - 78.5
+ - - 1718064000000
+ - 7.49
+ - 88
+ - 13
+ - 81.875
+ - - 1718092800000
+ - null
+ - null
+ - 14.84
+ - 62.375
+ - - 1718121600000
+ - null
+ - null
+ - 14.59
+ - 76.5
+ - - 1718150400000
+ - 9.34
+ - 90
+ - 13.08
+ - 85.375
+ - - 1718179200000
+ - null
+ - null
+ - 15.41
+ - 61.75
+ - - 1718208000000
+ - null
+ - null
+ - 15.1
+ - 74
+ - - 1718236800000
+ - 9.03
+ - 90
+ - 14.99
+ - 82.875
+ - - 1718265600000
+ - null
+ - null
+ - 18.64
+ - 57.5
+ - - 1718294400000
+ - null
+ - null
+ - 18.35
+ - 62.5
+ - - 1718323200000
+ - 12.98
+ - 80
+ - 13.02
+ - 81.625
+ - - 1718352000000
+ - null
+ - null
+ - 18.19
+ - 81.5
+ - - 1718380800000
+ - null
+ - null
+ - 17.75
+ - 88.875
+ - - 1718409600000
+ - 11.8
+ - 88
+ - 13.4
+ - 87.25
+ - - 1718438400000
+ - null
+ - null
+ - 16.53
+ - 64.5
+ - - 1718467200000
+ - null
+ - null
+ - 14.69
+ - 84.875
+ - - 1718496000000
+ - 12.12
+ - 89
+ - 13.77
+ - 87.875
+ - - 1718524800000
+ - null
+ - null
+ - 19.27
+ - 73.125
+ - - 1718553600000
+ - null
+ - null
+ - 19.12
+ - 69.75
+ data_axis: column
+ attributes:
+ role: input
+ window_spec:
+ from: 1717977600000
+ until: 1718582400000
+ window: P7D
+ freq: PT8H
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T13:00:20.886997+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:00:20.887995+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ hier_dict:
+ summary: Query response example for `?render=HIER_DICT`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 80
+ temperature:
+ max: 12.67
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 83.125
+ temperature:
+ max: 12.78
+ - role: input
+ timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 88
+ temperature:
+ max: 13.74
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 80.875
+ temperature:
+ max: 13.06
+ - role: input
+ timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 78.5
+ temperature:
+ max: 13.35
+ - role: input
+ timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 88
+ temperature:
+ max: 7.49
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 81.875
+ temperature:
+ max: 13
+ - role: input
+ timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 62.375
+ temperature:
+ max: 14.84
+ - role: input
+ timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 76.5
+ temperature:
+ max: 14.59
+ - role: input
+ timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 90
+ temperature:
+ max: 9.34
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 85.375
+ temperature:
+ max: 13.08
+ - role: input
+ timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 61.75
+ temperature:
+ max: 15.41
+ - role: input
+ timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 74
+ temperature:
+ max: 15.1
+ - role: input
+ timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 90
+ temperature:
+ max: 9.03
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 82.875
+ temperature:
+ max: 14.99
+ - role: input
+ timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 57.5
+ temperature:
+ max: 18.64
+ - role: input
+ timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 62.5
+ temperature:
+ max: 18.35
+ - role: input
+ timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 80
+ temperature:
+ max: 12.98
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 81.625
+ temperature:
+ max: 13.02
+ - role: input
+ timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 81.5
+ temperature:
+ max: 18.19
+ - role: input
+ timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 88.875
+ temperature:
+ max: 17.75
+ - role: input
+ timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 88
+ temperature:
+ max: 11.8
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 87.25
+ temperature:
+ max: 13.4
+ - role: input
+ timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 64.5
+ temperature:
+ max: 16.53
+ - role: input
+ timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 84.875
+ temperature:
+ max: 14.69
+ - role: input
+ timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity:
+ mean: 89
+ temperature:
+ max: 12.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 87.875
+ temperature:
+ max: 13.77
+ - role: input
+ timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 73.125
+ temperature:
+ max: 19.27
+ - role: input
+ timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ levels:
+ - resource
+ - metric
+ - aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc:
+ humidity: {}
+ temperature: {}
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1:
+ humidity:
+ mean: 69.75
+ temperature:
+ max: 19.12
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T12:57:58.698912+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T12:57:58.699998+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: HIER_DICT
+ upload:
+ summary: Query response example for `?render=UPLOAD`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.67
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 12.78
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 83.125
+ - role: input
+ timestamp: 1718006400000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 13.74
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.06
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 80.875
+ - role: input
+ timestamp: 1718035200000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 78.5
+ - role: input
+ timestamp: 1718064000000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 7.49
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.875
+ - role: input
+ timestamp: 1718092800000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.84
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.375
+ - role: input
+ timestamp: 1718121600000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.59
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 76.5
+ - role: input
+ timestamp: 1718150400000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.34
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.08
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 85.375
+ - role: input
+ timestamp: 1718179200000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.41
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 61.75
+ - role: input
+ timestamp: 1718208000000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.1
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 74
+ - role: input
+ timestamp: 1718236800000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.03
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.99
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 82.875
+ - role: input
+ timestamp: 1718265600000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.64
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 57.5
+ - role: input
+ timestamp: 1718294400000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.5
+ - role: input
+ timestamp: 1718323200000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.98
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.02
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.625
+ - role: input
+ timestamp: 1718352000000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.19
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.5
+ - role: input
+ timestamp: 1718380800000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 17.75
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 88.875
+ - role: input
+ timestamp: 1718409600000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 11.8
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.4
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.25
+ - role: input
+ timestamp: 1718438400000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 16.53
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 64.5
+ - role: input
+ timestamp: 1718467200000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.69
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 84.875
+ - role: input
+ timestamp: 1718496000000
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.12
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 89
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.77
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.875
+ - role: input
+ timestamp: 1718524800000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.27
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 73.125
+ - role: input
+ timestamp: 1718553600000
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 69.75
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T13:03:24.128684+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:03:24.129391+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: UPLOAD
+ flat_dict:
+ summary: Query response example for `?render=FLAT_DICT`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.67
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 12.78
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 83.125
+ - role: input
+ timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 13.74
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.06
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 80.875
+ - role: input
+ timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 78.5
+ - role: input
+ timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 7.49
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.875
+ - role: input
+ timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.84
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.375
+ - role: input
+ timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.59
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 76.5
+ - role: input
+ timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.34
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.08
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 85.375
+ - role: input
+ timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.41
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 61.75
+ - role: input
+ timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 15.1
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 74
+ - role: input
+ timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 9.03
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 90
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.99
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 82.875
+ - role: input
+ timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.64
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 57.5
+ - role: input
+ timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.35
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 62.5
+ - role: input
+ timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.98
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 80
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.02
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.625
+ - role: input
+ timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 18.19
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 81.5
+ - role: input
+ timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 17.75
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 88.875
+ - role: input
+ timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 11.8
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.4
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.25
+ - role: input
+ timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 16.53
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 64.5
+ - role: input
+ timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 14.69
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 84.875
+ - role: input
+ timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.temperature.max: 12.12
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.humidity.mean: 89
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 13.77
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 87.875
+ - role: input
+ timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.27
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 73.125
+ - role: input
+ timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ levels:
+ - resource.metric.aggregation
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.temperature.max: 19.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.humidity.mean: 69.75
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T12:59:32.689972+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T12:59:32.691573+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: FLAT_DICT
+ metric_flat_dict:
+ summary: Query response example for `?render=METRIC_FLAT_DICT`
+ value:
+ data:
+ - - role: input
+ timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 80
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 83.125
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 12.67
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 12.78
+ - role: input
+ timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 80.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 13.74
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.06
+ - role: input
+ timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 78.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.35
+ - role: input
+ timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 81.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 7.49
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13
+ - role: input
+ timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 62.375
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.84
+ - role: input
+ timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 76.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.59
+ - role: input
+ timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 90
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 85.375
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 9.34
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.08
+ - role: input
+ timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 61.75
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 15.41
+ - role: input
+ timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 74
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 15.1
+ - role: input
+ timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 90
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 82.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 9.03
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.99
+ - role: input
+ timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 57.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 18.64
+ - role: input
+ timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 62.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 18.35
+ - role: input
+ timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 80
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 81.625
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 12.98
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.02
+ - role: input
+ timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 81.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 18.19
+ - role: input
+ timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 88.875
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 17.75
+ - role: input
+ timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 88
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 87.25
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 11.8
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.4
+ - role: input
+ timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 64.5
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 16.53
+ - role: input
+ timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 84.875
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 14.69
+ - role: input
+ timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ humidity:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.mean: 89
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 87.875
+ temperature:
+ 06e267b9-8714-4c58-ac53-df6a291b59dc.max: 12.12
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 13.77
+ - role: input
+ timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 73.125
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 19.27
+ - role: input
+ timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ humidity:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.mean: 69.75
+ temperature:
+ 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1.max: 19.12
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T15:37:41.460083+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T15:37:41.461241+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: METRIC_FLAT_DICT
+ header_column:
+ summary: Query response example for `?render=HEADER_COLUMN`
+ value:
+ data:
+ - rows:
+ - resource
+ - metric
+ - aggregation
+ - timestamp: 1717977600000
+ timestamp_iso: '2024-06-10T00:00:00+00:00'
+ - timestamp: 1718006400000
+ timestamp_iso: '2024-06-10T08:00:00+00:00'
+ - timestamp: 1718035200000
+ timestamp_iso: '2024-06-10T16:00:00+00:00'
+ - timestamp: 1718064000000
+ timestamp_iso: '2024-06-11T00:00:00+00:00'
+ - timestamp: 1718092800000
+ timestamp_iso: '2024-06-11T08:00:00+00:00'
+ - timestamp: 1718121600000
+ timestamp_iso: '2024-06-11T16:00:00+00:00'
+ - timestamp: 1718150400000
+ timestamp_iso: '2024-06-12T00:00:00+00:00'
+ - timestamp: 1718179200000
+ timestamp_iso: '2024-06-12T08:00:00+00:00'
+ - timestamp: 1718208000000
+ timestamp_iso: '2024-06-12T16:00:00+00:00'
+ - timestamp: 1718236800000
+ timestamp_iso: '2024-06-13T00:00:00+00:00'
+ - timestamp: 1718265600000
+ timestamp_iso: '2024-06-13T08:00:00+00:00'
+ - timestamp: 1718294400000
+ timestamp_iso: '2024-06-13T16:00:00+00:00'
+ - timestamp: 1718323200000
+ timestamp_iso: '2024-06-14T00:00:00+00:00'
+ - timestamp: 1718352000000
+ timestamp_iso: '2024-06-14T08:00:00+00:00'
+ - timestamp: 1718380800000
+ timestamp_iso: '2024-06-14T16:00:00+00:00'
+ - timestamp: 1718409600000
+ timestamp_iso: '2024-06-15T00:00:00+00:00'
+ - timestamp: 1718438400000
+ timestamp_iso: '2024-06-15T08:00:00+00:00'
+ - timestamp: 1718467200000
+ timestamp_iso: '2024-06-15T16:00:00+00:00'
+ - timestamp: 1718496000000
+ timestamp_iso: '2024-06-16T00:00:00+00:00'
+ - timestamp: 1718524800000
+ timestamp_iso: '2024-06-16T08:00:00+00:00'
+ - timestamp: 1718553600000
+ timestamp_iso: '2024-06-16T16:00:00+00:00'
+ data:
+ - - 06e267b9-8714-4c58-ac53-df6a291b59dc
+ - temperature
+ - max
+ - 12.67
+ - 13.74
+ - null
+ - 7.49
+ - null
+ - null
+ - 9.34
+ - null
+ - null
+ - 9.03
+ - null
+ - null
+ - 12.98
+ - null
+ - null
+ - 11.8
+ - null
+ - null
+ - 12.12
+ - null
+ - null
+ - - 06e267b9-8714-4c58-ac53-df6a291b59dc
+ - humidity
+ - mean
+ - 80
+ - 88
+ - null
+ - 88
+ - null
+ - null
+ - 90
+ - null
+ - null
+ - 90
+ - null
+ - null
+ - 80
+ - null
+ - null
+ - 88
+ - null
+ - null
+ - 89
+ - null
+ - null
+ - - 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ - temperature
+ - max
+ - 12.78
+ - 13.06
+ - 13.35
+ - 13
+ - 14.84
+ - 14.59
+ - 13.08
+ - 15.41
+ - 15.1
+ - 14.99
+ - 18.64
+ - 18.35
+ - 13.02
+ - 18.19
+ - 17.75
+ - 13.4
+ - 16.53
+ - 14.69
+ - 13.77
+ - 19.27
+ - 19.12
+ - - 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ - humidity
+ - mean
+ - 83.125
+ - 80.875
+ - 78.5
+ - 81.875
+ - 62.375
+ - 76.5
+ - 85.375
+ - 61.75
+ - 74
+ - 82.875
+ - 57.5
+ - 62.5
+ - 81.625
+ - 81.5
+ - 88.875
+ - 87.25
+ - 64.5
+ - 84.875
+ - 87.875
+ - 73.125
+ - 69.75
+ data_axis: row
+ attributes:
+ role: input
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T13:02:54.733676+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:02:54.734610+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: HEADER_COLUMN
+ series:
+ summary: Query response example for `?render=SERIES`
+ value:
+ data:
+ - columns:
+ - timestamp
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: temperature
+ aggregation: max
+ - resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ metric: humidity
+ aggregation: mean
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: temperature
+ aggregation: max
+ - resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ metric: humidity
+ aggregation: mean
+ data:
+ - - 1717977600000
+ - 1718006400000
+ - 1718035200000
+ - 1718064000000
+ - 1718092800000
+ - 1718121600000
+ - 1718150400000
+ - 1718179200000
+ - 1718208000000
+ - 1718236800000
+ - 1718265600000
+ - 1718294400000
+ - 1718323200000
+ - 1718352000000
+ - 1718380800000
+ - 1718409600000
+ - 1718438400000
+ - 1718467200000
+ - 1718496000000
+ - 1718524800000
+ - 1718553600000
+ - - 12.67
+ - 13.74
+ - null
+ - 7.49
+ - null
+ - null
+ - 9.34
+ - null
+ - null
+ - 9.03
+ - null
+ - null
+ - 12.98
+ - null
+ - null
+ - 11.8
+ - null
+ - null
+ - 12.12
+ - null
+ - null
+ - - 80
+ - 88
+ - null
+ - 88
+ - null
+ - null
+ - 90
+ - null
+ - null
+ - 90
+ - null
+ - null
+ - 80
+ - null
+ - null
+ - 88
+ - null
+ - null
+ - 89
+ - null
+ - null
+ - - 12.78
+ - 13.06
+ - 13.35
+ - 13
+ - 14.84
+ - 14.59
+ - 13.08
+ - 15.41
+ - 15.1
+ - 14.99
+ - 18.64
+ - 18.35
+ - 13.02
+ - 18.19
+ - 17.75
+ - 13.4
+ - 16.53
+ - 14.69
+ - 13.77
+ - 19.27
+ - 19.12
+ - - 83.125
+ - 80.875
+ - 78.5
+ - 81.875
+ - 62.375
+ - 76.5
+ - 85.375
+ - 61.75
+ - 74
+ - 82.875
+ - 57.5
+ - 62.5
+ - 81.625
+ - 81.5
+ - 88.875
+ - 87.25
+ - 64.5
+ - 84.875
+ - 87.875
+ - 73.125
+ - 69.75
+ data_axis: row
+ attributes:
+ role: input
+ window_spec:
+ from: 1717977600000
+ until: 1718582400000
+ window: P7D
+ freq: PT8H
+ messages:
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'temperature', 'max'), aggregated on freq PT8H
+ contains 26 null values. These will NOT be
+ interpolated
+ level: warning
+ timestamp: '2024-07-01T13:01:39.240450+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ - message: >-
+ Series ('06e267b9-8714-4c58-ac53-df6a291b59dc',
+ 'humidity', 'mean'), aggregated on freq PT8H contains
+ 26 null values. These will NOT be interpolated
+ level: warning
+ timestamp: '2024-07-01T13:01:39.241481+00:00'
+ action: GET /queries/v1/data/weatherHistoryDailyAndHourly
+ category: data
+ query:
+ freq: PT8H
+ from_: 2024-06-16 12:00 AM
+ until: 2024-06-17 12:00 AM
+ data:
+ - metric: temperature
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 97de31f5-7b69-4a56-87e0-6b5c8a6b1872
+ aggregation: max
+ - metric: humidity
+ resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+ seriesId: 8eb0799f-1af7-4af2-8233-517cb4bdc3c6
+ aggregation: mean
+ - metric: temperature
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 5055a943-e1c0-41d6-a6c9-7b20427a742d
+ aggregation: max
+ - metric: humidity
+ resource: 7f33317b-a45e-4b0a-94a2-9bbbdc746cc1
+ seriesId: 16ac581f-f5ce-4845-9b6a-1b16249af051
+ aggregation: mean
+ render:
+ mode: SERIES
+ text/csv:
+ schema:
+ title: CSV timeseries data response.
+ description: >-
+ A CSV representation of the query result,
+
+ - the first `timestamp` column contains event timestamp in
+ iso format
+
+ - remaining columns contain the (aggregated or
+ non-aggregated) series values, using a
+ `{resource_id}/{metric}` or
+ `{resource_id}/{metric}/{aggregation}` header.
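+            # Illustrative sketch (not part of the schema): for the example query above,
+            # the CSV body would look roughly as follows (one column per series):
+            #   timestamp,06e267b9-8714-4c58-ac53-df6a291b59dc/temperature/max,06e267b9-8714-4c58-ac53-df6a291b59dc/humidity/mean
+            #   2024-06-10T00:00:00+00:00,12.67,80
+            #   2024-06-10T08:00:00+00:00,13.74,88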
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security:
+ - waylayApiKeySecret: []
+ x-py-method: execute_by_name
+ x-consumes-multipart: false
+ x-consumes-urlencoded: false
+ x-consumes-json: false
+ x-consumes-other: false
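+  # Illustrative note (assumption, not stated in the spec): the response examples above
+  # correspond to executions of a stored query by name, e.g.
+  #   GET /queries/v1/data/weatherHistoryDailyAndHourly?render=SERIES
+  # with the `render` parameter selecting one of the render modes shown in the examples.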
+ /queries/v1/queries/v1/query:
+ get:
+ tags:
+ - Manage
+ summary: List Queries
+ description: List named queries.
+ operationId: list_queries_queries_v1_query_get
+ parameters:
+ - name: q
+ in: query
+ required: false
+ schema:
+ type: string
+ title: Filter condition
+ description: >-
+ The QDSL filter condition for the stored queries. Note that this
+              value needs to be escaped when passed as a URL parameter.
+ default: ''
+ description: >-
+ The QDSL filter condition for the stored queries. Note that this
+            value needs to be escaped when passed as a URL parameter.
+ examples:
+ resource:
+ summary: resource
+ description: Filter queries that reference a given resource id.
+ value: resource:APL4995
+ user:
+ summary: current user
+ description: Filter queries on the user that created or updated the item.
+ value: user:@me
+ created:
+ summary: created after
+ description: Filter on a creation timestamp interval.
+ value: created:2020-02-20/
+ modified:
+ summary: last modified more than 2 days ago
+ description: Filter on a modification timestamp interval.
+ value: modified:/now-P2D
+ meta_contains:
+ summary: metadata contains
+ description: Filter on a string contained in a metadata property.
+              value: 'meta.comments:contains(''{ "topic" : "general" }'')'
+ meta_like:
+ summary: metadata matches
+ description: Filter on a pattern in a metadata property.
+ value: meta.description:like(*http*waylay.io*)
+ tag:
+ summary: tag combination
+ description: Filter queries on tag combinations.
+ value: tag:stable,demo,release_15 tag:demo,release_16 -tag:dummy
+ x-showExample: true
+ - name: limit
+ in: query
+ required: false
+ schema:
+ type: integer
+ maximum: 100
+ title: Page size
+            description: Maximal number of items returned in one response.
+ default: 10
+          description: Maximal number of items returned in one response.
+ x-showExample: true
+ - name: offset
+ in: query
+ required: false
+ schema:
+ type: integer
+ title: Page offset
+ description: >-
+              Number of items to skip before listing results in the response
+ page.
+ default: 0
+ description: >-
+            Number of items to skip before listing results in the response
+ page.
+ x-showExample: true
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueriesListResponse'
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security:
+ - waylayApiKeySecret: []
+ x-py-method: list
+ x-consumes-multipart: false
+ x-consumes-urlencoded: false
+ x-consumes-json: false
+ x-consumes-other: false
+ post:
+ tags:
+ - Manage
+ summary: Post Query
+ description: Create a new named query.
+ operationId: create_query_queries_v1_query_post
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueryEntityInput'
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueryResponse'
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security:
+ - waylayApiKeySecret: []
+ x-py-method: create
+ x-consumes-multipart: false
+ x-consumes-urlencoded: false
+ x-consumes-json: true
+ x-consumes-other: false
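+  # Illustrative sketch (assumption): a minimal QueryEntityInput body for the create
+  # (POST) operation above; the name and series values are examples only.
+  #   {
+  #     "name": "weatherHistoryDailyAndHourly",
+  #     "query": {
+  #       "freq": "PT8H",
+  #       "window": "P7D",
+  #       "data": [
+  #         {"resource": "06e267b9-8714-4c58-ac53-df6a291b59dc", "metric": "temperature", "aggregation": "max"},
+  #         {"resource": "06e267b9-8714-4c58-ac53-df6a291b59dc", "metric": "humidity", "aggregation": "mean"}
+  #       ]
+  #     }
+  #   }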
+ /queries/v1/queries/v1/query/{query_name}:
+ get:
+ tags:
+ - Manage
+ summary: Get Query
+ description: Get the definition of a named query.
+ operationId: get_query_queries_v1_query__query_name__get
+ parameters:
+ - name: query_name
+ in: path
+ required: true
+ schema:
+ type: string
+ title: Query Name
+ description: Name of the stored query.
+ description: Name of the stored query.
+ x-showExample: false
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueryResponse'
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security:
+ - waylayApiKeySecret: []
+ x-py-method: get
+ x-consumes-multipart: false
+ x-consumes-urlencoded: false
+ x-consumes-json: false
+ x-consumes-other: false
+ put:
+ tags:
+ - Manage
+ summary: Update Query
+ description: Create or update a named query definition.
+ operationId: update_query_queries_v1_query__query_name__put
+ parameters:
+ - name: query_name
+ in: path
+ required: true
+ schema:
+ type: string
+ title: Query Name
+ description: Name of the stored query.
+ description: Name of the stored query.
+ x-showExample: false
+ requestBody:
+ required: true
+ content:
+ application/json:
+ schema:
+ anyOf:
+ - $ref: '#/components/schemas/QueryUpdateInput'
+ - $ref: '#/components/schemas/Query-Input'
+ title: Query Definition
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/QueryResponse'
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security:
+ - waylayApiKeySecret: []
+ x-py-method: update
+ x-consumes-multipart: false
+ x-consumes-urlencoded: false
+ x-consumes-json: true
+ x-consumes-other: false
+ delete:
+ tags:
+ - Manage
+ summary: Remove Query
+ description: Remove definition of a named query.
+ operationId: remove_query_queries_v1_query__query_name__delete
+ parameters:
+ - name: query_name
+ in: path
+ required: true
+ schema:
+ type: string
+ title: Query Name
+ description: Name of the stored query.
+ description: Name of the stored query.
+ x-showExample: false
+ responses:
+ '200':
+ description: Successful Response
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/DeleteResponse'
+ '422':
+ description: Validation Error
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/HTTPValidationError'
+ security:
+ - waylayApiKeySecret: []
+ x-py-method: remove
+ x-consumes-multipart: false
+ x-consumes-urlencoded: false
+ x-consumes-json: false
+ x-consumes-other: false
+components:
+ schemas:
+ AggregationMethod:
+ type: string
+ oneOf:
+ - description: >-
+ Use the first value (in time) to represent all data for the sample
+ interval.
+ enum:
+ - first
+ - description: >-
+ Use the last value (in time) to represent all data for the sample
+ interval.
+ enum:
+ - last
+ - description: >-
+            Aggregate data by the mean value: the sum of values divided by
+            the number of observations.
+ enum:
+ - mean
+ - description: >-
+            Aggregate data by the median value: the middle value of the ordered
+            observations, or the average of the two middle values when their
+            count is even.
+ enum:
+ - median
+ - description: The sum of all values summarizes the data for the sample interval.
+ enum:
+ - sum
+ - description: Use the count of observations in the sample interval.
+ enum:
+ - count
+ - description: >-
+ Use the standard deviation of all observations in the sample
+ interval.
+ enum:
+ - std
+ - description: Use the maximum of all values in the sample interval.
+ enum:
+ - max
+ - description: Use the minimum of all values in the sample interval.
+ enum:
+ - min
+        - title: Percentile
+ description: >-
+ Aggregate data by the p-th percentile, where p is a number between 0
+ and 1.
+          pattern: ^percentile\(((1(\.0*)?)|(0\.[0-9]*))\)$
+ example: percentile(0.02)
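+    # Illustrative sketch (assumption): in a query, an aggregation can be a single
+    # method or a list of methods, each producing a separate series, e.g.
+    #   aggregation: [max, mean, 'percentile(0.95)']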
+ AlignAt:
+ type: string
+ enum:
+ - grid
+ - boundary
+ - from
+ - until
+ description: |-
+ Possible values for `align.at`.
+
+ * 'grid' Align to a fixed grid (possibly using timezone information)
+        * 'from' Align at the `from` boundary
+        * 'until' Align at the `until` boundary
+        * 'boundary' Align at the `from` boundary if specified,
+ otherwise the `until` boundary.
+
+ When not specified, 'grid' is used.
+ AlignShift:
+ type: string
+ enum:
+ - backward
+ - forward
+ - wrap
+ description: >-
+ Possible values for `align.shift`.
+
+
+ * 'backward': keep the window size of the original interval
+ specification,
+ shifting back.
+ * 'forward': keep the window size of the original interval
+ specification,
+ shifting forward.
+ * 'wrap': enlarge the window size to include all of the original
+ interval.
+
+
+ When not specified, 'backward' is used.
+ Alignment:
+ properties:
+ at:
+ $ref: '#/components/schemas/AlignAt'
+ title: Align At
+ description: >-
+ Method used to align the aggregation grid. The default value is
+ system-dependent (normally `grid`)
+ shift:
+ $ref: '#/components/schemas/AlignShift'
+ title: Align Shift
+ description: |
+
+ Specifies in what direction the query window is shifted
+ to match the alignment specification.
+ When not specified, defaults are:
+ - `backward` when only the `from` boundary is specified.
+ - `forward` when only the `until` boundary is specified.
+ - `wrap` otherwise (_none_ or _both_ boundaries specified).
+ freq:
+ oneOf:
+ - type: string
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: 'ISO8601 period '
+ description: >-
+ A period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ - type: string
+ description: >-
+ When `inferred` is specified, the frequency of aggregation will
+ be inferred from the main/first time series. This can be used to
+ regularize the time series
+ enum:
+ - inferred
+ type: string
+ title: Alignment Grid interval.
+ description: >
+
+ Defines the grid used to align the aggregation window.
+
+ The window will align at whole-unit multiples of this interval.
+
+
+            For intervals like `P1D`, which are timezone-dependent, use the
+
+ `align.timezone` to fix the absolute timestamp of the grid
+ boundaries.
+
+
+ If not specified, defaults to the `freq` aggregation interval.
+ timezone:
+ oneOf:
+ - type: string
+ title: Timezone Identifier
+ description: >-
+ [ICANN timezone
+ identifier](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones)
+ - type: string
+              pattern: (\+|-)\d\d:\d\d
+ title: UTC Offset
+ description: '[UTC offset](https://en.wikipedia.org/wiki/UTC_offset)'
+ type: string
+ title: Alignment Timezone.
+ description: |
+
+ The timezone to use when shifting boundaries, especially
+ at day granularity.
+ Also affects the rendering of timestamps when
+ `render.iso_timestamp` is enabled.
+
+ When not specified, the `UTC` timezone is used.
+ additionalProperties: true
+ type: object
+ description: |-
+ Aggregation Alignment Options.
+
+ Specifies how the aggregation grid is aligned.
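+    # Illustrative sketch (assumption): an `align` object combining the options above;
+    # the values are examples only.
+    #   align:
+    #     at: grid
+    #     shift: backward
+    #     freq: P1D
+    #     timezone: Europe/Brussels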
+ CauseException:
+ properties:
+ type:
+ type: string
+ title: Exception Type
+ message:
+ type: string
+ title: Exception Message
+ stacktrace:
+ items:
+ type: string
+ type: array
+ title: Stack Trace
+ additionalProperties: true
+ type: object
+ required:
+ - type
+ - message
+ - stacktrace
+ description: Describes the exception that caused a message.
+ ColumnDataSet:
+ properties:
+ attributes:
+ $ref: '#/components/schemas/DataSetAttributes'
+ window_spec:
+ $ref: '#/components/schemas/DataSetWindow'
+ data_axis:
+ type: string
+ enum:
+ - row
+ default: row
+ rows:
+ oneOf:
+ - x-prefixItems:
+ - const: resource
+ title: Index label for the resource attribute.
+ - const: metric
+ title: Index label for the metric attribute.
+ - x-prefixItems:
+ - const: resource
+ title: Index label for the resource attribute.
+ - const: metric
+ title: Index label for the metric attribute.
+ - const: aggregation
+ title: Index label for the aggregation attribute.
+ items:
+ anyOf:
+ - $ref: '#/components/schemas/ColumnIndexRowHeader'
+ - $ref: '#/components/schemas/RowHeader'
+ type: array
+ title: Row Headers
+ description: >-
+ Header Attributes for the index data.
+
+
+            The initial string-valued headers (normally `resource`,
+            `metric`, `aggregation`) indicate that each data row is prefixed
+            by these series attributes.
+
+
+ The remaining object-valued row headers contain the index data.
+ data:
+ items:
+ items:
+ $ref: '#/components/schemas/Datum'
+ type: array
+ type: array
+ title: Series
+ description: >-
+ All metric observation values for a single series. Prefixed by the
+ series attributes.
+ additionalProperties: true
+ type: object
+ required:
+ - rows
+ - data
+ description: |-
+ Column-oriented dataset with rows header.
+
+ Timeseries data layout with a rows header containing
+ the index data.
+ The data array contains series data prefixed by series attributes.
+ The `rows` index is prefix by the names of these series attributes.
+ Result for render options `data_axis=row` and `header_array=column`.
+ ColumnHeader:
+ properties:
+ resource:
+ type: string
+ title: Series resource id
+ metric:
+ type: string
+ title: Series metric
+ aggregation:
+ type: string
+ title: Aggregation applied to the series.
+ additionalProperties: true
+ type: object
+ required:
+ - resource
+ - metric
+ description: |-
+ Column attributes.
+
+ Attributes that identify and describe the data in this column.
+ ColumnIndexRowHeader:
+ type: string
+ description: Label for a series attribute
+ example: resource
+ DataAxisOption:
+ type: string
+ enum:
+ - row
+ - column
+ description: Allowed values for the render.data_axis option.
+ DataSetAttributes:
+ properties:
+ role:
+ $ref: '#/components/schemas/Role'
+ additionalProperties: true
+ type: object
+ description: |-
+ Data Set Attributes.
+
+ Data attributes that apply to all data in this set.
+ DataSetWindow:
+ properties:
+ until:
+ type: integer
+ title: Time Axis End
+ description: Exclusive higher bound of the time axis in unix epoch milliseconds.
+ window:
+ type: string
+ format: period
+ title: Time Axis Length
+ description: Time axis length as ISO8601 period.
+ freq:
+ type: string
+ format: period
+ title: Frequency
+ description: Time axis aggregation interval as an ISO8601 period .
+ additionalProperties: true
+ type: object
+ required:
+ - until
+ - window
+ - freq
+ description: |-
+ Data Window.
+
+ Statistics of the time axis of a data set.
+        Present with render option `include_window_spec=true`.
+ Datum:
+ oneOf:
+ - type: number
+ nullable: true
+ - type: string
+ nullable: true
+ - type: boolean
+ nullable: true
+ description: >-
+ A single metric value for a timeseries.
+
+
+ A null value indicates that no (aggregated/interpolated) value exists
+ for the corresponding timestamp.
+ DeleteResponse:
+ properties:
+ messages:
+ items:
+ $ref: '#/components/schemas/Message'
+ type: array
+ title: Messages
+ _links:
+ additionalProperties:
+ anyOf:
+ - $ref: '#/components/schemas/HALLink'
+ - items:
+ $ref: '#/components/schemas/HALLink'
+ type: array
+ type: object
+ title: ' Links'
+ description: HAL links, indexed by link relation.
+ _embeddings:
+ additionalProperties:
+ anyOf:
+ - $ref: '#/components/schemas/HALEmbedding'
+ - items:
+ $ref: '#/components/schemas/HALEmbedding'
+ type: array
+ type: object
+ title: ' Embeddings'
+          description: HAL embeddings, indexed by relation.
+ additionalProperties: true
+ type: object
+ description: Confirmation of a delete request.
+ HALEmbedding:
+ properties: {}
+ additionalProperties: true
+ type: object
+ description: Any embedded representation in a HAL response.
+ HALLink:
+ properties:
+ href:
+ type: string
+ title: Link URL
+ description: Target url for this link.
+ type:
+ type: string
+ title: Link type
+ description: Type of the resource referenced by this link.
+ method:
+ $ref: '#/components/schemas/HALLinkMethod'
+ title: Link method
+ description: Http method required to resolve the link.
+ additionalProperties: true
+ type: object
+ required:
+ - href
+ description: A link target in a HAL response.
+ HALLinkMethod:
+ type: string
+ enum:
+ - GET
+ - POST
+ - PUT
+ - DELETE
+ - PATCH
+ description: An http method that can be specified in a HAL link.
+ HALLinkRole:
+ type: string
+ enum:
+ - self
+ - first
+ - prev
+ - next
+ - last
+ - execute
+ description: Supported link and embedding roles in HAL representations.
+ HTTPValidationError:
+ properties:
+ detail:
+ items:
+ $ref: '#/components/schemas/ValidationError'
+ type: array
+ title: Detail
+ type: object
+ HeaderArrayOption:
+ type: string
+ enum:
+ - row
+ - column
+ description: Allowed values for the render.header_array option.
+ InterpolationMethod:
+ type: string
+ enum:
+ - pad
+ - fixed
+ - backfill
+ - linear
+ - zero
+ - slinear
+ - quadratic
+ - cubic
+ - polynomial
+ - spline
+ - from_derivatives
+ - pchip
+ - akima
+ description: Interpolation algorithm specifier.
+ InterpolationSpec:
+ properties:
+ method:
+ oneOf:
+ - description: >-
+ Interpolate with the value of the first observed point. This
+ method also extrapolates.
+ enum:
+ - pad
+ - description: >-
+ Interpolate with a fixed, user-specified value. This method also
+ extrapolates.
+ enum:
+ - fixed
+ - description: >-
+ Same as pad, but using the last observed value. This method also
+ extrapolates
+ enum:
+ - backfill
+ - description: >-
+ Linearly go from the first observed value of the gap to the last
+                observed one. This method also extrapolates.
+ enum:
+ - linear
+ - description: Use the value that is closest in time.
+ enum:
+ - nearest
+ - description: >-
+ Interpolate with a spline function of order 0, which is a
+ piecewise polynomial.
+ enum:
+ - zero
+ - description: >-
+ Interpolate with a spline function of order 1, which is a
+ piecewise polynomial.
+ enum:
+ - slinear
+ - description: >-
+ Interpolate with a spline function of order 2, which is a
+ piecewise polynomial.
+ enum:
+ - quadratic
+ - description: >-
+ Interpolate with a spline function of order 3, which is a
+ piecewise polynomial.
+ enum:
+ - cubic
+ - description: >-
+ Interpolate with a polynomial of the lowest possible degree
+                passing through the data points.
+ enum:
+ - polynomial
+ - description: Interpolate with a spline function of a user-specified order.
+ enum:
+ - spline
+ - description: Interpolate with the derivative of order 1.
+ enum:
+ - from_derivatives
+ - description: Interpolate with a piecewise cubic spline function.
+ enum:
+ - pchip
+ - description: >-
+ Interpolate with a non-smoothing spline of order 2, called Akima
+ interpolation.
+ enum:
+ - akima
+ type: string
+ title: Interpolation method
+ value:
+ type: integer
+ title: Interpolation parameter
+ description: >-
+ Optional parameter value for the interpolation method (see method
+ description).
+ order:
+ type: integer
+ title: Interpolation order
+ description: >-
+ Optional order parameter for the interpolation method (see method
+ description).
+ additionalProperties: true
+ type: object
+ required:
+ - method
+ description: >-
+ Defines whether, and how to treat missing values.
+
+
+ This can occur in two circumstances when aggregating (setting a sample
+ frequency):
+
+ * missing values: if there are missing (or invalid) values stored for
+
+ a given freq-interval,
+
+ "interpolation" specifies how to compute these.
+
+        * up-sampling: when the specified freq is smaller than the series’
+
+ actual frequency.
+
+ "interpolation" specifies how to compute intermediate values.
+ Message:
+ properties:
+ code:
+ anyOf:
+ - type: string
+ nullable: true
+ title: Code
+ message:
+ type: string
+ title: Message
+ level:
+ type: string
+ enum:
+ - debug
+ - info
+ - warning
+ - error
+ - fatal
+ default: info
+ args:
+ anyOf:
+ - type: object
+ nullable: true
+ title: Args
+ type: object
+ required:
+ - message
+ description: Individual (info/warning/error) message in a response.
+ MessageProperties:
+ properties:
+ resource:
+ type: string
+ title: Series resource id
+ metric:
+ type: string
+ title: Series metric
+ additionalProperties: true
+ type: object
+ description: Additional message arguments.
+ ObjectData:
+ properties:
+ timestamp:
+ $ref: '#/components/schemas/Timestamp'
+ timestamp_iso:
+ $ref: '#/components/schemas/TimestampIso'
+ role:
+ $ref: '#/components/schemas/Role'
+ resource:
+ type: string
+ title: Resource
+ description: Series resource id, if applicable for all values.
+ metric:
+ type: string
+ title: Metric
+ description: Series metric, if applicable for all values.
+ aggregation:
+ type: string
+ title: Aggregation
+ description: Series aggregation, if applicable for all values.
+ levels:
+ items:
+ type: string
+ type: array
+ title: Hierarchical Levels
+ description: >-
+ Attribute level names used to key the values for this observation.
+
+
+ Levels that are flattened have a dot-separated key.
+
+
+ If all observations have the same attribute for a level, that level
+ might be omitted.
+ example:
+ - resource
+ - metric
+ - aggregation
+ additionalProperties:
+ oneOf:
+ - type: object
+ title: Hierarchical Data
+ description: >-
+ Values for the series whose attributes corresponds with the key.
+ Keyed by sub-levels.
+ - $ref: '#/components/schemas/Datum'
+ description: Series value that corresponds with a (flattened) attribute key.
+ title: 'Data '
+ type: object
+ required:
+ - timestamp
+ description: Result data for a timestamp in object format.
+ ObjectDataSet:
+ properties:
+ attributes:
+ $ref: '#/components/schemas/DataSetAttributes'
+ window_spec:
+ $ref: '#/components/schemas/DataSetWindow'
+ data:
+ items:
+ $ref: '#/components/schemas/ObjectData'
+ type: array
+ title: Data
+ additionalProperties: true
+ type: object
+ required:
+ - data
+ description: >-
+ Data result in object format.
+
+
+ Result item when render option `render.header_array` is not set.
+
+
+ The data values are keyed by their attributes (`resource`, `metric`,
+ `aggregation`),
+
+ according to the render options:
+
+        * _hierarchical_: for each level, a sub-object is created
+ (e.g. `render.mode=hier_dict`)
+        * _flattened_: the key is a '.'-separated concatenation
+          of the attributes (e.g. `render.mode=flat_dict`)
+        * _mixed_: (e.g. `render.mode=metric_flat_dict`) a single level
+ (e.g. `metric`) is used as main key, any remaining levels
+ (`resource`,`aggregation`) are indicated with a flattened subkey.
+
+        When `render.roll_up=true`, the attribute levels that are the same for
+ all series are
+
+ not used as key, but reported as a data or table attribute.
+ QueriesListResponse:
+ properties:
+ messages:
+ items:
+ $ref: '#/components/schemas/Message'
+ type: array
+ title: Messages
+ queries:
+ items:
+ $ref: '#/components/schemas/QueryListItem'
+ type: array
+ title: Query item list
+ description: One page of matching query definitions.
+ count:
+ type: integer
+ title: Current page size
+ description: Number of query definitions returned in the current response.
+ offset:
+ type: integer
+ title: Page offset
+ description: Offset in the full listing (skipped definitions).
+ limit:
+ type: integer
+ title: Page size limit
+ description: Maximal number of query definitions returned in one response.
+ total_count:
+ type: integer
+ title: Total count
+ description: Total number of query definitions matching the filter.
+ _links:
+ $ref: '#/components/schemas/QueryListHALLinks'
+ additionalProperties: true
+ type: object
+ required:
+ - queries
+ - count
+ - offset
+ - limit
+ - _links
+ description: Listing of named queries, with paging links.
+ Query-Input:
+ properties:
+ resource:
+ type: string
+ title: Default Resource
+ description: Default resource for the series in the query.
+ metric:
+ type: string
+ title: Default Metric
+ description: Default metric for the series in the query.
+ aggregation:
+ anyOf:
+ - description: Aggregation method for a series in the query.
+ $ref: '#/components/schemas/AggregationMethod'
+ nullable: true
+ - items:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ nullable: true
+ type: array
+ title: Aggregations
+              description: Aggregation methods, leading to separate series.
+ nullable: true
+ - additionalProperties:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ nullable: true
+ - items:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ nullable: true
+ type: array
+ title: Aggregations
+                    description: Aggregation methods, leading to separate series.
+ nullable: true
+ type: object
+ title: Aggregation by Resource or Metric
+ description: Aggregation methods specified per resource or metric.
+ nullable: true
+ - additionalProperties:
+ anyOf:
+ - additionalProperties:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ nullable: true
+ - items:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ nullable: true
+ type: array
+ title: Aggregations
+                          description: Aggregation methods, leading to separate series.
+ nullable: true
+ type: object
+ title: Aggregation by Resource or Metric
+ description: Aggregation methods specified per resource or metric.
+ nullable: true
+ type: object
+ title: Aggregation by Resource and Metric
+ description: Aggregation methods specified per resource and metric.
+ nullable: true
+ title: Default Aggregation
+ description: Default aggregation method(s) for the series in the query.
+ interpolation:
+ anyOf:
+ - $ref: '#/components/schemas/InterpolationMethod'
+ title: Interpolation Method
+ - $ref: '#/components/schemas/InterpolationSpec'
+ title: Interpolation specification.
+ title: Default Interpolation
+ description: Default Interpolation method for the series (if aggregated).
+ freq:
+ oneOf:
+ - type: string
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: 'ISO8601 period '
+ description: >-
+ A period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ - type: string
+ description: >-
+ When `inferred` is specified, the frequency of aggregation will
+ be inferred from the main/first time series. This can be used to
+ regularize the time series
+ enum:
+ - inferred
+ type: string
+ title: Grouping interval
+ description: >-
+ Interval used to aggregate or regularize data. One of the [time
+ line](https://docs.waylay.io/#/api/query/?id=time-line-properties)
+ specifiers.
+ from:
+ oneOf:
+ - type: string
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ format: date-time
+ title: ISO8601 absolute timestamp
+ description: >-
+ A date or date-time in
+ [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is
+ assumed (`+00:00`)
+ example: '2018-03-21T12:23:00+01:00'
+ - type: integer
+ minimum: 0
+ title: UNIX epoch milliseconds
+ description: Absolute timestamp milliseconds in unix epoch since 1970-01-01.
+ example: 1534836422284
+ - type: string
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: ISO8601 Period Before Now
+ description: >-
+ Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ title: Time Window From
+ description: >-
+ The start of the time window for which results will be returned. One
+ of the [time
+ line](https://docs.waylay.io/#/api/query/?id=time-line-properties)
+ specifiers.
+ until:
+ oneOf:
+ - type: string
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ format: date-time
+ title: ISO8601 absolute timestamp
+ description: >-
+ A date or date-time in
+ [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is
+ assumed (`+00:00`)
+ example: '2018-03-21T12:23:00+01:00'
+ - type: integer
+ minimum: 0
+ title: UNIX epoch milliseconds
+ description: Absolute timestamp milliseconds in unix epoch since 1970-01-01.
+ example: 1534836422284
+ - type: string
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: ISO8601 Period Before Now
+ description: >-
+ Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ title: Time Window Until
+ description: >-
+ The end (not-inclusive) of the time window for which results will be
+ returned. One of the [time
+ line](https://docs.waylay.io/#/api/query/?id=time-line-properties)specifiers.
+ window:
+ oneOf:
+ - type: string
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: 'ISO8601 period '
+ description: >-
+ A period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ type: string
+ title: Window
+ description: >-
+ The absolute size of the time window for which results will be
+ returned. One of the [time
+ line](https://docs.waylay.io/#/api/query/?id=time-line-properties)
+ specifiers.
+ periods:
+ type: integer
+ title: Periods
+ description: >-
+ The size of the time window in number of `freq` units. One of the
+ [time
+ line](https://docs.waylay.io/#/api/query/?id=time-line-properties)
+ specifiers.
+ align:
+ $ref: '#/components/schemas/Alignment'
+ data:
+ items:
+ $ref: '#/components/schemas/SeriesSpec'
+ type: array
+ title: Series specifications
+ description: >-
+ List of series specifications. When not specified, a single default
+            series specification is assumed (`[{}]`, using the default
+ `metric`,`resource`, ... ).
+ render:
+ $ref: '#/components/schemas/Render'
+ additionalProperties: true
+ type: object
+ description: >-
+ Query definition for a Waylay analytics query.
+
+
+ See also [api
+ docs](https://docs.waylay.io/#/api/query/?id=data-query-json-representation).
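+    # Illustrative sketch (assumption): a query definition where the top-level defaults
+    # (`resource`, `freq`) apply to each series in `data`; the values are examples only.
+    #   resource: 06e267b9-8714-4c58-ac53-df6a291b59dc
+    #   freq: PT8H
+    #   from: P7D
+    #   data:
+    #     - metric: temperature
+    #       aggregation: max
+    #     - metric: humidity
+    #       aggregation: mean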
+ Query-Output:
+ properties:
+ resource:
+ type: string
+ title: Default Resource
+ description: Default resource for the series in the query.
+ metric:
+ type: string
+ title: Default Metric
+ description: Default metric for the series in the query.
+ aggregation:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ nullable: true
+ - items:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ nullable: true
+ type: array
+ title: Aggregations
+              description: Aggregation methods, leading to separate series.
+ nullable: true
+ - additionalProperties:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ nullable: true
+ - items:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ nullable: true
+ type: array
+ title: Aggregations
+                    description: Aggregation methods, leading to separate series.
+ nullable: true
+ type: object
+ title: Aggregation by Resource or Metric
+ description: Aggregation methods specified per resource or metric.
+ nullable: true
+ - additionalProperties:
+ anyOf:
+ - additionalProperties:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ nullable: true
+ - items:
+ anyOf:
+ - $ref: '#/components/schemas/AggregationMethod'
+ nullable: true
+ type: array
+ title: Aggregations
+                          description: Aggregation methods, leading to separate series.
+ nullable: true
+ type: object
+ title: Aggregation by Resource or Metric
+ description: Aggregation methods specified per resource or metric.
+ nullable: true
+ type: object
+ title: Aggregation by Resource and Metric
+ description: Aggregation methods specified per resource and metric.
+ nullable: true
+ title: Default Aggregation
+ description: Default aggregation method(s) for the series in the query.
+ interpolation:
+ anyOf:
+ - $ref: '#/components/schemas/InterpolationMethod'
+ title: Interpolation Method
+ - $ref: '#/components/schemas/InterpolationSpec'
+ title: Interpolation specification.
+ title: Default Interpolation
+ description: Default Interpolation method for the series (if aggregated).
+ freq:
+ oneOf:
+ - type: string
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: 'ISO8601 period '
+ description: >-
+ A period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ - type: string
+ description: >-
+ When `inferred` is specified, the frequency of aggregation will
+ be inferred from the main/first time series. This can be used to
+ regularize the time series
+ enum:
+ - inferred
+ type: string
+ title: Grouping interval
+ description: >-
+ Interval used to aggregate or regularize data. One of the [time
+ line](https://docs.waylay.io/#/api/query/?id=time-line-properties)
+ specifiers.
+ from:
+ oneOf:
+ - type: string
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ format: date-time
+ title: ISO8601 absolute timestamp
+ description: >-
+ A date or date-time in
+ [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is
+ assumed (`+00:00`)
+ example: '2018-03-21T12:23:00+01:00'
+ - type: integer
+ minimum: 0
+ title: UNIX epoch milliseconds
+ description: Absolute timestamp milliseconds in unix epoch since 1970-01-01.
+ example: 1534836422284
+ - type: string
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: ISO8601 Period Before Now
+ description: >-
+ Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ title: Time Window From
+ description: >-
+ The start of the time window for which results will be returned. One
+ of the [time
+ line](https://docs.waylay.io/#/api/query/?id=time-line-properties)
+ specifiers.
+ until:
+ oneOf:
+ - type: string
+ pattern: '[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?'
+ format: date-time
+ title: ISO8601 absolute timestamp
+ description: >-
+ A date or date-time in
+ [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations)
+ format. When no timezone is specified, the UTC timezone is
+ assumed (`+00:00`)
+ example: '2018-03-21T12:23:00+01:00'
+ - type: integer
+ minimum: 0
+ title: UNIX epoch milliseconds
+ description: Absolute timestamp milliseconds in unix epoch since 1970-01-01.
+ example: 1534836422284
+ - type: string
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: ISO8601 Period Before Now
+ description: >-
+ Specifies a timestamp before _now_ as a period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ title: Time Window Until
+ description: >-
+ The end (not-inclusive) of the time window for which results will be
+ returned. One of the [time
+ line](https://docs.waylay.io/#/api/query/?id=time-line-properties)specifiers.
+ window:
+ oneOf:
+ - type: string
+ pattern: >-
+ ^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\.[0-9]*)?S)?)?$
+ format: period
+ title: 'ISO8601 period '
+ description: >-
+ A period in [ISO8601
+ duration](https://en.wikipedia.org/wiki/ISO_8601#Durations)
+ format.
+ example: PT3H15M
+ type: string
+ title: Window
+ description: >-
+ The absolute size of the time window for which results will be
+ returned. One of the [time
+ line](https://docs.waylay.io/#/api/query/?id=time-line-properties)
+ specifiers.
+ periods:
+ type: integer
+ title: Periods
+ description: >-
+ The size of the time window in number of `freq` units. One of the
+ [time
+ line](https://docs.waylay.io/#/api/query/?id=time-line-properties)
+ specifiers.
+ align:
+ $ref: '#/components/schemas/Alignment'
+ data:
+ items:
+ $ref: '#/components/schemas/SeriesSpec'
+ type: array
+ title: Series specifications
+ description: >-
+ List of series specifications. When not specified, a single default
+            series specification is assumed (`[{}]`, using the default
+ `metric`,`resource`, ... ).
+ render:
+ $ref: '#/components/schemas/Render'
+ additionalProperties: true
+ type: object
+ description: >-
+ Query definition for a Waylay analytics query.
+
+
+ See also [api
+ docs](https://docs.waylay.io/#/api/query/?id=data-query-json-representation).
+ QueryEntityInput:
+ properties:
+ name:
+ type: string
+ title: Query name
+ description: Name of the stored query definition.
+ meta:
+ type: object
+ title: Query metadata
+ description: User metadata for the query definition.
+ query:
+ $ref: '#/components/schemas/Query-Input'
+ additionalProperties: true
+ type: object
+ required:
+ - name
+ - query
+ description: Input data to create a query definition.
+ QueryExecutionMessage:
+ properties:
+ message:
+ type: string
+ title: Message
+ description: A human readable message.
+ level:
+ type: string
+ enum:
+ - debug
+ - info
+ - warning
+ - error
+ timestamp:
+ type: string
+ format: date-time
+ title: Timestamp
+ action:
+ type: string
+ title: Action
+ description: The request action that caused this message.
+ category:
+ type: string
+ title: Message Category
+ description: The subsystem that issued this message.
+ example: data
+ properties:
+ anyOf:
+ - type: string
+ - $ref: '#/components/schemas/MessageProperties'
+ title: Message Arguments
+ exception:
+ $ref: '#/components/schemas/CauseException'
+ title: ''
+ description: ''
+ additionalProperties: true
+ type: object
+ required:
+ - message
+ - level
+ - timestamp
+ - action
+ - category
+ description: A message object that informs or warns about a query execution issue.
+ QueryHALLinks:
+ properties:
+ self:
+ $ref: '#/components/schemas/HALLink'
+ title: Self link
+ description: Link to the query definition.
+ execute:
+ $ref: '#/components/schemas/HALLink'
+ title: Execute link
+ description: Link to the query execution.
+ additionalProperties: true
+ type: object
+ required:
+ - self
+ - execute
+ description: HAL Links for a query entity.
+ QueryListHALLinks:
+ properties:
+ self:
+ $ref: '#/components/schemas/HALLink'
+ title: Self link
+ description: Link to the current page of results.
+ first:
+ $ref: '#/components/schemas/HALLink'
+ title: First page link
+ description: Link to the first page of results.
+ prev:
+ $ref: '#/components/schemas/HALLink'
+ title: Previous page link
+ description: Link to the previous page of results.
+ next:
+ $ref: '#/components/schemas/HALLink'
+ title: Next page link
+ description: Link to the next page of results.
+ last:
+ $ref: '#/components/schemas/HALLink'
+ title: Last page link
+ description: Link to the last page of results.
+ additionalProperties: true
+ type: object
+ required:
+ - self
+ description: HAL Links for a query entity.
+ QueryListItem:
+ properties:
+ _links:
+ $ref: '#/components/schemas/QueryHALLinks'
+ attrs:
+ type: object
+ title: Query attributes
+ description: System provided metadata for the query definition.
+ name:
+ type: string
+ title: Query name
+ description: Name of the stored query definition.
+ meta:
+ type: object
+ title: Query metadata
+ description: User metadata for the query definition.
+ additionalProperties: true
+ type: object
+ required:
+ - _links
+ - attrs
+ - name
+ description: Listing of a query definition item.
+ QueryResponse:
+ properties:
+ _links:
+ $ref: '#/components/schemas/QueryHALLinks'
+ attrs:
+ type: object
+ title: Query attributes
+ description: System provided metadata for the query definition.
+ name:
+ type: string
+ title: Query name
+ description: Name of the stored query definition.
+ meta:
+ type: object
+ title: Query metadata
+ description: User metadata for the query definition.
+ query:
+ $ref: '#/components/schemas/Query-Output'
+ messages:
+ items:
+ $ref: '#/components/schemas/Message'
+ type: array
+ title: Messages
+ additionalProperties: true
+ type: object
+ required:
+ - _links
+ - attrs
+ - name
+ - query
+ description: Represents a single named query.
+ QueryResult:
+ properties:
+ data:
+ items:
+ anyOf:
+ - $ref: '#/components/schemas/RowDataSet'
+ - $ref: '#/components/schemas/SeriesDataSet'
+ - $ref: '#/components/schemas/ColumnDataSet'
+ - $ref: '#/components/schemas/ObjectDataSet'
+ title: Response Data Set
+ description: Result timeseries data set, with one time dimension.
+ type: array
+ title: Response Data Sets
+ description: >-
+ A list of data sets, each with their own time axis. There will be
+ one dataset for each `role` specified in the query (by default a
+ single `input` role).
+
+
+ The data is represented according to the `render` options in the
+ query (default `COMPACT_WS`).
+ query:
+ $ref: '#/components/schemas/Query-Input'
+          description: The query that led to this result.
+ messages:
+ items:
+ $ref: '#/components/schemas/QueryExecutionMessage'
+ type: array
+ title: Messages and Warnings
+ additionalProperties: true
+ type: object
+ required:
+ - data
+ - query
+ - messages
+ description: |-
+        A JSON data response.
+
+ Uses the format as specified by the
+ `render` options of the request (defaults to `COMPACT_WS`).
+ '
+ example:
+ data: []
+ query:
+ resource: R
+ metric: temperature
+ messages: []
+ QueryUpdateInput:
+ properties:
+ meta:
+ type: object
+ title: Query metadata
+ description: User metadata for the query definition.
+ query:
+ $ref: '#/components/schemas/Query-Input'
+ title: Query definition
+ additionalProperties: true
+ type: object
+ description: Input data to update a query definition.
+ Render:
+ properties:
+ mode:
+ $ref: '#/components/schemas/_RenderMode'
+ title: Named configuration of render options.
+ description: >-
+ A render mode combines a number of render option under a single
+            name. Each option can still be overridden by an explicit value.
+ default: COMPACT_WS
+ roll_up:
+ type: boolean
+ title: Roll Up
+ description: >-
+            Move attributes that are the same for all rows (or columns)
+            up to a table attribute. Levels enumerated in 'hierarchical'
+            are excluded.
+ hierarchical:
+ anyOf:
+ - type: boolean
+ - items:
+ type: string
+ type: array
+ title: Hierarchical
+ description: >-
+ if true, use hierarchical objects to represent multiple row (or
+ column) dimensions, otherwise multi-keys get concatenated with a
+ dot-delimiter. If the value is a list, only these levels are kept as
+ separate levels, while remaining levels get concatenated keys
+ value_key:
+ type: string
+ title: Value Key
+ description: if set, use this key in the value object to report data values
+ show_levels:
+ type: boolean
+ title: Show Levels
+ description: >-
+ if set, report the levels used in the data values (either
+ hierarchical or flat)
+ iso_timestamp:
+ type: boolean
+ title: Iso Timestamp
+ description: >-
+ if set, render timestamps in a row or column index with both epoch
+ and iso representations
+ row_key:
+ type: string
+ title: Row Key
+ description: >-
+ if set, use this key as name of the row-dimension for
+ single-dimensional rows
+ column_key:
+ type: string
+ title: Column Key
+ description: >-
+ if set, use this key as name of the column-dimension for
+ single-dimensional columns
+ header_array:
+ $ref: '#/components/schemas/HeaderArrayOption'
+          description: 'if set, report data as a header and an array.'
+ data_axis:
+ $ref: '#/components/schemas/DataAxisOption'
+          description: orientation of the tabular data as an array of arrays
+ key_seperator:
+ type: string
+ title: Key Seperator
+ description: >-
+ character used to concatenate multi-key columns or rows when
+ required
+ key_skip_empty:
+ type: boolean
+ title: Key Skip Empty
+ description: skip empty values in concatenating multi-key column or row headers
+ include_window_spec:
+ type: boolean
+ title: Include Window Spec
+ description: if set, include window specification in render modes that support it
+ additionalProperties: true
+ type: object
+ description: Configures the representation of data sets returned by the query API.
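+    # Illustrative sketch (assumption): a `render` specification; the mode and option
+    # values are examples only.
+    #   render:
+    #     mode: SERIES
+    #     iso_timestamp: true
+    #     roll_up: true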
+ Role:
+ type: string
+      description: The role of the series specification used to compile this data set.
+ example: input
+ RowDataSet:
+ properties:
+ attributes:
+ $ref: '#/components/schemas/DataSetAttributes'
+ window_spec:
+ $ref: '#/components/schemas/DataSetWindow'
+ data_axis:
+ type: string
+ enum:
+ - column
+ default: column
+ columns:
+ items:
+ anyOf:
+ - $ref: '#/components/schemas/RowIndexColumnHeader'
+ - $ref: '#/components/schemas/ColumnHeader'
+ type: array
+ title: Column Headers
+ description: >-
+ Header Attributes for the column data.
+
+
+ The initial string-valued headers (normally a single `timestamp`)
+            indicate that the column contains row index data (i.e. timestamps).
+
+
+ The remaining object-valued column headers identify and describe the
+ actual series data.
+ x-prefixItems:
+ - const: timestamp
+ title: Unix epoch milliseconds timestamp.
+ data:
+ items:
+ items:
+ $ref: '#/components/schemas/Datum'
+ type: array
+ title: Observation
+ description: Row index data (timestamp), and a value for each of the series.
+ x-prefixItems:
+ - $ref: '#/components/schemas/Timestamp'
+ type: array
+ title: Data
+ additionalProperties: true
+ type: object
+ required:
+ - columns
+ - data
+ description: >-
+ Row-oriented dataset.
+
+
+ Timeseries data layout with a column header and a data row per
+ timestamp.
+
+        Result for render options `data_axis=column` and `header_array=row`.
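+    # Illustrative shape only (not part of the generated spec): a RowDataSet with a
+    # single temperature series would look like
+    #   columns: ["timestamp", {"resource": "R", "metric": "temperature"}]
+    #   data: [[1534836422284, 20.5], [1534836482284, 20.7]]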
+ RowHeader:
+ properties:
+ timestamp:
+ $ref: '#/components/schemas/Timestamp'
+ timestamp_iso:
+ $ref: '#/components/schemas/TimestampIso'
+ additionalProperties: true
+ type: object
+ required:
+ - timestamp
+ description: |-
+ Index entry attributes.
+
+ Attributes for a timestamp index entry.
+ RowIndexColumnHeader:
+ type: string
+ description: >-
+ Header for a column containing a (representation of) the row index
+ value.
+
+ These headers precede the header attributes for row data.
+ example: timestamp
+ SeriesDataSet:
+ properties:
+ attributes:
+ $ref: '#/components/schemas/DataSetAttributes'
+ window_spec:
+ $ref: '#/components/schemas/DataSetWindow'
+ data_axis:
+ type: string
+ enum:
+ - row
+ default: row
+ columns:
+ items:
+ anyOf:
+ - $ref: '#/components/schemas/RowIndexColumnHeader'
+ - $ref: '#/components/schemas/ColumnHeader'
+ type: array
+ title: Column Headers
+ description: >-
+ Header Attributes for the column data.
+
+
+ The initial string-valued headers (normally a single `timestamp`)
+            indicate that the column contains row index data (i.e. timestamps).
+
+
+ The remaining object-valued column headers identify and describe the
+ actual series data.
+ x-prefixItems:
+ - const: timestamp
+ title: Unix epoch milliseconds timestamp.
+ data:
+ items:
+ items:
+ $ref: '#/components/schemas/Datum'
+ type: array
+ title: Series
+ description: All metric observation values for a single series.
+ type: array
+ title: Data
+ x-prefixItems:
+ - items:
+ $ref: '#/components/schemas/Timestamp'
+ type: array
+ title: Timestamp Index
+ description: The timestamp index for this result data.
+ additionalProperties: true
+ type: object
+ required:
+ - columns
+ - data
+ description: |-
+ Column-oriented dataset.
+
+ Timeseries data layout with a column header
+        and a separate data array for the time index and each series.
+ Result for render options `data_axis=row` and `header_array=row`.
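+    # Illustrative shape only (not part of the generated spec): the same series as a
+    # SeriesDataSet, with the timestamp index and each series as separate arrays
+    #   columns: ["timestamp", {"resource": "R", "metric": "temperature"}]
+    #   data: [[1534836422284, 1534836482284], [20.5, 20.7]]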
+ SeriesSpec:
+ properties:
+ name:
+ type: string
+ title: Name
+ description: >-
+ Optional alias name for the series. This name is used when exporting
+ the dataset to CSV format.
+ example: demoQuery
+ resource:
+ type: string
+ title: Resource
+ description: >-
+ Resource id for the series, required unless it is specified as a
+ query default.
+ example: 13efb488-75ac-4dac-828a-d49c5c2ebbfc
+ metric:
+ type: string
+ title: Metric
+ description: >-
+ Metric name for the series, required unless it is specified as a
+ query default.
+ example: temperature
+ aggregration:
+ $ref: '#/components/schemas/AggregationMethod'
+ interpolation:
+ anyOf:
+ - $ref: '#/components/schemas/InterpolationMethod'
+ title: Interpolation Method
+ - $ref: '#/components/schemas/InterpolationSpec'
+ title: Interpolation specification.
+ title: Interpolation
+ additionalProperties: true
+ type: object
+ description: Query specification for a single series.
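+    # Illustrative only (not part of the generated spec): a minimal series
+    # specification, e.g.
+    #   {"resource": "13efb488-75ac-4dac-828a-d49c5c2ebbfc", "metric": "temperature"}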
+ Timestamp:
+ type: integer
+ description: Unix epoch milliseconds timestamp.
+ TimestampIso:
+ type: string
+ format: date-time
+ description: >-
+ ISO8601 rendering of the timestamp, present when
+ `render.iso_timestamp=true`
+ ValidationError:
+ properties:
+ loc:
+ items:
+ anyOf:
+ - type: string
+ - type: integer
+ type: array
+ title: Location
+ msg:
+ type: string
+ title: Message
+ type:
+ type: string
+ title: Error Type
+ type: object
+ required:
+ - loc
+ - msg
+ - type
+ _RenderMode:
+ type: string
+ oneOf:
+ - description: >-
+ Render rows of timestamp and values. Show column headers. Includes
+ an iso timestamp.
+
+
+ ###### options
+
+ - `iso_timestamp`: `True`
+
+ - `header_array`: `row`
+
+ - `roll_up`: `False`
+
+ - `data_axis`: `column`
+ type: string
+ enum:
+ - HEADER_ROW
+ - description: |-
+ Render rows of timestamp and values. Show column headers.
+
+ ###### options
+ - `iso_timestamp`: `False`
+ - `header_array`: `row`
+ - `roll_up`: `False`
+ - `data_axis`: `column`
+ type: string
+ enum:
+ - COMPACT
+ - description: >-
+ Render rows of timestamp and values. Show column headers. Show the
+ time window attributes.
+
+
+ ###### options
+
+ - `iso_timestamp`: `False`
+
+ - `header_array`: `row`
+
+ - `roll_up`: `False`
+
+ - `data_axis`: `column`
+
+ - `include_window_spec`: `True`
+ type: string
+ enum:
+ - COMPACT_WS
+ - description: >-
+ Render timestamps and each series (column) as a values array. Show
+ column headers.
+
+
+ ###### options
+
+ - `iso_timestamp`: `False`
+
+ - `header_array`: `row`
+
+ - `data_axis`: `row`
+
+ - `roll_up`: `True`
+
+ - `include_window_spec`: `True`
+ type: string
+ enum:
+ - SERIES
+ - description: >-
+ Renders row index in `rows`, and each series as a values array.
+
+
+            The series are prefixed by their series attributes. The `rows` index
+ is prefixed by the labels for these attributes.
+
+
+ ###### options
+
+ - `iso_timestamp`: `True`
+
+ - `header_array`: `column`
+
+ - `roll_up`: `False`
+
+ - `data_axis`: `row`
+ type: string
+ enum:
+ - HEADER_COLUMN
+ - description: |-
+ Render an object for each observation. Uses flattened keys.
+
+ ###### options
+ - `iso_timestamp`: `True`
+ - `hierarchical`: `False`
+ - `show_levels`: `True`
+ - `roll_up`: `False`
+ type: string
+ enum:
+ - FLAT_DICT
+ - description: >-
+            Render a hierarchical object for each observation. Shows an iso
+ timestamp.
+
+
+ ###### options
+
+ - `iso_timestamp`: `True`
+
+ - `hierarchical`: `True`
+
+ - `show_levels`: `True`
+
+ - `roll_up`: `True`
+ type: string
+ enum:
+ - HIER_DICT
+ - description: >-
+ Render an object with metric keys for each observation. Shows an iso
+ timestamp.
+
+
+ ###### options
+
+ - `iso_timestamp`: `True`
+
+ - `hierarchical`: `['metric']`
+
+ - `show_levels`: `False`
+
+ - `roll_up`: `True`
+
+ - `key_skip_empty`: `True`
+ type: string
+ enum:
+ - METRIC_FLAT_DICT
+ - description: >-
+ Render in an object format compatible with the `/data/v1/events`
+ upload.
+
+
+ ###### options
+
+ - `iso_timestamp`: `False`
+
+ - `hierarchical`: `False`
+
+ - `show_levels`: `False`
+
+ - `roll_up`: `True`
+ type: string
+ enum:
+ - UPLOAD
+ - description: |-
+ Render in csv format with row headers.
+
+ ###### options
+ - `iso_timestamp`: `False`
+ type: string
+ enum:
+ - CSV
+ description: Render mode configuration keys.
+ securitySchemes:
+ waylayApiKeySecret:
+ type: http
+ description: >-
+        Waylay apiKey/apiSecret basic authentication. All endpoints also support
+        Waylay JWT Bearer authentication.
+ scheme: basic
+tags:
+ - name: Execute
+ description: Execute a named or ad-hoc query.
+ - name: Manage
+ description: List, read, create, update and remove queries that are stored by name.
+ - name: Status
+ description: Inspect the technical status of the waylay-query service.
+servers:
+ - url: https://api.waylay.io
+ description: Waylay enterprise gateway
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..5b2aa9d
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,21 @@
+[tool.ruff]
+include = ["pyproject.toml", "test/**/*.py"]
+
+[tool.ruff.lint]
+# allow duplicate imports
+ignore=["F811"]
+# https://docs.astral.sh/ruff/rules
+select= [
+ "UP007", "FA102", # convert Union to | (pep-604)
+ "I001", "F401", # sort and remove unused imports
+ "PIE790", # remove unnecessary pass statements
+ "E303", # too many blank lines
+]
+
+[tool.ruff.lint.per-file-ignores]
+# do not touch imports here
+"__init__.py" = ["F401"]
+"conftest.py" = ["F401"]
+
+[tool.pytest.ini_options]
+asyncio_mode = "auto"
\ No newline at end of file
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..85733e4
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,3 @@
+pydantic ~= 2.6
+typing-extensions ~= 4.9
+waylay-sdk-core ~= 0.2.3
diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 0000000..3f2c4e3
--- /dev/null
+++ b/test/__init__.py
@@ -0,0 +1 @@
+"""Waylay Query: timeseries queries (v1 protocol)."""
diff --git a/test/api/__init__.py b/test/api/__init__.py
new file mode 100644
index 0000000..9e988c9
--- /dev/null
+++ b/test/api/__init__.py
@@ -0,0 +1 @@
+"""Waylay Query: timeseries queries (v1 protocol) tests."""
diff --git a/test/api/__pycache__/__init__.cpython-311.pyc b/test/api/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..697ad5b
Binary files /dev/null and b/test/api/__pycache__/__init__.cpython-311.pyc differ
diff --git a/test/api/__pycache__/execute_api_test.cpython-311-pytest-8.3.2.pyc b/test/api/__pycache__/execute_api_test.cpython-311-pytest-8.3.2.pyc
new file mode 100644
index 0000000..af9ecc8
Binary files /dev/null and b/test/api/__pycache__/execute_api_test.cpython-311-pytest-8.3.2.pyc differ
diff --git a/test/api/__pycache__/manage_api_test.cpython-311-pytest-8.3.2.pyc b/test/api/__pycache__/manage_api_test.cpython-311-pytest-8.3.2.pyc
new file mode 100644
index 0000000..f4e2f4c
Binary files /dev/null and b/test/api/__pycache__/manage_api_test.cpython-311-pytest-8.3.2.pyc differ
diff --git a/test/api/__pycache__/status_api_test.cpython-311-pytest-8.3.2.pyc b/test/api/__pycache__/status_api_test.cpython-311-pytest-8.3.2.pyc
new file mode 100644
index 0000000..9fb9f67
Binary files /dev/null and b/test/api/__pycache__/status_api_test.cpython-311-pytest-8.3.2.pyc differ
diff --git a/test/api/execute_api_test.py b/test/api/execute_api_test.py
new file mode 100644
index 0000000..f203eee
--- /dev/null
+++ b/test/api/execute_api_test.py
@@ -0,0 +1,203 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) api tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+import re
+from importlib.util import find_spec
+from typing import Union
+from urllib.parse import quote
+
+import pytest
+from pytest_httpx import HTTPXMock
+from typeguard import check_type
+from waylay.sdk import ApiClient, WaylayClient
+from waylay.sdk.api._models import Model
+from waylay.services.queries.api import ExecuteApi
+from waylay.services.queries.service import QueriesService
+
+from ..types.aggregation_method_stub import AggregationMethodStub
+from ..types.from_override_stub import FromOverrideStub
+from ..types.grouping_interval_override_stub import GroupingIntervalOverrideStub
+from ..types.interpolation_stub import InterpolationStub
+from ..types.query_input_stub import QueryInputStub
+from ..types.query_result_stub import QueryResultStub
+from ..types.render1_stub import Render1Stub
+from ..types.window_override_stub import WindowOverrideStub
+
+MODELS_AVAILABLE = find_spec("waylay.services.queries.models") is not None
+
+if MODELS_AVAILABLE:
+ from waylay.services.queries.models import QueryResult
+ from waylay.services.queries.queries.execute_api import (
+ ExecuteByNameQuery,
+ ExecuteQuery,
+ )
+
+
+# JSON literals bound to Python names, so interpolated JSON example payloads evaluate as-is
+null, true, false = None, True, False
+
+
+@pytest.fixture
+def execute_api(waylay_api_client: ApiClient) -> ExecuteApi:
+ return ExecuteApi(waylay_api_client)
+
+
+def test_registered(waylay_client: WaylayClient):
+ """Test that ExecuteApi api is registered in the sdk client."""
+ assert isinstance(waylay_client.queries.execute, ExecuteApi)
+
+
+def _execute_by_name_set_mock_response(
+ httpx_mock: HTTPXMock, gateway_url: str, query_name: str
+):
+ mock_response = QueryResultStub.create_json()
+ httpx_mock_kwargs = {
+ "method": "GET",
+ "url": re.compile(
+ f"^{gateway_url}/queries/v1/queries/v1/data/{query_name}(\\?.*)?"
+ ),
+ "content": json.dumps(mock_response, default=str),
+ "status_code": 200,
+ }
+ httpx_mock.add_response(**httpx_mock_kwargs)
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(not MODELS_AVAILABLE, reason="Types not installed.")
+async def test_execute_by_name(
+ service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock
+):
+ """Test case for execute_by_name
+ Execute Named Query
+ """
+ # set path params
+ query_name = "query_name_example"
+
+ kwargs = {
+ # optionally use ExecuteByNameQuery to validate and reuse parameters
+ "query": ExecuteByNameQuery(
+ resource="13efb488-75ac-4dac-828a-d49c5c2ebbfc",
+ metric="temperature",
+ aggregation=AggregationMethodStub.create_json(),
+ interpolation=InterpolationStub.create_json(),
+ freq=GroupingIntervalOverrideStub.create_json(),
+ var_from=FromOverrideStub.create_json(),
+ until=FromOverrideStub.create_json(),
+ window=WindowOverrideStub.create_json(),
+ periods=56,
+ render=Render1Stub.create_json(),
+ ),
+ }
+ _execute_by_name_set_mock_response(httpx_mock, gateway_url, quote(str(query_name)))
+ resp = await service.execute.execute_by_name(query_name, **kwargs)
+ check_type(resp, Union[QueryResult,])
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(MODELS_AVAILABLE, reason="Types installed.")
+async def test_execute_by_name_without_types(
+ service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock
+):
+ """Test case for execute_by_name with models not installed
+ Execute Named Query
+ """
+ # set path params
+ query_name = "query_name_example"
+
+ kwargs = {
+ "query": {
+ "resource": "13efb488-75ac-4dac-828a-d49c5c2ebbfc",
+ "metric": "temperature",
+ "aggregation": AggregationMethodStub.create_json(),
+ "interpolation": InterpolationStub.create_json(),
+ "freq": GroupingIntervalOverrideStub.create_json(),
+ "from": FromOverrideStub.create_json(),
+ "until": FromOverrideStub.create_json(),
+ "window": WindowOverrideStub.create_json(),
+ "periods": 56,
+ "render": Render1Stub.create_json(),
+ },
+ }
+ _execute_by_name_set_mock_response(httpx_mock, gateway_url, quote(str(query_name)))
+ resp = await service.execute.execute_by_name(query_name, **kwargs)
+ check_type(resp, Model)
+
+
+def _execute_set_mock_response(httpx_mock: HTTPXMock, gateway_url: str):
+ mock_response = QueryResultStub.create_json()
+ httpx_mock_kwargs = {
+ "method": "POST",
+ "url": re.compile(f"^{gateway_url}/queries/v1/queries/v1/data(\\?.*)?"),
+ "content": json.dumps(mock_response, default=str),
+ "status_code": 200,
+ }
+ httpx_mock.add_response(**httpx_mock_kwargs)
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(not MODELS_AVAILABLE, reason="Types not installed.")
+async def test_execute(
+ service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock
+):
+ """Test case for execute
+ Execute Query
+ """
+ # set path params
+ kwargs = {
+ # optionally use ExecuteQuery to validate and reuse parameters
+ "query": ExecuteQuery(
+ resource="13efb488-75ac-4dac-828a-d49c5c2ebbfc",
+ metric="temperature",
+ aggregation=AggregationMethodStub.create_json(),
+ interpolation=InterpolationStub.create_json(),
+ freq=GroupingIntervalOverrideStub.create_json(),
+ var_from=FromOverrideStub.create_json(),
+ until=FromOverrideStub.create_json(),
+ window=WindowOverrideStub.create_json(),
+ periods=56,
+ render=Render1Stub.create_json(),
+ ),
+ "json": QueryInputStub.create_instance(),
+ }
+ _execute_set_mock_response(httpx_mock, gateway_url)
+ resp = await service.execute.execute(**kwargs)
+ check_type(resp, Union[QueryResult,])
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(MODELS_AVAILABLE, reason="Types installed.")
+async def test_execute_without_types(
+ service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock
+):
+ """Test case for execute with models not installed
+ Execute Query
+ """
+ # set path params
+ kwargs = {
+ "query": {
+ "resource": "13efb488-75ac-4dac-828a-d49c5c2ebbfc",
+ "metric": "temperature",
+ "aggregation": AggregationMethodStub.create_json(),
+ "interpolation": InterpolationStub.create_json(),
+ "freq": GroupingIntervalOverrideStub.create_json(),
+ "from": FromOverrideStub.create_json(),
+ "until": FromOverrideStub.create_json(),
+ "window": WindowOverrideStub.create_json(),
+ "periods": 56,
+ "render": Render1Stub.create_json(),
+ },
+ "json": QueryInputStub.create_json(),
+ }
+ _execute_set_mock_response(httpx_mock, gateway_url)
+ resp = await service.execute.execute(**kwargs)
+ check_type(resp, Model)
diff --git a/test/api/manage_api_test.py b/test/api/manage_api_test.py
new file mode 100644
index 0000000..fea61ed
--- /dev/null
+++ b/test/api/manage_api_test.py
@@ -0,0 +1,289 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) api tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+import re
+from importlib.util import find_spec
+from typing import Union
+from urllib.parse import quote
+
+import pytest
+from pytest_httpx import HTTPXMock
+from typeguard import check_type
+from waylay.sdk import ApiClient, WaylayClient
+from waylay.sdk.api._models import Model
+from waylay.services.queries.api import ManageApi
+from waylay.services.queries.service import QueriesService
+
+from ..types.delete_response_stub import DeleteResponseStub
+from ..types.queries_list_response_stub import QueriesListResponseStub
+from ..types.query_definition_stub import QueryDefinitionStub
+from ..types.query_entity_input_stub import QueryEntityInputStub
+from ..types.query_response_stub import QueryResponseStub
+
+MODELS_AVAILABLE = find_spec("waylay.services.queries.models") is not None
+
+if MODELS_AVAILABLE:
+ from waylay.services.queries.models import (
+ DeleteResponse,
+ QueriesListResponse,
+ QueryResponse,
+ )
+ from waylay.services.queries.queries.manage_api import ListQuery
+
+
+# JSON literals bound to Python names, so interpolated JSON example payloads evaluate as-is
+null, true, false = None, True, False
+
+
+@pytest.fixture
+def manage_api(waylay_api_client: ApiClient) -> ManageApi:
+ return ManageApi(waylay_api_client)
+
+
+def test_registered(waylay_client: WaylayClient):
+ """Test that ManageApi api is registered in the sdk client."""
+ assert isinstance(waylay_client.queries.manage, ManageApi)
+
+
+def _create_set_mock_response(httpx_mock: HTTPXMock, gateway_url: str):
+ mock_response = QueryResponseStub.create_json()
+ httpx_mock_kwargs = {
+ "method": "POST",
+ "url": re.compile(f"^{gateway_url}/queries/v1/queries/v1/query(\\?.*)?"),
+ "content": json.dumps(mock_response, default=str),
+ "status_code": 200,
+ }
+ httpx_mock.add_response(**httpx_mock_kwargs)
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(not MODELS_AVAILABLE, reason="Types not installed.")
+async def test_create(service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock):
+ """Test case for create
+ Post Query
+ """
+ # set path params
+ kwargs = {
+ "json": QueryEntityInputStub.create_instance(),
+ }
+ _create_set_mock_response(httpx_mock, gateway_url)
+ resp = await service.manage.create(**kwargs)
+ check_type(resp, Union[QueryResponse,])
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(MODELS_AVAILABLE, reason="Types installed.")
+async def test_create_without_types(
+ service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock
+):
+ """Test case for create with models not installed
+ Post Query
+ """
+ # set path params
+ kwargs = {
+ "json": QueryEntityInputStub.create_json(),
+ }
+ _create_set_mock_response(httpx_mock, gateway_url)
+ resp = await service.manage.create(**kwargs)
+ check_type(resp, Model)
+
+
+def _get_set_mock_response(httpx_mock: HTTPXMock, gateway_url: str, query_name: str):
+ mock_response = QueryResponseStub.create_json()
+ httpx_mock_kwargs = {
+ "method": "GET",
+ "url": re.compile(
+ f"^{gateway_url}/queries/v1/queries/v1/query/{query_name}(\\?.*)?"
+ ),
+ "content": json.dumps(mock_response, default=str),
+ "status_code": 200,
+ }
+ httpx_mock.add_response(**httpx_mock_kwargs)
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(not MODELS_AVAILABLE, reason="Types not installed.")
+async def test_get(service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock):
+ """Test case for get
+ Get Query
+ """
+ # set path params
+ query_name = "query_name_example"
+
+ kwargs = {}
+ _get_set_mock_response(httpx_mock, gateway_url, quote(str(query_name)))
+ resp = await service.manage.get(query_name, **kwargs)
+ check_type(resp, Union[QueryResponse,])
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(MODELS_AVAILABLE, reason="Types installed.")
+async def test_get_without_types(
+ service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock
+):
+ """Test case for get with models not installed
+ Get Query
+ """
+ # set path params
+ query_name = "query_name_example"
+
+ kwargs = {}
+ _get_set_mock_response(httpx_mock, gateway_url, quote(str(query_name)))
+ resp = await service.manage.get(query_name, **kwargs)
+ check_type(resp, Model)
+
+
+def _list_set_mock_response(httpx_mock: HTTPXMock, gateway_url: str):
+ mock_response = QueriesListResponseStub.create_json()
+ httpx_mock_kwargs = {
+ "method": "GET",
+ "url": re.compile(f"^{gateway_url}/queries/v1/queries/v1/query(\\?.*)?"),
+ "content": json.dumps(mock_response, default=str),
+ "status_code": 200,
+ }
+ httpx_mock.add_response(**httpx_mock_kwargs)
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(not MODELS_AVAILABLE, reason="Types not installed.")
+async def test_list(service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock):
+ """Test case for list
+ List Queries
+ """
+ # set path params
+ kwargs = {
+ # optionally use ListQuery to validate and reuse parameters
+ "query": ListQuery(
+ q="",
+ limit=10,
+ offset=0,
+ ),
+ }
+ _list_set_mock_response(httpx_mock, gateway_url)
+ resp = await service.manage.list(**kwargs)
+ check_type(resp, Union[QueriesListResponse,])
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(MODELS_AVAILABLE, reason="Types installed.")
+async def test_list_without_types(
+ service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock
+):
+ """Test case for list with models not installed
+ List Queries
+ """
+ # set path params
+ kwargs = {
+ "query": {
+ "q": "",
+ "limit": 10,
+ "offset": 0,
+ },
+ }
+ _list_set_mock_response(httpx_mock, gateway_url)
+ resp = await service.manage.list(**kwargs)
+ check_type(resp, Model)
+
+
+def _remove_set_mock_response(httpx_mock: HTTPXMock, gateway_url: str, query_name: str):
+ mock_response = DeleteResponseStub.create_json()
+ httpx_mock_kwargs = {
+ "method": "DELETE",
+ "url": re.compile(
+ f"^{gateway_url}/queries/v1/queries/v1/query/{query_name}(\\?.*)?"
+ ),
+ "content": json.dumps(mock_response, default=str),
+ "status_code": 200,
+ }
+ httpx_mock.add_response(**httpx_mock_kwargs)
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(not MODELS_AVAILABLE, reason="Types not installed.")
+async def test_remove(service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock):
+ """Test case for remove
+ Remove Query
+ """
+ # set path params
+ query_name = "query_name_example"
+
+ kwargs = {}
+ _remove_set_mock_response(httpx_mock, gateway_url, quote(str(query_name)))
+ resp = await service.manage.remove(query_name, **kwargs)
+ check_type(resp, Union[DeleteResponse,])
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(MODELS_AVAILABLE, reason="Types installed.")
+async def test_remove_without_types(
+ service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock
+):
+ """Test case for remove with models not installed
+ Remove Query
+ """
+ # set path params
+ query_name = "query_name_example"
+
+ kwargs = {}
+ _remove_set_mock_response(httpx_mock, gateway_url, quote(str(query_name)))
+ resp = await service.manage.remove(query_name, **kwargs)
+ check_type(resp, Model)
+
+
+def _update_set_mock_response(httpx_mock: HTTPXMock, gateway_url: str, query_name: str):
+ mock_response = QueryResponseStub.create_json()
+ httpx_mock_kwargs = {
+ "method": "PUT",
+ "url": re.compile(
+ f"^{gateway_url}/queries/v1/queries/v1/query/{query_name}(\\?.*)?"
+ ),
+ "content": json.dumps(mock_response, default=str),
+ "status_code": 200,
+ }
+ httpx_mock.add_response(**httpx_mock_kwargs)
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(not MODELS_AVAILABLE, reason="Types not installed.")
+async def test_update(service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock):
+ """Test case for update
+ Update Query
+ """
+ # set path params
+ query_name = "query_name_example"
+
+ kwargs = {
+ "json": QueryDefinitionStub.create_instance(),
+ }
+ _update_set_mock_response(httpx_mock, gateway_url, quote(str(query_name)))
+ resp = await service.manage.update(query_name, **kwargs)
+ check_type(resp, Union[QueryResponse,])
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(MODELS_AVAILABLE, reason="Types installed.")
+async def test_update_without_types(
+ service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock
+):
+ """Test case for update with models not installed
+ Update Query
+ """
+ # set path params
+ query_name = "query_name_example"
+
+ kwargs = {
+ "json": QueryDefinitionStub.create_json(),
+ }
+ _update_set_mock_response(httpx_mock, gateway_url, quote(str(query_name)))
+ resp = await service.manage.update(query_name, **kwargs)
+ check_type(resp, Model)
diff --git a/test/api/status_api_test.py b/test/api/status_api_test.py
new file mode 100644
index 0000000..e6835da
--- /dev/null
+++ b/test/api/status_api_test.py
@@ -0,0 +1,82 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) api tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+import re
+from importlib.util import find_spec
+from typing import Dict, Union
+
+import pytest
+from pytest_httpx import HTTPXMock
+from typeguard import check_type
+from waylay.sdk import ApiClient, WaylayClient
+from waylay.sdk.api._models import Model
+from waylay.services.queries.api import StatusApi
+from waylay.services.queries.service import QueriesService
+
+MODELS_AVAILABLE = find_spec("waylay.services.queries.models") is not None
+
+if MODELS_AVAILABLE:
+ pass
+
+
+# JSON literals bound to Python names, so interpolated JSON example payloads evaluate as-is
+null, true, false = None, True, False
+
+
+@pytest.fixture
+def status_api(waylay_api_client: ApiClient) -> StatusApi:
+ return StatusApi(waylay_api_client)
+
+
+def test_registered(waylay_client: WaylayClient):
+ """Test that StatusApi api is registered in the sdk client."""
+ assert isinstance(waylay_client.queries.status, StatusApi)
+
+
+def _get_set_mock_response(httpx_mock: HTTPXMock, gateway_url: str):
+ mock_response = {"key": ""}
+ httpx_mock_kwargs = {
+ "method": "GET",
+ "url": re.compile(f"^{gateway_url}/queries/v1/queries/v1(\\?.*)?"),
+ "content": json.dumps(mock_response, default=str),
+ "status_code": 200,
+ }
+ httpx_mock.add_response(**httpx_mock_kwargs)
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(not MODELS_AVAILABLE, reason="Types not installed.")
+async def test_get(service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock):
+ """Test case for get
+ Get Version And Health
+ """
+ # set path params
+ kwargs = {}
+ _get_set_mock_response(httpx_mock, gateway_url)
+ resp = await service.status.get(**kwargs)
+ check_type(resp, Union[Dict[str, str],])
+
+
+@pytest.mark.asyncio
+@pytest.mark.skipif(MODELS_AVAILABLE, reason="Types installed.")
+async def test_get_without_types(
+ service: QueriesService, gateway_url: str, httpx_mock: HTTPXMock
+):
+ """Test case for get with models not installed
+ Get Version And Health
+ """
+ # set path params
+ kwargs = {}
+ _get_set_mock_response(httpx_mock, gateway_url)
+ resp = await service.status.get(**kwargs)
+ check_type(resp, Model)
diff --git a/test/conftest.py b/test/conftest.py
new file mode 100644
index 0000000..2de03b8
--- /dev/null
+++ b/test/conftest.py
@@ -0,0 +1,71 @@
+"""Automatic pytest fixtures."""
+
+import random
+
+import httpx
+import pytest
+import starlette.requests as req
+import starlette.responses as res
+from waylay.sdk import ApiClient, WaylayClient, WaylayConfig
+from waylay.sdk.auth import NoCredentials
+from waylay.services.queries.service import QueriesService
+
+random.seed(10)
+GATEWAY_URL = "http://example.io"
+
+
+@pytest.fixture(name="gateway_url")
+def fixture_gateway_url() -> str:
+ return GATEWAY_URL
+
+
+@pytest.fixture(name="waylay_config")
+def fixture_config(gateway_url) -> WaylayConfig:
+ return WaylayConfig(credentials=NoCredentials(gateway_url=gateway_url))
+
+
+@pytest.fixture(name="waylay_api_client")
+def fixture_api_client(waylay_config: WaylayConfig) -> ApiClient:
+ return ApiClient(waylay_config, {"auth": None})
+
+
+@pytest.fixture(name="service")
+def fixture_service(waylay_api_client: ApiClient) -> QueriesService:
+ return QueriesService(waylay_api_client)
+
+
+@pytest.fixture(name="waylay_client")
+def fixture_waylay_client(waylay_config: WaylayConfig) -> WaylayClient:
+ return WaylayClient(waylay_config, {"auth": None})
+
+
+@pytest.fixture(name="test_app", scope="module")
+def fixture_my_app():
+ async def echo_app(scope, receive, send):
+ request = req.Request(scope, receive)
+ content_type = request.headers.get("content-type", "application/octet-stream")
+ if content_type.startswith("application/json"):
+ response = res.JSONResponse(await request.json())
+ elif content_type.startswith("multipart/form-data") or content_type.startswith(
+ "application/x-www-form-urlencoded"
+ ):
+ form = await request.form()
+ response = res.JSONResponse({
+ key: (value if isinstance(value, str) else {"size": value.size})
+ for key, value in form.items()
+ })
+ else:
+            body = await request.body()
+            response = res.JSONResponse({"bytes": str(body, encoding="utf-8")})
+ await response(scope, receive, send)
+
+ return echo_app
+
+
+@pytest.fixture(name="echo_service")
+async def fixture_echo_client(service, test_app):
+ async with service({
+ "transport": httpx.ASGITransport(test_app),
+ "auth": None,
+ }) as srv:
+ yield srv
diff --git a/test/openapi.py b/test/openapi.py
new file mode 100644
index 0000000..23239bc
--- /dev/null
+++ b/test/openapi.py
@@ -0,0 +1,2315 @@
+import json
+
+import yaml
+
+
+def with_example_provider(dct):
+ has_example = False
+ if "example" in dct:
+ example, has_example = dct["example"], True
+ elif "examples" in dct:
+ examples = dct["examples"]
+        if isinstance(examples, list) and examples:  # use the first example only when the list is non-empty
+ example, has_example = examples[0], True
+ elif "default" in dct:
+ example, has_example = dct["default"], True
+
+ if has_example:
+ provider = (
+ example
+ if example is None or isinstance(example, (dict, list, int, float, bool))
+ else f"'{example}'"
+ )
+ dct.update({"$provider": f"lambda: {provider}"})
+ return dct
+
+
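+# Illustrative usage (assumption, not part of the generated module): applying the
+# hook while parsing a schema fragment attaches a "$provider" entry holding the
+# source of a lambda that reproduces the example value:
+#
+#   json.loads('{"type": "integer", "example": 42}', object_hook=with_example_provider)
+#   # -> {"type": "integer", "example": 42, "$provider": "lambda: 42"}
+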
+with open("openapi/queries.transformed.openapi.yaml", "r") as file:
+ OPENAPI_SPEC = yaml.safe_load(file)
+
+MODEL_DEFINITIONS = OPENAPI_SPEC["components"]["schemas"]
+
+_aggregation_by_resource_and_metric_model_schema = json.loads(
+ r"""{
+ "anyOf" : [ {
+ "title" : "Aggregation by Resource or Metric",
+ "type" : "object",
+ "additionalProperties" : {
+ "$ref" : "#/components/schemas/Aggregation_by_Resource_or_Metric"
+ },
+ "description" : "Aggregation methods specified per resource or metric.",
+ "nullable" : true
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Aggregation_by_Resource_and_Metric": _aggregation_by_resource_and_metric_model_schema
+})
+
+_aggregation_by_resource_or_metric_model_schema = json.loads(
+ r"""{
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/AggregationMethod"
+ }, {
+ "title" : "Aggregations",
+ "type" : "array",
+ "description" : "Aggregation methods, leading to sepearate series.",
+ "nullable" : true,
+ "items" : {
+ "$ref" : "#/components/schemas/Aggregations_inner"
+ }
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Aggregation_by_Resource_or_Metric": _aggregation_by_resource_or_metric_model_schema
+})
+
+_aggregation_method_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "nullable" : true,
+ "oneOf" : [ {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_1"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_2"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_3"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_4"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_5"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_6"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_7"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_8"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"AggregationMethod": _aggregation_method_model_schema})
+
+_aggregation_method_one_of_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf",
+ "type" : "string",
+ "description" : "Use the first value (in time) to represent all data for the sample interval.",
+ "enum" : [ "first" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "AggregationMethod_oneOf": _aggregation_method_one_of_model_schema
+})
+
+_aggregation_method_one_of_1_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_1",
+ "type" : "string",
+ "description" : "Use the last value (in time) to represent all data for the sample interval.",
+ "enum" : [ "last" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "AggregationMethod_oneOf_1": _aggregation_method_one_of_1_model_schema
+})
+
+_aggregation_method_one_of_2_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_2",
+ "type" : "string",
+ "description" : "Aggregate data by the mean value: The sum of values divided by number of observations.",
+ "enum" : [ "mean" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "AggregationMethod_oneOf_2": _aggregation_method_one_of_2_model_schema
+})
+
+_aggregation_method_one_of_3_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_3",
+ "type" : "string",
+ "description" : "Aggregate data by the median value: The n/2-th value when ordered, the average of the (n-1)/2-th and (n+1)/2-th value when n is uneven.",
+ "enum" : [ "median" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "AggregationMethod_oneOf_3": _aggregation_method_one_of_3_model_schema
+})
+
+_aggregation_method_one_of_4_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_4",
+ "type" : "string",
+ "description" : "The sum of all values summarizes the data for the sample interval.",
+ "enum" : [ "sum" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "AggregationMethod_oneOf_4": _aggregation_method_one_of_4_model_schema
+})
+
+_aggregation_method_one_of_5_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_5",
+ "type" : "string",
+ "description" : "Use the count of observations in the sample interval.",
+ "enum" : [ "count" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "AggregationMethod_oneOf_5": _aggregation_method_one_of_5_model_schema
+})
+
+_aggregation_method_one_of_6_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_6",
+ "type" : "string",
+ "description" : "Use the standard deviation of all observations in the sample interval.",
+ "enum" : [ "std" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "AggregationMethod_oneOf_6": _aggregation_method_one_of_6_model_schema
+})
+
+_aggregation_method_one_of_7_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_7",
+ "type" : "string",
+ "description" : "Use the maximum of all values in the sample interval.",
+ "enum" : [ "max" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "AggregationMethod_oneOf_7": _aggregation_method_one_of_7_model_schema
+})
+
+_aggregation_method_one_of_8_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_8",
+ "type" : "string",
+ "description" : "Use the minimum of all values in the sample interval.",
+ "enum" : [ "min" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "AggregationMethod_oneOf_8": _aggregation_method_one_of_8_model_schema
+})
+
+_aggregations_inner_model_schema = json.loads(
+ r"""{
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/AggregationMethod"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Aggregations_inner": _aggregations_inner_model_schema})
+
+_align_at_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Possible values for `align.at`.\n\n* 'grid' Align to a fixed grid (possibly using timezone information)\n* 'from' Align a the `from` boundary\n* 'until' Align a the `until` boundary\n* 'boundary' Align a the `from` boundary if specified,\n otherwise the `until` boundary.\n\nWhen not specified, 'grid' is used.",
+ "enum" : [ "grid", "boundary", "from", "until" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"AlignAt": _align_at_model_schema})
+
+_align_shift_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Possible values for `align.shift`.\n\n* 'backward': keep the window size of the original interval specification,\n shifting back.\n* 'forward': keep the window size of the original interval specification,\n shifting forward.\n* 'wrap': enlarge the window size to include all of the original interval.\n\nWhen not specified, 'backward' is used.",
+ "enum" : [ "backward", "forward", "wrap" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"AlignShift": _align_shift_model_schema})
+
+_alignment_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "at" : {
+ "$ref" : "#/components/schemas/AlignAt"
+ },
+ "shift" : {
+ "$ref" : "#/components/schemas/AlignShift"
+ },
+ "freq" : {
+ "$ref" : "#/components/schemas/Alignment_Grid_interval_"
+ },
+ "timezone" : {
+ "$ref" : "#/components/schemas/Alignment_Timezone_"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Aggregation Alignment Options.\n\nSpecifies how the aggregation grid is aligned."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Alignment": _alignment_model_schema})
+
+_alignment_grid_interval__model_schema = json.loads(
+ r"""{
+ "title" : "Alignment Grid interval.",
+ "type" : "string",
+ "description" : "\nDefines the grid used to align the aggregation window.\nThe window will align at whole-unit multiples of this interval.\n\nFor intervals like `PT1D`, that are timezone-dependent, use the \n`align.timezone` to fix the absolute timestamp of the grid boundaries.\n\nIf not specified, defaults to the `freq` aggregation interval.\n",
+ "oneOf" : [ {
+ "title" : "ISO8601 period ",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ }, {
+ "$ref" : "#/components/schemas/Grouping_Interval_Override_oneOf"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Alignment_Grid_interval_": _alignment_grid_interval__model_schema
+})
+
+_alignment_timezone__model_schema = json.loads(
+ r"""{
+ "title" : "Alignment Timezone.",
+ "type" : "string",
+ "description" : "\nThe timezone to use when shifting boundaries, especially\nat day granularity.\nAlso affects the rendering of timestamps when\n`render.iso_timestamp` is enabled.\n\nWhen not specified, the `UTC` timezone is used.\n",
+ "oneOf" : [ {
+ "title" : "Timezone Identifier",
+ "type" : "string",
+ "description" : "[ICANN timezone identifier](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones)"
+ }, {
+ "title" : "UTC Offset",
+ "pattern" : "(+|-)\\d\\d:\\d\\d",
+ "type" : "string",
+ "description" : "[UTC offset](https://en.wikipedia.org/wiki/UTC_offset)"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Alignment_Timezone_": _alignment_timezone__model_schema})
+
+_cause_exception_model_schema = json.loads(
+ r"""{
+ "required" : [ "message", "stacktrace", "type" ],
+ "type" : "object",
+ "properties" : {
+ "type" : {
+ "title" : "Exception Type",
+ "type" : "string"
+ },
+ "message" : {
+ "title" : "Exception Message",
+ "type" : "string"
+ },
+ "stacktrace" : {
+ "title" : "Stack Trace",
+ "type" : "array",
+ "items" : {
+ "type" : "string"
+ }
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Describes the exception that caused a message."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"CauseException": _cause_exception_model_schema})
+
+_column_data_set_model_schema = json.loads(
+ r"""{
+ "required" : [ "data", "rows" ],
+ "type" : "object",
+ "properties" : {
+ "attributes" : {
+ "$ref" : "#/components/schemas/DataSetAttributes"
+ },
+ "window_spec" : {
+ "$ref" : "#/components/schemas/DataSetWindow"
+ },
+ "data_axis" : {
+ "$ref" : "#/components/schemas/ColumnDataSet_data_axis"
+ },
+ "rows" : {
+ "title" : "Row Headers",
+ "type" : "array",
+ "description" : "Header Attributes for the index data.\n\nThe initial string-valued headers (normally `resource`, `metric`,`aggregation`) indicate that row to contain series attributes.\n\nThe remaining object-valued row headers contain the index data.",
+ "items" : {
+ "$ref" : "#/components/schemas/Row_Headers_inner"
+ }
+ },
+ "data" : {
+ "title" : "Series",
+ "type" : "array",
+ "description" : "All metric observation values for a single series. Prefixed by the series attributes.",
+ "items" : {
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/Datum"
+ }
+ }
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Column-oriented dataset with rows header.\n\nTimeseries data layout with a rows header containing\nthe index data.\nThe data array contains series data prefixed by series attributes.\nThe `rows` index is prefix by the names of these series attributes.\nResult for render options `data_axis=row` and `header_array=column`."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"ColumnDataSet": _column_data_set_model_schema})
+
+_column_data_set_data_axis_model_schema = json.loads(
+ r"""{
+ "title" : "ColumnDataSet_data_axis",
+ "type" : "string",
+ "default" : "row",
+ "enum" : [ "row" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "ColumnDataSet_data_axis": _column_data_set_data_axis_model_schema
+})
+
+_column_header_model_schema = json.loads(
+ r"""{
+ "required" : [ "metric", "resource" ],
+ "type" : "object",
+ "properties" : {
+ "resource" : {
+ "title" : "Series resource id",
+ "type" : "string"
+ },
+ "metric" : {
+ "title" : "Series metric",
+ "type" : "string"
+ },
+ "aggregation" : {
+ "title" : "Aggregation applied to the series.",
+ "type" : "string"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Column attributes.\n\nAttributes that identify and describe the data in this column."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"ColumnHeader": _column_header_model_schema})
+
+_column_headers_inner_model_schema = json.loads(
+ r"""{
+ "title" : "Column_Headers_inner",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/RowIndexColumnHeader"
+ }, {
+ "$ref" : "#/components/schemas/ColumnHeader"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Column_Headers_inner": _column_headers_inner_model_schema})
+
+_data_axis_option_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Allowed values for the render.data_axis option.",
+ "enum" : [ "row", "column" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"DataAxisOption": _data_axis_option_model_schema})
+
+_data_set_attributes_model_schema = json.loads(
+ r"""{
+ "title" : "DataSetAttributes",
+ "type" : "object",
+ "properties" : {
+ "role" : {
+ "$ref" : "#/components/schemas/Role"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Data Set Attributes.\n\nData attributes that apply to all data in this set."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"DataSetAttributes": _data_set_attributes_model_schema})
+
+_data_set_window_model_schema = json.loads(
+ r"""{
+ "title" : "DataSetWindow",
+ "required" : [ "freq", "until", "window" ],
+ "type" : "object",
+ "properties" : {
+ "until" : {
+ "title" : "Time Axis End",
+ "type" : "integer",
+ "description" : "Exclusive higher bound of the time axis in unix epoch milliseconds."
+ },
+ "window" : {
+ "title" : "Time Axis Length",
+ "type" : "string",
+ "description" : "Time axis length as ISO8601 period.",
+ "format" : "period"
+ },
+ "freq" : {
+ "title" : "Frequency",
+ "type" : "string",
+ "description" : "Time axis aggregation interval as an ISO8601 period .",
+ "format" : "period"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Data Window.\n\nStatistics of the time axis of a data set.\nPresent with render option `include_window_spec=true`.\","
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"DataSetWindow": _data_set_window_model_schema})
+
+_datum_model_schema = json.loads(
+ r"""{
+ "title" : "Datum",
+ "description" : "A single metric value for a timeseries.\n\nA null value indicates that no (aggregated/interpolated) value exists for the corresponding timestamp.",
+ "oneOf" : [ {
+ "type" : "number",
+ "nullable" : true
+ }, {
+ "type" : "string",
+ "nullable" : true
+ }, {
+ "type" : "boolean",
+ "nullable" : true
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Datum": _datum_model_schema})
+
+_default_aggregation_model_schema = json.loads(
+ r"""{
+ "title" : "Default Aggregation",
+ "description" : "Default aggregation method(s) for the series in the query.",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/AggregationMethod"
+ }, {
+ "title" : "Aggregations",
+ "type" : "array",
+ "description" : "Aggregation methods, leading to sepearate series.",
+ "nullable" : true,
+ "items" : {
+ "$ref" : "#/components/schemas/Aggregations_inner"
+ }
+ }, {
+ "title" : "Aggregation by Resource or Metric",
+ "type" : "object",
+ "additionalProperties" : {
+ "$ref" : "#/components/schemas/Aggregation_by_Resource_or_Metric"
+ },
+ "description" : "Aggregation methods specified per resource or metric.",
+ "nullable" : true
+ }, {
+ "title" : "Aggregation by Resource and Metric",
+ "type" : "object",
+ "additionalProperties" : {
+ "$ref" : "#/components/schemas/Aggregation_by_Resource_and_Metric"
+ },
+ "description" : "Aggregation methods specified per resource and metric.",
+ "nullable" : true
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Default_Aggregation": _default_aggregation_model_schema})
+
+_default_interpolation_model_schema = json.loads(
+ r"""{
+ "title" : "Default Interpolation",
+ "description" : "Default Interpolation method for the series (if aggregated).",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/InterpolationMethod"
+ }, {
+ "$ref" : "#/components/schemas/InterpolationSpec"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Default_Interpolation": _default_interpolation_model_schema})
+
+_delete_response_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "messages" : {
+ "title" : "Messages",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/Message"
+ }
+ },
+ "_links" : {
+ "title" : " Links",
+ "type" : "object",
+ "additionalProperties" : {
+ "$ref" : "#/components/schemas/_Links"
+ },
+ "description" : "HAL links, indexed by link relation."
+ },
+ "_embeddings" : {
+ "title" : " Embeddings",
+ "type" : "object",
+ "additionalProperties" : {
+ "$ref" : "#/components/schemas/_Embeddings"
+ },
+ "description" : "Hal embeddings, indexed by relation."
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Confirmation of a delete request."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"DeleteResponse": _delete_response_model_schema})
+
+_embeddings_model_schema = json.loads(
+ r"""{
+ "title" : "_Embeddings",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/HALEmbedding"
+ }, {
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/HALEmbedding"
+ }
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"_Embeddings": _embeddings_model_schema})
+
+_from_override__model_schema = json.loads(
+ r"""{
+ "title" : "From Override.",
+ "type" : "string",
+ "oneOf" : [ {
+ "title" : "ISO8601 absolute timestamp",
+ "pattern" : "[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?",
+ "type" : "string",
+ "description" : "A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations) format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)",
+ "format" : "date-time",
+ "example" : "2018-03-21T12:23:00+01:00"
+ }, {
+ "title" : "UNIX epoch milliseconds",
+ "minimum" : 0,
+ "type" : "integer",
+ "description" : "Absolute timestamp milliseconds in unix epoch since 1970-01-01.",
+ "example" : 1534836422284
+ }, {
+ "title" : "ISO8601 Period Before Now",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "Specifies a timestamp before _now_ as a period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"From_Override_": _from_override__model_schema})
+
+_grouping_interval_model_schema = json.loads(
+ r"""{
+ "title" : "Grouping interval",
+ "type" : "string",
+ "description" : "Interval used to aggregate or regularize data. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.",
+ "oneOf" : [ {
+ "title" : "ISO8601 period ",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ }, {
+ "$ref" : "#/components/schemas/Grouping_Interval_Override_oneOf"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Grouping_interval": _grouping_interval_model_schema})
+
+_grouping_interval_override_model_schema = json.loads(
+ r"""{
+ "title" : "Grouping Interval Override",
+ "type" : "string",
+ "description" : "Override for the `freq` query attribute.",
+ "oneOf" : [ {
+ "title" : "ISO8601 period ",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ }, {
+ "$ref" : "#/components/schemas/Grouping_Interval_Override_oneOf"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Grouping_Interval_Override": _grouping_interval_override_model_schema
+})
+
+_grouping_interval_override_one_of_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "When `inferred` is specified, the frequency of aggregation will be inferred from the main/first time series. This can be used to regularize the time series",
+ "enum" : [ "inferred" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Grouping_Interval_Override_oneOf": _grouping_interval_override_one_of_model_schema
+})
+
+_hal_link_model_schema = json.loads(
+ r"""{
+ "required" : [ "href" ],
+ "type" : "object",
+ "properties" : {
+ "href" : {
+ "title" : "Link URL",
+ "type" : "string",
+ "description" : "Target url for this link."
+ },
+ "type" : {
+ "title" : "Link type",
+ "type" : "string",
+ "description" : "Type of the resource referenced by this link."
+ },
+ "method" : {
+ "$ref" : "#/components/schemas/HALLinkMethod"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "A link target in a HAL response."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"HALLink": _hal_link_model_schema})
+
+_hal_link_method_model_schema = json.loads(
+ r"""{
+ "title" : "HALLinkMethod",
+ "type" : "string",
+ "description" : "An http method that can be specified in a HAL link.",
+ "enum" : [ "GET", "POST", "PUT", "DELETE", "PATCH" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"HALLinkMethod": _hal_link_method_model_schema})
+
+_hal_link_role_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Supported link and embedding roles in HAL representations.",
+ "enum" : [ "self", "first", "prev", "next", "last", "execute" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"HALLinkRole": _hal_link_role_model_schema})
+
+_http_validation_error_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "detail" : {
+ "title" : "Detail",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/ValidationError"
+ }
+ }
+ }
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"HTTPValidationError": _http_validation_error_model_schema})
+
+_header_array_option_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Allowed values for the render.header_array option.",
+ "enum" : [ "row", "column" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"HeaderArrayOption": _header_array_option_model_schema})
+
+_hierarchical_model_schema = json.loads(
+ r"""{
+ "title" : "Hierarchical",
+ "description" : "if true, use hierarchical objects to represent multiple row (or column) dimensions, otherwise multi-keys get concatenated with a dot-delimiter. If the value is a list, only these levels are kept as separate levels, while remaining levels get concatenated keys",
+ "anyOf" : [ {
+ "type" : "boolean"
+ }, {
+ "type" : "array",
+ "items" : {
+ "type" : "string"
+ }
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Hierarchical": _hierarchical_model_schema})
+
+_interpolation_model_schema = json.loads(
+ r"""{
+ "title" : "Interpolation",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/InterpolationMethod"
+ }, {
+ "$ref" : "#/components/schemas/InterpolationSpec"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Interpolation": _interpolation_model_schema})
+
+_interpolation_method_model_schema = json.loads(
+ r"""{
+ "title" : "Interpolation method",
+ "type" : "string",
+ "oneOf" : [ {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_1"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_2"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_3"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_4"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_5"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_6"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_7"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_8"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_9"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_10"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_11"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_12"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_13"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Interpolation_method": _interpolation_method_model_schema})
+
+_interpolation_method_one_of_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with the value of the first observed point. This method also extrapolates.",
+ "enum" : [ "pad" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf": _interpolation_method_one_of_model_schema
+})
+
+_interpolation_method_one_of_1_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a fixed, user-specified value. This method also extrapolates.",
+ "enum" : [ "fixed" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf_1": _interpolation_method_one_of_1_model_schema
+})
+
+_interpolation_method_one_of_10_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a spline function of a user-specified order.",
+ "enum" : [ "spline" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf_10": _interpolation_method_one_of_10_model_schema
+})
+
+_interpolation_method_one_of_11_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with the derivative of order 1.",
+ "enum" : [ "from_derivatives" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf_11": _interpolation_method_one_of_11_model_schema
+})
+
+_interpolation_method_one_of_12_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a piecewise cubic spline function.",
+ "enum" : [ "pchip" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf_12": _interpolation_method_one_of_12_model_schema
+})
+
+_interpolation_method_one_of_13_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a non-smoothing spline of order 2, called Akima interpolation.",
+ "enum" : [ "akima" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf_13": _interpolation_method_one_of_13_model_schema
+})
+
+_interpolation_method_one_of_2_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Same as pad, but using the last observed value. This method also extrapolates",
+ "enum" : [ "backfill" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf_2": _interpolation_method_one_of_2_model_schema
+})
+
+_interpolation_method_one_of_3_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Linearly go from the first observed value of the gap to the last observed oneThis method also extrapolates",
+ "enum" : [ "linear" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf_3": _interpolation_method_one_of_3_model_schema
+})
+
+_interpolation_method_one_of_4_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Use the value that is closest in time.",
+ "enum" : [ "nearest" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf_4": _interpolation_method_one_of_4_model_schema
+})
+
+_interpolation_method_one_of_5_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a spline function of order 0, which is a piecewise polynomial.",
+ "enum" : [ "zero" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf_5": _interpolation_method_one_of_5_model_schema
+})
+
+_interpolation_method_one_of_6_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a spline function of order 1, which is a piecewise polynomial.",
+ "enum" : [ "slinear" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf_6": _interpolation_method_one_of_6_model_schema
+})
+
+_interpolation_method_one_of_7_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a spline function of order 2, which is a piecewise polynomial.",
+ "enum" : [ "quadratic" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf_7": _interpolation_method_one_of_7_model_schema
+})
+
+_interpolation_method_one_of_8_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a spline function of order 3, which is a piecewise polynomial.",
+ "enum" : [ "cubic" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf_8": _interpolation_method_one_of_8_model_schema
+})
+
+_interpolation_method_one_of_9_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a polynomial of the lowest possible degree passing trough the data points.",
+ "enum" : [ "polynomial" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "Interpolation_method_oneOf_9": _interpolation_method_one_of_9_model_schema
+})
+
+_interpolation_spec_model_schema = json.loads(
+ r"""{
+ "required" : [ "method" ],
+ "type" : "object",
+ "properties" : {
+ "method" : {
+ "$ref" : "#/components/schemas/Interpolation_method"
+ },
+ "value" : {
+ "title" : "Interpolation parameter",
+ "type" : "integer",
+ "description" : "Optional parameter value for the interpolation method (see method description)."
+ },
+ "order" : {
+ "title" : "Interpolation order",
+ "type" : "integer",
+ "description" : "Optional order parameter for the interpolation method (see method description)."
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Defines whether, and how to treat missing values.\n\nThis can occur in two circumstances when aggregating (setting a sample frequency):\n* missing values: if there are missing (or invalid) values stored for\na given freq-interval,\n\"interpolation\" specifies how to compute these.\n* down-sampling: when the specified freq is smaller than the series’\nactual frequency.\n\"interpolation\" specifies how to compute intermediate values."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"InterpolationSpec": _interpolation_spec_model_schema})
+
+_links_model_schema = json.loads(
+ r"""{
+ "title" : "_Links",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/HALLink"
+ }, {
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/HALLink"
+ }
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"_Links": _links_model_schema})
+
+_location_inner_model_schema = json.loads(
+ r"""{
+ "title" : "Location_inner",
+ "anyOf" : [ {
+ "type" : "string"
+ }, {
+ "type" : "integer"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Location_inner": _location_inner_model_schema})
+
+_message_model_schema = json.loads(
+ r"""{
+ "title" : "Message",
+ "required" : [ "message" ],
+ "type" : "object",
+ "properties" : {
+ "code" : {
+ "title" : "code",
+ "type" : "string",
+ "nullable" : true
+ },
+ "message" : {
+ "title" : "Message",
+ "type" : "string"
+ },
+ "level" : {
+ "$ref" : "#/components/schemas/Message_level"
+ },
+ "args" : {
+ "title" : "args",
+ "type" : "object",
+ "nullable" : true
+ }
+ },
+ "description" : "Individual (info/warning/error) message in a response."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Message": _message_model_schema})
+
+_message_arguments_model_schema = json.loads(
+ r"""{
+ "title" : "Message Arguments",
+ "anyOf" : [ {
+ "type" : "string"
+ }, {
+ "$ref" : "#/components/schemas/MessageProperties"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Message_Arguments": _message_arguments_model_schema})
+
+_message_level_model_schema = json.loads(
+ r"""{
+ "title" : "Message_level",
+ "type" : "string",
+ "default" : "info",
+ "enum" : [ "debug", "info", "warning", "error", "fatal" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Message_level": _message_level_model_schema})
+
+_message_properties_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "resource" : {
+ "title" : "Series resource id",
+ "type" : "string"
+ },
+ "metric" : {
+ "title" : "Series metric",
+ "type" : "string"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Additional message arguments."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"MessageProperties": _message_properties_model_schema})
+
+_object_data_model_schema = json.loads(
+ r"""{
+ "required" : [ "timestamp" ],
+ "type" : "object",
+ "properties" : {
+ "timestamp" : {
+ "$ref" : "#/components/schemas/Timestamp"
+ },
+ "timestamp_iso" : {
+ "$ref" : "#/components/schemas/TimestampIso"
+ },
+ "role" : {
+ "$ref" : "#/components/schemas/Role"
+ },
+ "resource" : {
+ "title" : "Resource",
+ "type" : "string",
+ "description" : "Series resource id, if applicable for all values."
+ },
+ "metric" : {
+ "title" : "Metric",
+ "type" : "string",
+ "description" : "Series metric, if applicable for all values."
+ },
+ "aggregation" : {
+ "title" : "Aggregation",
+ "type" : "string",
+ "description" : "Series aggregation, if applicable for all values."
+ },
+ "levels" : {
+ "title" : "Hierarchical Levels",
+ "type" : "array",
+ "description" : "Attribute level names used to key the values for this observation.\n\nLevels that are flattened have a dot-separated key.\n\nIf all observations have the same attribute for a level, that level might be omitted.",
+ "example" : [ "resource", "metric", "aggregation" ],
+ "items" : {
+ "type" : "string"
+ }
+ }
+ },
+ "additionalProperties" : {
+ "$ref" : "#/components/schemas/ObjectData_value"
+ },
+ "description" : "Result data for a timestamp in object format."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"ObjectData": _object_data_model_schema})
+
+_object_data_set_model_schema = json.loads(
+ r"""{
+ "required" : [ "data" ],
+ "type" : "object",
+ "properties" : {
+ "attributes" : {
+ "$ref" : "#/components/schemas/DataSetAttributes"
+ },
+ "window_spec" : {
+ "$ref" : "#/components/schemas/DataSetWindow"
+ },
+ "data" : {
+ "title" : "Data",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/ObjectData"
+ }
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Data result in object format.\n\nResult item when render option `render.header_array` is not set.\n\nThe data values are keyed by their attributes (`resource`, `metric`, `aggregation`),\naccording to the render options:\n* _hierachical_: for each level, a sub-object is created\n (e.g. `render.mode=hier_dict`)\n* _flattened_: the attributes are '.'-separated concatenation\n of the attributes (e.g `render.mode=flat_dict`)\n* _mixed_: (.e.g. `render.mode=metric_flat_dict`) a single level\n (e.g. `metric`) is used as main key, any remaining levels\n (`resource`,`aggregation`) are indicated with a flattened subkey.\n\nWhen `render.rollup=true`, the attribute levels that are the same for all series are\nnot used as key, but reported as a data or table attribute."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"ObjectDataSet": _object_data_set_model_schema})
+
+_object_data_value_model_schema = json.loads(
+ r"""{
+ "title" : "Data ",
+ "oneOf" : [ {
+ "title" : "Hierarchical Data",
+ "type" : "object",
+ "description" : "Values for the series whose attributes corresponds with the key. Keyed by sub-levels."
+ }, {
+ "$ref" : "#/components/schemas/Datum"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"ObjectData_value": _object_data_value_model_schema})
+
+_queries_list_response_model_schema = json.loads(
+ r"""{
+ "required" : [ "_links", "count", "limit", "offset", "queries" ],
+ "type" : "object",
+ "properties" : {
+ "messages" : {
+ "title" : "Messages",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/Message"
+ }
+ },
+ "queries" : {
+ "title" : "Query item list",
+ "type" : "array",
+ "description" : "One page of matching query definitions.",
+ "items" : {
+ "$ref" : "#/components/schemas/QueryListItem"
+ }
+ },
+ "count" : {
+ "title" : "Current page size",
+ "type" : "integer",
+ "description" : "Number of query definitions returned in the current response."
+ },
+ "offset" : {
+ "title" : "Page offset",
+ "type" : "integer",
+ "description" : "Offset in the full listing (skipped definitions)."
+ },
+ "limit" : {
+ "title" : "Page size limit",
+ "type" : "integer",
+ "description" : "Maximal number of query definitions returned in one response."
+ },
+ "total_count" : {
+ "title" : "Total count",
+ "type" : "integer",
+ "description" : "Total number of query definitions matching the filter."
+ },
+ "_links" : {
+ "$ref" : "#/components/schemas/QueryListHALLinks"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Listing of named queries, with paging links."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"QueriesListResponse": _queries_list_response_model_schema})
+
+_query_definition_model_schema = json.loads(
+ r"""{
+ "title" : "Query Definition",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/QueryUpdateInput"
+ }, {
+ "$ref" : "#/components/schemas/Query-Input"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Query_Definition": _query_definition_model_schema})
+
+_query_entity_input_model_schema = json.loads(
+ r"""{
+ "required" : [ "name", "query" ],
+ "type" : "object",
+ "properties" : {
+ "name" : {
+ "title" : "Query name",
+ "type" : "string",
+ "description" : "Name of the stored query definition."
+ },
+ "meta" : {
+ "title" : "Query metadata",
+ "type" : "object",
+ "description" : "User metadata for the query definition."
+ },
+ "query" : {
+ "$ref" : "#/components/schemas/Query-Input"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Input data to create a query definition."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"QueryEntityInput": _query_entity_input_model_schema})
+
+_query_execution_message_model_schema = json.loads(
+ r"""{
+ "required" : [ "action", "category", "level", "message", "timestamp" ],
+ "type" : "object",
+ "properties" : {
+ "message" : {
+ "title" : "Message",
+ "type" : "string",
+ "description" : "A human readable message."
+ },
+ "level" : {
+ "$ref" : "#/components/schemas/QueryExecutionMessage_level"
+ },
+ "timestamp" : {
+ "title" : "Timestamp",
+ "type" : "string",
+ "format" : "date-time"
+ },
+ "action" : {
+ "title" : "Action",
+ "type" : "string",
+ "description" : "The request action that caused this message."
+ },
+ "category" : {
+ "title" : "Message Category",
+ "type" : "string",
+ "description" : "The subsystem that issued this message.",
+ "example" : "data"
+ },
+ "properties" : {
+ "$ref" : "#/components/schemas/Message_Arguments"
+ },
+ "exception" : {
+ "$ref" : "#/components/schemas/CauseException"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "A message object that informs or warns about a query execution issue."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "QueryExecutionMessage": _query_execution_message_model_schema
+})
+
+_query_execution_message_level_model_schema = json.loads(
+ r"""{
+ "title" : "QueryExecutionMessage_level",
+ "type" : "string",
+ "enum" : [ "debug", "info", "warning", "error" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({
+ "QueryExecutionMessage_level": _query_execution_message_level_model_schema
+})
+
+_query_hal_links_model_schema = json.loads(
+ r"""{
+ "required" : [ "execute", "self" ],
+ "type" : "object",
+ "properties" : {
+ "self" : {
+ "$ref" : "#/components/schemas/HALLink"
+ },
+ "execute" : {
+ "$ref" : "#/components/schemas/HALLink"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "HAL Links for a query entity."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"QueryHALLinks": _query_hal_links_model_schema})
+
+_query_input_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "resource" : {
+ "title" : "Default Resource",
+ "type" : "string",
+ "description" : "Default resource for the series in the query."
+ },
+ "metric" : {
+ "title" : "Default Metric",
+ "type" : "string",
+ "description" : "Default metric for the series in the query."
+ },
+ "aggregation" : {
+ "$ref" : "#/components/schemas/Default_Aggregation"
+ },
+ "interpolation" : {
+ "$ref" : "#/components/schemas/Default_Interpolation"
+ },
+ "freq" : {
+ "$ref" : "#/components/schemas/Grouping_interval"
+ },
+ "from" : {
+ "$ref" : "#/components/schemas/Time_Window_From"
+ },
+ "until" : {
+ "$ref" : "#/components/schemas/Time_Window_Until"
+ },
+ "window" : {
+ "$ref" : "#/components/schemas/Window"
+ },
+ "periods" : {
+ "title" : "Periods",
+ "type" : "integer",
+ "description" : "The size of the time window in number of `freq` units. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers."
+ },
+ "align" : {
+ "$ref" : "#/components/schemas/Alignment"
+ },
+ "data" : {
+ "title" : "Series specifications",
+ "type" : "array",
+ "description" : "List of series specifications. When not specified, a single default series specification is assumed(`[{}]`, using the default `metric`,`resource`, ... ).",
+ "items" : {
+ "$ref" : "#/components/schemas/SeriesSpec"
+ }
+ },
+ "render" : {
+ "$ref" : "#/components/schemas/Render"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Query definition for a Waylay analytics query.\n\nSee also [api docs](https://docs.waylay.io/#/api/query/?id=data-query-json-representation)."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Query-Input": _query_input_model_schema})
+
+_query_list_hal_links_model_schema = json.loads(
+ r"""{
+ "title" : "QueryListHALLinks",
+ "required" : [ "self" ],
+ "type" : "object",
+ "properties" : {
+ "self" : {
+ "$ref" : "#/components/schemas/HALLink"
+ },
+ "first" : {
+ "$ref" : "#/components/schemas/HALLink"
+ },
+ "prev" : {
+ "$ref" : "#/components/schemas/HALLink"
+ },
+ "next" : {
+ "$ref" : "#/components/schemas/HALLink"
+ },
+ "last" : {
+ "$ref" : "#/components/schemas/HALLink"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "HAL Links for a query entity."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"QueryListHALLinks": _query_list_hal_links_model_schema})
+
+_query_list_item_model_schema = json.loads(
+ r"""{
+ "title" : "QueryListItem",
+ "required" : [ "_links", "attrs", "name" ],
+ "type" : "object",
+ "properties" : {
+ "_links" : {
+ "$ref" : "#/components/schemas/QueryHALLinks"
+ },
+ "attrs" : {
+ "title" : "Query attributes",
+ "type" : "object",
+ "description" : "System provided metadata for the query definition."
+ },
+ "name" : {
+ "title" : "Query name",
+ "type" : "string",
+ "description" : "Name of the stored query definition."
+ },
+ "meta" : {
+ "title" : "Query metadata",
+ "type" : "object",
+ "description" : "User metadata for the query definition."
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Listing of a query definition item."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"QueryListItem": _query_list_item_model_schema})
+
+_query_output_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "resource" : {
+ "title" : "Default Resource",
+ "type" : "string",
+ "description" : "Default resource for the series in the query."
+ },
+ "metric" : {
+ "title" : "Default Metric",
+ "type" : "string",
+ "description" : "Default metric for the series in the query."
+ },
+ "aggregation" : {
+ "$ref" : "#/components/schemas/Default_Aggregation"
+ },
+ "interpolation" : {
+ "$ref" : "#/components/schemas/Default_Interpolation"
+ },
+ "freq" : {
+ "$ref" : "#/components/schemas/Grouping_interval"
+ },
+ "from" : {
+ "$ref" : "#/components/schemas/Time_Window_From"
+ },
+ "until" : {
+ "$ref" : "#/components/schemas/Time_Window_Until"
+ },
+ "window" : {
+ "$ref" : "#/components/schemas/Window"
+ },
+ "periods" : {
+ "title" : "Periods",
+ "type" : "integer",
+ "description" : "The size of the time window in number of `freq` units. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers."
+ },
+ "align" : {
+ "$ref" : "#/components/schemas/Alignment"
+ },
+ "data" : {
+ "title" : "Series specifications",
+ "type" : "array",
+ "description" : "List of series specifications. When not specified, a single default series specification is assumed(`[{}]`, using the default `metric`,`resource`, ... ).",
+ "items" : {
+ "$ref" : "#/components/schemas/SeriesSpec"
+ }
+ },
+ "render" : {
+ "$ref" : "#/components/schemas/Render"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Query definition for a Waylay analytics query.\n\nSee also [api docs](https://docs.waylay.io/#/api/query/?id=data-query-json-representation)."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Query-Output": _query_output_model_schema})
+
+_query_response_model_schema = json.loads(
+ r"""{
+ "required" : [ "_links", "attrs", "name", "query" ],
+ "type" : "object",
+ "properties" : {
+ "_links" : {
+ "$ref" : "#/components/schemas/QueryHALLinks"
+ },
+ "attrs" : {
+ "title" : "Query attributes",
+ "type" : "object",
+ "description" : "System provided metadata for the query definition."
+ },
+ "name" : {
+ "title" : "Query name",
+ "type" : "string",
+ "description" : "Name of the stored query definition."
+ },
+ "meta" : {
+ "title" : "Query metadata",
+ "type" : "object",
+ "description" : "User metadata for the query definition."
+ },
+ "query" : {
+ "$ref" : "#/components/schemas/Query-Output"
+ },
+ "messages" : {
+ "title" : "Messages",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/Message"
+ }
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Represents a single named query."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"QueryResponse": _query_response_model_schema})
+
+_query_result_model_schema = json.loads(
+ r"""{
+ "required" : [ "data", "messages", "query" ],
+ "type" : "object",
+ "properties" : {
+ "data" : {
+ "title" : "Response Data Sets",
+ "type" : "array",
+ "description" : "A list of data sets, each with their own time axis. There will be one dataset for each `role` specified in the query (by default a single `input` role).\n\nThe data is represented according to the `render` options in the query (default `COMPACT_WS`).",
+ "items" : {
+ "$ref" : "#/components/schemas/Response_Data_Set"
+ }
+ },
+ "query" : {
+ "$ref" : "#/components/schemas/Query-Input"
+ },
+ "messages" : {
+ "title" : "Messages and Warnings",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/QueryExecutionMessage"
+ }
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "A json data response.\n\nUses the format as specified by the\n`render` options of the request (defaults to `COMPACT_WS`).\n'",
+ "example" : {
+ "data" : [ ],
+ "query" : {
+ "resource" : "R",
+ "metric" : "temperature"
+ },
+ "messages" : [ ]
+ }
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"QueryResult": _query_result_model_schema})
+
+_query_update_input_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "meta" : {
+ "title" : "Query metadata",
+ "type" : "object",
+ "description" : "User metadata for the query definition."
+ },
+ "query" : {
+ "$ref" : "#/components/schemas/Query-Input"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Input data to update a query definition."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"QueryUpdateInput": _query_update_input_model_schema})
+
+_render_model_schema = json.loads(
+ r"""{
+ "title" : "Render",
+ "type" : "object",
+ "properties" : {
+ "mode" : {
+ "$ref" : "#/components/schemas/_RenderMode"
+ },
+ "roll_up" : {
+ "title" : "Roll Up",
+ "type" : "boolean",
+ "description" : "move up attributes on rows (or columns) that are the same for all rows (or columns) to a table attribute. Levels enumerated in 'hierarchical' are excluded."
+ },
+ "hierarchical" : {
+ "$ref" : "#/components/schemas/Hierarchical"
+ },
+ "value_key" : {
+ "title" : "Value Key",
+ "type" : "string",
+ "description" : "if set, use this key in the value object to report data values"
+ },
+ "show_levels" : {
+ "title" : "Show Levels",
+ "type" : "boolean",
+ "description" : "if set, report the levels used in the data values (either hierarchical or flat)"
+ },
+ "iso_timestamp" : {
+ "title" : "Iso Timestamp",
+ "type" : "boolean",
+ "description" : "if set, render timestamps in a row or column index with both epoch and iso representations"
+ },
+ "row_key" : {
+ "title" : "Row Key",
+ "type" : "string",
+ "description" : "if set, use this key as name of the row-dimension for single-dimensional rows"
+ },
+ "column_key" : {
+ "title" : "Column Key",
+ "type" : "string",
+ "description" : "if set, use this key as name of the column-dimension for single-dimensional columns"
+ },
+ "header_array" : {
+ "$ref" : "#/components/schemas/HeaderArrayOption"
+ },
+ "data_axis" : {
+ "$ref" : "#/components/schemas/DataAxisOption"
+ },
+ "key_seperator" : {
+ "title" : "Key Seperator",
+ "type" : "string",
+ "description" : "character used to concatenate multi-key columns or rows when required"
+ },
+ "key_skip_empty" : {
+ "title" : "Key Skip Empty",
+ "type" : "boolean",
+ "description" : "skip empty values in concatenating multi-key column or row headers"
+ },
+ "include_window_spec" : {
+ "title" : "Include Window Spec",
+ "type" : "boolean",
+ "description" : "if set, include window specification in render modes that support it"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Configures the representation of data sets returned by the query API."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Render": _render_model_schema})
+
+_render_1_model_schema = json.loads(
+ r"""{
+ "title" : "Render",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/_RenderMode"
+ }, {
+ "$ref" : "#/components/schemas/Render"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Render_1": _render_1_model_schema})
+
+_render_mode_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode",
+ "type" : "string",
+ "description" : "Render mode configuration keys.",
+ "oneOf" : [ {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_1"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_2"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_3"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_4"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_5"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_6"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_7"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_8"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_9"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"_RenderMode": _render_mode_model_schema})
+
+_render_mode_one_of_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf",
+ "type" : "string",
+ "description" : "Render rows of timestamp and values. Show column headers. Includes an iso timestamp.\n\n###### options\n- `iso_timestamp`: `True`\n- `header_array`: `row`\n- `roll_up`: `False`\n- `data_axis`: `column`",
+ "enum" : [ "HEADER_ROW" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"_RenderMode_oneOf": _render_mode_one_of_model_schema})
+
+_render_mode_one_of_1_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_1",
+ "type" : "string",
+ "description" : "Render rows of timestamp and values. Show column headers.\n\n###### options\n- `iso_timestamp`: `False`\n- `header_array`: `row`\n- `roll_up`: `False`\n- `data_axis`: `column`",
+ "enum" : [ "COMPACT" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"_RenderMode_oneOf_1": _render_mode_one_of_1_model_schema})
+
+_render_mode_one_of_2_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_2",
+ "type" : "string",
+ "description" : "Render rows of timestamp and values. Show column headers. Show the time window attributes.\n\n###### options\n- `iso_timestamp`: `False`\n- `header_array`: `row`\n- `roll_up`: `False`\n- `data_axis`: `column`\n- `include_window_spec`: `True`",
+ "enum" : [ "COMPACT_WS" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"_RenderMode_oneOf_2": _render_mode_one_of_2_model_schema})
+
+_render_mode_one_of_3_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_3",
+ "type" : "string",
+ "description" : "Render timestamps and each series (column) as a values array. Show column headers.\n\n###### options\n- `iso_timestamp`: `False`\n- `header_array`: `row`\n- `data_axis`: `row`\n- `roll_up`: `True`\n- `include_window_spec`: `True`",
+ "enum" : [ "SERIES" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"_RenderMode_oneOf_3": _render_mode_one_of_3_model_schema})
+
+_render_mode_one_of_4_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_4",
+ "type" : "string",
+ "description" : "Renders row index in `rows`, and each series as a values array.\n\nThe series are prefixed by their series attributes.The `rows` index is prefixed by the labels for these attributes.\n\n###### options\n- `iso_timestamp`: `True`\n- `header_array`: `column`\n- `roll_up`: `False`\n- `data_axis`: `row`",
+ "enum" : [ "HEADER_COLUMN" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"_RenderMode_oneOf_4": _render_mode_one_of_4_model_schema})
+
+_render_mode_one_of_5_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_5",
+ "type" : "string",
+ "description" : "Render an object for each observation. Uses flattened keys.\n\n###### options\n- `iso_timestamp`: `True`\n- `hierarchical`: `False`\n- `show_levels`: `True`\n- `roll_up`: `False`",
+ "enum" : [ "FLAT_DICT" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"_RenderMode_oneOf_5": _render_mode_one_of_5_model_schema})
+
+_render_mode_one_of_6_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_6",
+ "type" : "string",
+ "description" : "Render an hierarchical object for each observation. Shows an iso timestamp.\n\n###### options\n- `iso_timestamp`: `True`\n- `hierarchical`: `True`\n- `show_levels`: `True`\n- `roll_up`: `True`",
+ "enum" : [ "HIER_DICT" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"_RenderMode_oneOf_6": _render_mode_one_of_6_model_schema})
+
+_render_mode_one_of_7_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_7",
+ "type" : "string",
+ "description" : "Render an object with metric keys for each observation. Shows an iso timestamp.\n\n###### options\n- `iso_timestamp`: `True`\n- `hierarchical`: `['metric']`\n- `show_levels`: `False`\n- `roll_up`: `True`\n- `key_skip_empty`: `True`",
+ "enum" : [ "METRIC_FLAT_DICT" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"_RenderMode_oneOf_7": _render_mode_one_of_7_model_schema})
+
+_render_mode_one_of_8_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_8",
+ "type" : "string",
+ "description" : "Render in an object format compatible with the `/data/v1/events` upload.\n\n###### options\n- `iso_timestamp`: `False`\n- `hierarchical`: `False`\n- `show_levels`: `False`\n- `roll_up`: `True`",
+ "enum" : [ "UPLOAD" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"_RenderMode_oneOf_8": _render_mode_one_of_8_model_schema})
+
+_render_mode_one_of_9_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_9",
+ "type" : "string",
+ "description" : "Render in csv format with row headers.\n\n###### options\n- `iso_timestamp`: `False`",
+ "enum" : [ "CSV" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"_RenderMode_oneOf_9": _render_mode_one_of_9_model_schema})
+
+_response_data_set_model_schema = json.loads(
+ r"""{
+ "title" : "Response Data Set",
+ "description" : "Result timeseries data set, with one time dimension.",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/RowDataSet"
+ }, {
+ "$ref" : "#/components/schemas/SeriesDataSet"
+ }, {
+ "$ref" : "#/components/schemas/ColumnDataSet"
+ }, {
+ "$ref" : "#/components/schemas/ObjectDataSet"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Response_Data_Set": _response_data_set_model_schema})
+
+_row_data_set_model_schema = json.loads(
+ r"""{
+ "required" : [ "columns", "data" ],
+ "type" : "object",
+ "properties" : {
+ "attributes" : {
+ "$ref" : "#/components/schemas/DataSetAttributes"
+ },
+ "window_spec" : {
+ "$ref" : "#/components/schemas/DataSetWindow"
+ },
+ "data_axis" : {
+ "$ref" : "#/components/schemas/RowDataSet_data_axis"
+ },
+ "columns" : {
+ "title" : "Column Headers",
+ "type" : "array",
+ "description" : "Header Attributes for the column data.\n\nThe initial string-valued headers (normally a single `timestamp`) indicate that column to contain row index data (i.e. timestamps).\n\nThe remaining object-valued column headers identify and describe the actual series data.",
+ "items" : {
+ "$ref" : "#/components/schemas/Column_Headers_inner"
+ },
+ "x-prefixItems" : [ {
+ "const" : "timestamp",
+ "title" : "Unix epoch milliseconds timestamp."
+ } ]
+ },
+ "data" : {
+ "title" : "Data",
+ "type" : "array",
+ "items" : {
+ "title" : "Observation",
+ "type" : "array",
+ "description" : "Row index data (timestamp), and a value for each of the series.",
+ "items" : {
+ "$ref" : "#/components/schemas/Datum"
+ },
+ "x-prefixItems" : [ {
+ "$ref" : "#/components/schemas/Timestamp"
+ } ]
+ }
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Row-oriented dataset.\n\nTimeseries data layout with a column header and a data row per timestamp.\nResult for render options `data_axis=column` and `header_array=row`.\","
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"RowDataSet": _row_data_set_model_schema})
+
+_row_data_set_data_axis_model_schema = json.loads(
+ r"""{
+ "title" : "RowDataSet_data_axis",
+ "type" : "string",
+ "default" : "column",
+ "enum" : [ "column" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"RowDataSet_data_axis": _row_data_set_data_axis_model_schema})
+
+_row_header_model_schema = json.loads(
+ r"""{
+ "required" : [ "timestamp" ],
+ "type" : "object",
+ "properties" : {
+ "timestamp" : {
+ "$ref" : "#/components/schemas/Timestamp"
+ },
+ "timestamp_iso" : {
+ "$ref" : "#/components/schemas/TimestampIso"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Index entry attributes.\n\nAttributes for a timestamp index entry."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"RowHeader": _row_header_model_schema})
+
+_row_headers_inner_model_schema = json.loads(
+ r"""{
+ "title" : "Row_Headers_inner",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/ColumnIndexRowHeader"
+ }, {
+ "$ref" : "#/components/schemas/RowHeader"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Row_Headers_inner": _row_headers_inner_model_schema})
+
+_series_data_set_model_schema = json.loads(
+ r"""{
+ "required" : [ "columns", "data" ],
+ "type" : "object",
+ "properties" : {
+ "attributes" : {
+ "$ref" : "#/components/schemas/DataSetAttributes"
+ },
+ "window_spec" : {
+ "$ref" : "#/components/schemas/DataSetWindow"
+ },
+ "data_axis" : {
+ "$ref" : "#/components/schemas/ColumnDataSet_data_axis"
+ },
+ "columns" : {
+ "title" : "Column Headers",
+ "type" : "array",
+ "description" : "Header Attributes for the column data.\n\nThe initial string-valued headers (normally a single `timestamp`) indicate that column to contain row index data (i.e. timestamps).\n\nThe remaining object-valued column headers identify and describe the actual series data.",
+ "items" : {
+ "$ref" : "#/components/schemas/Column_Headers_inner"
+ },
+ "x-prefixItems" : [ {
+ "const" : "timestamp",
+ "title" : "Unix epoch milliseconds timestamp."
+ } ]
+ },
+ "data" : {
+ "title" : "Data",
+ "type" : "array",
+ "items" : {
+ "title" : "Series",
+ "type" : "array",
+ "description" : "All metric observation values for a single series.",
+ "items" : {
+ "$ref" : "#/components/schemas/Datum"
+ }
+ },
+ "x-prefixItems" : [ {
+ "items" : {
+ "$ref" : "#/components/schemas/Timestamp"
+ },
+ "type" : "array",
+ "title" : "Timestamp Index",
+ "description" : "The timestamp index for this result data."
+ } ]
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Column-oriented dataset.\n\nTimeseries data layout with a column header\nand a seperate data array for the time index and each series.\nResult for render options `data_axis=row` and `header_array=row`."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"SeriesDataSet": _series_data_set_model_schema})
+
+_series_spec_model_schema = json.loads(
+ r"""{
+ "title" : "SeriesSpec",
+ "type" : "object",
+ "properties" : {
+ "name" : {
+ "title" : "Name",
+ "type" : "string",
+ "description" : "Optional alias name for the series. This name is used when exporting the dataset to CSV format.",
+ "example" : "demoQuery"
+ },
+ "resource" : {
+ "title" : "Resource",
+ "type" : "string",
+ "description" : "Resource id for the series, required unless it is specified as a query default.",
+ "example" : "13efb488-75ac-4dac-828a-d49c5c2ebbfc"
+ },
+ "metric" : {
+ "title" : "Metric",
+ "type" : "string",
+ "description" : "Metric name for the series, required unless it is specified as a query default.",
+ "example" : "temperature"
+ },
+ "aggregration" : {
+ "$ref" : "#/components/schemas/AggregationMethod"
+ },
+ "interpolation" : {
+ "$ref" : "#/components/schemas/Interpolation"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Query specification for a single series."
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"SeriesSpec": _series_spec_model_schema})
+
+_time_window_from_model_schema = json.loads(
+ r"""{
+ "title" : "Time Window From",
+ "description" : "The start of the time window for which results will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.",
+ "oneOf" : [ {
+ "title" : "ISO8601 absolute timestamp",
+ "pattern" : "[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?",
+ "type" : "string",
+ "description" : "A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations) format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)",
+ "format" : "date-time",
+ "example" : "2018-03-21T12:23:00+01:00"
+ }, {
+ "title" : "UNIX epoch milliseconds",
+ "minimum" : 0,
+ "type" : "integer",
+ "description" : "Absolute timestamp milliseconds in unix epoch since 1970-01-01.",
+ "example" : 1534836422284
+ }, {
+ "title" : "ISO8601 Period Before Now",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "Specifies a timestamp before _now_ as a period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Time_Window_From": _time_window_from_model_schema})
+
+_time_window_until_model_schema = json.loads(
+ r"""{
+ "title" : "Time Window Until",
+ "description" : "The end (not-inclusive) of the time window for which results will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties)specifiers.",
+ "oneOf" : [ {
+ "title" : "ISO8601 absolute timestamp",
+ "pattern" : "[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?",
+ "type" : "string",
+ "description" : "A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations) format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)",
+ "format" : "date-time",
+ "example" : "2018-03-21T12:23:00+01:00"
+ }, {
+ "title" : "UNIX epoch milliseconds",
+ "minimum" : 0,
+ "type" : "integer",
+ "description" : "Absolute timestamp milliseconds in unix epoch since 1970-01-01.",
+ "example" : 1534836422284
+ }, {
+ "title" : "ISO8601 Period Before Now",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "Specifies a timestamp before _now_ as a period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Time_Window_Until": _time_window_until_model_schema})
+
+_validation_error_model_schema = json.loads(
+ r"""{
+ "title" : "ValidationError",
+ "required" : [ "loc", "msg", "type" ],
+ "type" : "object",
+ "properties" : {
+ "loc" : {
+ "title" : "Location",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/Location_inner"
+ }
+ },
+ "msg" : {
+ "title" : "Message",
+ "type" : "string"
+ },
+ "type" : {
+ "title" : "Error Type",
+ "type" : "string"
+ }
+ }
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"ValidationError": _validation_error_model_schema})
+
+_window_model_schema = json.loads(
+ r"""{
+ "title" : "Window",
+ "type" : "string",
+ "description" : "The absolute size of the time window for which results will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.",
+ "oneOf" : [ {
+ "title" : "ISO8601 period ",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Window": _window_model_schema})
+
+_window_override__model_schema = json.loads(
+ r"""{
+ "title" : "Window Override.",
+ "type" : "string",
+ "oneOf" : [ {
+ "title" : "ISO8601 period ",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+MODEL_DEFINITIONS.update({"Window_Override_": _window_override__model_schema})
diff --git a/test/types/__init__.py b/test/types/__init__.py
new file mode 100644
index 0000000..81e66c8
--- /dev/null
+++ b/test/types/__init__.py
@@ -0,0 +1 @@
+"""Waylay Query: timeseries queries (v1 protocol) REST model tests."""
diff --git a/test/types/__pycache__/__init__.cpython-311.pyc b/test/types/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..a05ed69
Binary files /dev/null and b/test/types/__pycache__/__init__.cpython-311.pyc differ
diff --git a/test/types/__pycache__/aggregation_method_stub.cpython-311.pyc b/test/types/__pycache__/aggregation_method_stub.cpython-311.pyc
new file mode 100644
index 0000000..5178bbc
Binary files /dev/null and b/test/types/__pycache__/aggregation_method_stub.cpython-311.pyc differ
diff --git a/test/types/__pycache__/delete_response_stub.cpython-311.pyc b/test/types/__pycache__/delete_response_stub.cpython-311.pyc
new file mode 100644
index 0000000..0ae2bc6
Binary files /dev/null and b/test/types/__pycache__/delete_response_stub.cpython-311.pyc differ
diff --git a/test/types/__pycache__/from_override_stub.cpython-311.pyc b/test/types/__pycache__/from_override_stub.cpython-311.pyc
new file mode 100644
index 0000000..23c48d9
Binary files /dev/null and b/test/types/__pycache__/from_override_stub.cpython-311.pyc differ
diff --git a/test/types/__pycache__/grouping_interval_override_stub.cpython-311.pyc b/test/types/__pycache__/grouping_interval_override_stub.cpython-311.pyc
new file mode 100644
index 0000000..182ceff
Binary files /dev/null and b/test/types/__pycache__/grouping_interval_override_stub.cpython-311.pyc differ
diff --git a/test/types/__pycache__/interpolation_stub.cpython-311.pyc b/test/types/__pycache__/interpolation_stub.cpython-311.pyc
new file mode 100644
index 0000000..e1cf8e4
Binary files /dev/null and b/test/types/__pycache__/interpolation_stub.cpython-311.pyc differ
diff --git a/test/types/__pycache__/queries_list_response_stub.cpython-311.pyc b/test/types/__pycache__/queries_list_response_stub.cpython-311.pyc
new file mode 100644
index 0000000..975eef8
Binary files /dev/null and b/test/types/__pycache__/queries_list_response_stub.cpython-311.pyc differ
diff --git a/test/types/__pycache__/query_definition_stub.cpython-311.pyc b/test/types/__pycache__/query_definition_stub.cpython-311.pyc
new file mode 100644
index 0000000..8f5314f
Binary files /dev/null and b/test/types/__pycache__/query_definition_stub.cpython-311.pyc differ
diff --git a/test/types/__pycache__/query_entity_input_stub.cpython-311.pyc b/test/types/__pycache__/query_entity_input_stub.cpython-311.pyc
new file mode 100644
index 0000000..e6b1a2c
Binary files /dev/null and b/test/types/__pycache__/query_entity_input_stub.cpython-311.pyc differ
diff --git a/test/types/__pycache__/query_input_stub.cpython-311.pyc b/test/types/__pycache__/query_input_stub.cpython-311.pyc
new file mode 100644
index 0000000..d9478c2
Binary files /dev/null and b/test/types/__pycache__/query_input_stub.cpython-311.pyc differ
diff --git a/test/types/__pycache__/query_response_stub.cpython-311.pyc b/test/types/__pycache__/query_response_stub.cpython-311.pyc
new file mode 100644
index 0000000..ae0f381
Binary files /dev/null and b/test/types/__pycache__/query_response_stub.cpython-311.pyc differ
diff --git a/test/types/__pycache__/query_result_stub.cpython-311.pyc b/test/types/__pycache__/query_result_stub.cpython-311.pyc
new file mode 100644
index 0000000..7296450
Binary files /dev/null and b/test/types/__pycache__/query_result_stub.cpython-311.pyc differ
diff --git a/test/types/__pycache__/render1_stub.cpython-311.pyc b/test/types/__pycache__/render1_stub.cpython-311.pyc
new file mode 100644
index 0000000..1976d1b
Binary files /dev/null and b/test/types/__pycache__/render1_stub.cpython-311.pyc differ
diff --git a/test/types/__pycache__/window_override_stub.cpython-311.pyc b/test/types/__pycache__/window_override_stub.cpython-311.pyc
new file mode 100644
index 0000000..48cb0cc
Binary files /dev/null and b/test/types/__pycache__/window_override_stub.cpython-311.pyc differ
diff --git a/test/types/aggregation_by_resource_and_metric_stub.py b/test/types/aggregation_by_resource_and_metric_stub.py
new file mode 100644
index 0000000..ca4d1b5
--- /dev/null
+++ b/test/types/aggregation_by_resource_and_metric_stub.py
@@ -0,0 +1,77 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.aggregation_by_resource_and_metric import (
+ AggregationByResourceAndMetric,
+ )
+
+ AggregationByResourceAndMetricAdapter = TypeAdapter(AggregationByResourceAndMetric)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+aggregation_by_resource_and_metric_model_schema = json.loads(
+ r"""{
+ "anyOf" : [ {
+ "title" : "Aggregation by Resource or Metric",
+ "type" : "object",
+ "additionalProperties" : {
+ "$ref" : "#/components/schemas/Aggregation_by_Resource_or_Metric"
+ },
+ "description" : "Aggregation methods specified per resource or metric.",
+ "nullable" : true
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+aggregation_by_resource_and_metric_model_schema.update({
+ "definitions": MODEL_DEFINITIONS
+})
+
+aggregation_by_resource_and_metric_faker = JSF(
+ aggregation_by_resource_and_metric_model_schema, allow_none_optionals=1
+)
+
+
+class AggregationByResourceAndMetricStub:
+ """AggregationByResourceAndMetric unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return aggregation_by_resource_and_metric_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "AggregationByResourceAndMetric":
+ """Create AggregationByResourceAndMetric stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AggregationByResourceAndMetricAdapter.json_schema(),
+ allow_none_optionals=1,
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AggregationByResourceAndMetricAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
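+
+
+# Illustrative usage sketch (not part of the generated stub): how a test could build
+# both a raw payload and, when the models are installed, a validated instance.
+def _example_stub_roundtrip():
+    """Build a stub payload and, if possible, a model instance (illustration only)."""
+    payload = AggregationByResourceAndMetricStub.create_json()
+    instance = (
+        AggregationByResourceAndMetricStub.create_instance()
+        if MODELS_AVAILABLE
+        else None
+    )
+    return payload, instance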
diff --git a/test/types/aggregation_by_resource_or_metric_stub.py b/test/types/aggregation_by_resource_or_metric_stub.py
new file mode 100644
index 0000000..ccd6b14
--- /dev/null
+++ b/test/types/aggregation_by_resource_or_metric_stub.py
@@ -0,0 +1,79 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.aggregation_by_resource_or_metric import (
+ AggregationByResourceOrMetric,
+ )
+
+ AggregationByResourceOrMetricAdapter = TypeAdapter(AggregationByResourceOrMetric)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+aggregation_by_resource_or_metric_model_schema = json.loads(
+ r"""{
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/AggregationMethod"
+ }, {
+ "title" : "Aggregations",
+ "type" : "array",
+ "description" : "Aggregation methods, leading to sepearate series.",
+ "nullable" : true,
+ "items" : {
+ "$ref" : "#/components/schemas/Aggregations_inner"
+ }
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+aggregation_by_resource_or_metric_model_schema.update({
+ "definitions": MODEL_DEFINITIONS
+})
+
+aggregation_by_resource_or_metric_faker = JSF(
+ aggregation_by_resource_or_metric_model_schema, allow_none_optionals=1
+)
+
+
+class AggregationByResourceOrMetricStub:
+ """AggregationByResourceOrMetric unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return aggregation_by_resource_or_metric_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "AggregationByResourceOrMetric":
+ """Create AggregationByResourceOrMetric stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AggregationByResourceOrMetricAdapter.json_schema(),
+ allow_none_optionals=1,
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AggregationByResourceOrMetricAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/aggregation_method_one_of1_stub.py b/test/types/aggregation_method_one_of1_stub.py
new file mode 100644
index 0000000..72a5f81
--- /dev/null
+++ b/test/types/aggregation_method_one_of1_stub.py
@@ -0,0 +1,69 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.aggregation_method_one_of1 import (
+ AggregationMethodOneOf1,
+ )
+
+ AggregationMethodOneOf1Adapter = TypeAdapter(AggregationMethodOneOf1)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+aggregation_method_one_of_1_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_1",
+ "type" : "string",
+ "description" : "Use the last value (in time) to represent all data for the sample interval.",
+ "enum" : [ "last" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+aggregation_method_one_of_1_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+aggregation_method_one_of_1_faker = JSF(
+ aggregation_method_one_of_1_model_schema, allow_none_optionals=1
+)
+
+
+class AggregationMethodOneOf1Stub:
+ """AggregationMethodOneOf1 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return aggregation_method_one_of_1_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "AggregationMethodOneOf1":
+ """Create AggregationMethodOneOf1 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AggregationMethodOneOf1Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AggregationMethodOneOf1Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/aggregation_method_one_of2_stub.py b/test/types/aggregation_method_one_of2_stub.py
new file mode 100644
index 0000000..1abb11a
--- /dev/null
+++ b/test/types/aggregation_method_one_of2_stub.py
@@ -0,0 +1,69 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.aggregation_method_one_of2 import (
+ AggregationMethodOneOf2,
+ )
+
+ AggregationMethodOneOf2Adapter = TypeAdapter(AggregationMethodOneOf2)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+aggregation_method_one_of_2_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_2",
+ "type" : "string",
+ "description" : "Aggregate data by the mean value: The sum of values divided by number of observations.",
+ "enum" : [ "mean" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+aggregation_method_one_of_2_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+aggregation_method_one_of_2_faker = JSF(
+ aggregation_method_one_of_2_model_schema, allow_none_optionals=1
+)
+
+
+class AggregationMethodOneOf2Stub:
+ """AggregationMethodOneOf2 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return aggregation_method_one_of_2_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "AggregationMethodOneOf2":
+ """Create AggregationMethodOneOf2 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AggregationMethodOneOf2Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AggregationMethodOneOf2Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/aggregation_method_one_of3_stub.py b/test/types/aggregation_method_one_of3_stub.py
new file mode 100644
index 0000000..880b45e
--- /dev/null
+++ b/test/types/aggregation_method_one_of3_stub.py
@@ -0,0 +1,69 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.aggregation_method_one_of3 import (
+ AggregationMethodOneOf3,
+ )
+
+ AggregationMethodOneOf3Adapter = TypeAdapter(AggregationMethodOneOf3)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+aggregation_method_one_of_3_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_3",
+ "type" : "string",
+ "description" : "Aggregate data by the median value: The n/2-th value when ordered, the average of the (n-1)/2-th and (n+1)/2-th value when n is uneven.",
+ "enum" : [ "median" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+aggregation_method_one_of_3_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+aggregation_method_one_of_3_faker = JSF(
+ aggregation_method_one_of_3_model_schema, allow_none_optionals=1
+)
+
+
+class AggregationMethodOneOf3Stub:
+ """AggregationMethodOneOf3 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return aggregation_method_one_of_3_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "AggregationMethodOneOf3":
+ """Create AggregationMethodOneOf3 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AggregationMethodOneOf3Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AggregationMethodOneOf3Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/aggregation_method_one_of4_stub.py b/test/types/aggregation_method_one_of4_stub.py
new file mode 100644
index 0000000..40187cb
--- /dev/null
+++ b/test/types/aggregation_method_one_of4_stub.py
@@ -0,0 +1,69 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.aggregation_method_one_of4 import (
+ AggregationMethodOneOf4,
+ )
+
+ AggregationMethodOneOf4Adapter = TypeAdapter(AggregationMethodOneOf4)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+aggregation_method_one_of_4_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_4",
+ "type" : "string",
+ "description" : "The sum of all values summarizes the data for the sample interval.",
+ "enum" : [ "sum" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+aggregation_method_one_of_4_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+aggregation_method_one_of_4_faker = JSF(
+ aggregation_method_one_of_4_model_schema, allow_none_optionals=1
+)
+
+
+class AggregationMethodOneOf4Stub:
+ """AggregationMethodOneOf4 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return aggregation_method_one_of_4_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "AggregationMethodOneOf4":
+ """Create AggregationMethodOneOf4 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AggregationMethodOneOf4Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AggregationMethodOneOf4Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/aggregation_method_one_of5_stub.py b/test/types/aggregation_method_one_of5_stub.py
new file mode 100644
index 0000000..309932b
--- /dev/null
+++ b/test/types/aggregation_method_one_of5_stub.py
@@ -0,0 +1,69 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.aggregation_method_one_of5 import (
+ AggregationMethodOneOf5,
+ )
+
+ AggregationMethodOneOf5Adapter = TypeAdapter(AggregationMethodOneOf5)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+aggregation_method_one_of_5_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_5",
+ "type" : "string",
+ "description" : "Use the count of observations in the sample interval.",
+ "enum" : [ "count" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+aggregation_method_one_of_5_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+aggregation_method_one_of_5_faker = JSF(
+ aggregation_method_one_of_5_model_schema, allow_none_optionals=1
+)
+
+
+class AggregationMethodOneOf5Stub:
+ """AggregationMethodOneOf5 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return aggregation_method_one_of_5_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "AggregationMethodOneOf5":
+ """Create AggregationMethodOneOf5 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AggregationMethodOneOf5Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AggregationMethodOneOf5Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/aggregation_method_one_of6_stub.py b/test/types/aggregation_method_one_of6_stub.py
new file mode 100644
index 0000000..0cb80fe
--- /dev/null
+++ b/test/types/aggregation_method_one_of6_stub.py
@@ -0,0 +1,69 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.aggregation_method_one_of6 import (
+ AggregationMethodOneOf6,
+ )
+
+ AggregationMethodOneOf6Adapter = TypeAdapter(AggregationMethodOneOf6)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+aggregation_method_one_of_6_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_6",
+ "type" : "string",
+ "description" : "Use the standard deviation of all observations in the sample interval.",
+ "enum" : [ "std" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+aggregation_method_one_of_6_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+aggregation_method_one_of_6_faker = JSF(
+ aggregation_method_one_of_6_model_schema, allow_none_optionals=1
+)
+
+
+class AggregationMethodOneOf6Stub:
+ """AggregationMethodOneOf6 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return aggregation_method_one_of_6_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "AggregationMethodOneOf6":
+ """Create AggregationMethodOneOf6 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AggregationMethodOneOf6Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AggregationMethodOneOf6Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/aggregation_method_one_of7_stub.py b/test/types/aggregation_method_one_of7_stub.py
new file mode 100644
index 0000000..4de5489
--- /dev/null
+++ b/test/types/aggregation_method_one_of7_stub.py
@@ -0,0 +1,69 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.aggregation_method_one_of7 import (
+ AggregationMethodOneOf7,
+ )
+
+ AggregationMethodOneOf7Adapter = TypeAdapter(AggregationMethodOneOf7)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+aggregation_method_one_of_7_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_7",
+ "type" : "string",
+ "description" : "Use the maximum of all values in the sample interval.",
+ "enum" : [ "max" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+aggregation_method_one_of_7_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+aggregation_method_one_of_7_faker = JSF(
+ aggregation_method_one_of_7_model_schema, allow_none_optionals=1
+)
+
+
+class AggregationMethodOneOf7Stub:
+ """AggregationMethodOneOf7 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return aggregation_method_one_of_7_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "AggregationMethodOneOf7":
+ """Create AggregationMethodOneOf7 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AggregationMethodOneOf7Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AggregationMethodOneOf7Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/aggregation_method_one_of8_stub.py b/test/types/aggregation_method_one_of8_stub.py
new file mode 100644
index 0000000..ba8f966
--- /dev/null
+++ b/test/types/aggregation_method_one_of8_stub.py
@@ -0,0 +1,69 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.aggregation_method_one_of8 import (
+ AggregationMethodOneOf8,
+ )
+
+ AggregationMethodOneOf8Adapter = TypeAdapter(AggregationMethodOneOf8)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+aggregation_method_one_of_8_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf_8",
+ "type" : "string",
+ "description" : "Use the minimum of all values in the sample interval.",
+ "enum" : [ "min" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+aggregation_method_one_of_8_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+aggregation_method_one_of_8_faker = JSF(
+ aggregation_method_one_of_8_model_schema, allow_none_optionals=1
+)
+
+
+class AggregationMethodOneOf8Stub:
+ """AggregationMethodOneOf8 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return aggregation_method_one_of_8_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "AggregationMethodOneOf8":
+ """Create AggregationMethodOneOf8 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AggregationMethodOneOf8Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AggregationMethodOneOf8Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/aggregation_method_one_of_stub.py b/test/types/aggregation_method_one_of_stub.py
new file mode 100644
index 0000000..60561e6
--- /dev/null
+++ b/test/types/aggregation_method_one_of_stub.py
@@ -0,0 +1,69 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.aggregation_method_one_of import (
+ AggregationMethodOneOf,
+ )
+
+ AggregationMethodOneOfAdapter = TypeAdapter(AggregationMethodOneOf)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+aggregation_method_one_of_model_schema = json.loads(
+ r"""{
+ "title" : "AggregationMethod_oneOf",
+ "type" : "string",
+ "description" : "Use the first value (in time) to represent all data for the sample interval.",
+ "enum" : [ "first" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+aggregation_method_one_of_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+aggregation_method_one_of_faker = JSF(
+ aggregation_method_one_of_model_schema, allow_none_optionals=1
+)
+
+
+class AggregationMethodOneOfStub:
+ """AggregationMethodOneOf unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return aggregation_method_one_of_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "AggregationMethodOneOf":
+ """Create AggregationMethodOneOf stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AggregationMethodOneOfAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AggregationMethodOneOfAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/aggregation_method_stub.py b/test/types/aggregation_method_stub.py
new file mode 100644
index 0000000..4d5f54e
--- /dev/null
+++ b/test/types/aggregation_method_stub.py
@@ -0,0 +1,80 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.aggregation_method import AggregationMethod
+
+ AggregationMethodAdapter = TypeAdapter(AggregationMethod)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+aggregation_method_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "nullable" : true,
+ "oneOf" : [ {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_1"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_2"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_3"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_4"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_5"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_6"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_7"
+ }, {
+ "$ref" : "#/components/schemas/AggregationMethod_oneOf_8"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+aggregation_method_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+aggregation_method_faker = JSF(aggregation_method_model_schema, allow_none_optionals=1)
+
+
+class AggregationMethodStub:
+ """AggregationMethod unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return aggregation_method_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "AggregationMethod":
+ """Create AggregationMethod stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AggregationMethodAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AggregationMethodAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
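+
+
+# Illustrative only (hand-written, not generated): AggregationMethod is a nullable
+# oneOf over the single-valued enums "first", "last", "mean", "median", "sum",
+# "count", "std", "max" and "min", so a plain string such as "mean" should validate.
+def _example_validate_aggregation_method():
+    """Validate a literal aggregation method against the generated adapter."""
+    if not MODELS_AVAILABLE:
+        raise ImportError("Models must be installed to run this example")
+    return AggregationMethodAdapter.validate_python(
+        "mean", context={"skip_validation": True}
+    )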
diff --git a/test/types/aggregations_inner_stub.py b/test/types/aggregations_inner_stub.py
new file mode 100644
index 0000000..6d77087
--- /dev/null
+++ b/test/types/aggregations_inner_stub.py
@@ -0,0 +1,62 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.aggregations_inner import AggregationsInner
+
+ AggregationsInnerAdapter = TypeAdapter(AggregationsInner)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+aggregations_inner_model_schema = json.loads(
+ r"""{
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/AggregationMethod"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+aggregations_inner_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+aggregations_inner_faker = JSF(aggregations_inner_model_schema, allow_none_optionals=1)
+
+
+class AggregationsInnerStub:
+ """AggregationsInner unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return aggregations_inner_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "AggregationsInner":
+ """Create AggregationsInner stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AggregationsInnerAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AggregationsInnerAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/align_at_stub.py b/test/types/align_at_stub.py
new file mode 100644
index 0000000..3aecf35
--- /dev/null
+++ b/test/types/align_at_stub.py
@@ -0,0 +1,58 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.align_at import AlignAt
+
+ AlignAtAdapter = TypeAdapter(AlignAt)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+align_at_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Possible values for `align.at`.\n\n* 'grid' Align to a fixed grid (possibly using timezone information)\n* 'from' Align a the `from` boundary\n* 'until' Align a the `until` boundary\n* 'boundary' Align a the `from` boundary if specified,\n otherwise the `until` boundary.\n\nWhen not specified, 'grid' is used.",
+ "enum" : [ "grid", "boundary", "from", "until" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+align_at_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+align_at_faker = JSF(align_at_model_schema, allow_none_optionals=1)
+
+
+class AlignAtStub:
+ """AlignAt unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return align_at_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "AlignAt":
+ """Create AlignAt stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(AlignAtAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AlignAtAdapter.validate_python(json, context={"skip_validation": True})
diff --git a/test/types/align_shift_stub.py b/test/types/align_shift_stub.py
new file mode 100644
index 0000000..8534d66
--- /dev/null
+++ b/test/types/align_shift_stub.py
@@ -0,0 +1,60 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.align_shift import AlignShift
+
+ AlignShiftAdapter = TypeAdapter(AlignShift)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+align_shift_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Possible values for `align.shift`.\n\n* 'backward': keep the window size of the original interval specification,\n shifting back.\n* 'forward': keep the window size of the original interval specification,\n shifting forward.\n* 'wrap': enlarge the window size to include all of the original interval.\n\nWhen not specified, 'backward' is used.",
+ "enum" : [ "backward", "forward", "wrap" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+align_shift_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+align_shift_faker = JSF(align_shift_model_schema, allow_none_optionals=1)
+
+
+class AlignShiftStub:
+ """AlignShift unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return align_shift_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "AlignShift":
+ """Create AlignShift stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(AlignShiftAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AlignShiftAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/alignment_grid_interval_stub.py b/test/types/alignment_grid_interval_stub.py
new file mode 100644
index 0000000..a8f4aa3
--- /dev/null
+++ b/test/types/alignment_grid_interval_stub.py
@@ -0,0 +1,78 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.alignment_grid_interval import (
+ AlignmentGridInterval,
+ )
+
+ AlignmentGridIntervalAdapter = TypeAdapter(AlignmentGridInterval)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+alignment_grid_interval__model_schema = json.loads(
+ r"""{
+ "title" : "Alignment Grid interval.",
+ "type" : "string",
+ "description" : "\nDefines the grid used to align the aggregation window.\nThe window will align at whole-unit multiples of this interval.\n\nFor intervals like `PT1D`, that are timezone-dependent, use the \n`align.timezone` to fix the absolute timestamp of the grid boundaries.\n\nIf not specified, defaults to the `freq` aggregation interval.\n",
+ "oneOf" : [ {
+ "title" : "ISO8601 period ",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ }, {
+ "$ref" : "#/components/schemas/Grouping_Interval_Override_oneOf"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+alignment_grid_interval__model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+alignment_grid_interval__faker = JSF(
+ alignment_grid_interval__model_schema, allow_none_optionals=1
+)
+
+
+class AlignmentGridIntervalStub:
+ """AlignmentGridInterval unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return alignment_grid_interval__faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "AlignmentGridInterval":
+ """Create AlignmentGridInterval stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AlignmentGridIntervalAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AlignmentGridIntervalAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/alignment_stub.py b/test/types/alignment_stub.py
new file mode 100644
index 0000000..90e1483
--- /dev/null
+++ b/test/types/alignment_stub.py
@@ -0,0 +1,72 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.alignment import Alignment
+
+ AlignmentAdapter = TypeAdapter(Alignment)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+alignment_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "at" : {
+ "$ref" : "#/components/schemas/AlignAt"
+ },
+ "shift" : {
+ "$ref" : "#/components/schemas/AlignShift"
+ },
+ "freq" : {
+ "$ref" : "#/components/schemas/Alignment_Grid_interval_"
+ },
+ "timezone" : {
+ "$ref" : "#/components/schemas/Alignment_Timezone_"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Aggregation Alignment Options.\n\nSpecifies how the aggregation grid is aligned."
+}
+""",
+ object_hook=with_example_provider,
+)
+alignment_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+alignment_faker = JSF(alignment_model_schema, allow_none_optionals=1)
+
+
+class AlignmentStub:
+ """Alignment unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return alignment_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "Alignment":
+ """Create Alignment stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(AlignmentAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AlignmentAdapter.validate_python(json, context={"skip_validation": True})
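+
+
+# Illustrative only (hand-written, not generated): a minimal Alignment payload
+# built from the schema above. The property names (`at`, `shift`, `freq`,
+# `timezone`) come from the schema; the concrete values are merely examples.
+EXAMPLE_ALIGNMENT_JSON = {
+    "at": "grid",  # align to a fixed grid
+    "shift": "backward",  # keep the window size, shifting back
+    "freq": "P1D",  # one-day alignment grid (ISO 8601 period)
+    "timezone": "Europe/Brussels",  # IANA timezone identifier
+}
+
+
+def _example_validate_alignment():
+    """Validate the example payload when the models package is installed."""
+    if not MODELS_AVAILABLE:
+        raise ImportError("Models must be installed to run this example")
+    return AlignmentAdapter.validate_python(
+        EXAMPLE_ALIGNMENT_JSON, context={"skip_validation": True}
+    )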
diff --git a/test/types/alignment_timezone_stub.py b/test/types/alignment_timezone_stub.py
new file mode 100644
index 0000000..86b3a02
--- /dev/null
+++ b/test/types/alignment_timezone_stub.py
@@ -0,0 +1,74 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.alignment_timezone import AlignmentTimezone
+
+ AlignmentTimezoneAdapter = TypeAdapter(AlignmentTimezone)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+alignment_timezone__model_schema = json.loads(
+ r"""{
+ "title" : "Alignment Timezone.",
+ "type" : "string",
+ "description" : "\nThe timezone to use when shifting boundaries, especially\nat day granularity.\nAlso affects the rendering of timestamps when\n`render.iso_timestamp` is enabled.\n\nWhen not specified, the `UTC` timezone is used.\n",
+ "oneOf" : [ {
+ "title" : "Timezone Identifier",
+ "type" : "string",
+ "description" : "[ICANN timezone identifier](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones)"
+ }, {
+ "title" : "UTC Offset",
+ "pattern" : "(+|-)\\d\\d:\\d\\d",
+ "type" : "string",
+ "description" : "[UTC offset](https://en.wikipedia.org/wiki/UTC_offset)"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+alignment_timezone__model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+alignment_timezone__faker = JSF(
+ alignment_timezone__model_schema, allow_none_optionals=1
+)
+
+
+class AlignmentTimezoneStub:
+ """AlignmentTimezone unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return alignment_timezone__faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "AlignmentTimezone":
+ """Create AlignmentTimezone stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ AlignmentTimezoneAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return AlignmentTimezoneAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/cause_exception_stub.py b/test/types/cause_exception_stub.py
new file mode 100644
index 0000000..50a9da4
--- /dev/null
+++ b/test/types/cause_exception_stub.py
@@ -0,0 +1,80 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.cause_exception import CauseException
+
+ CauseExceptionAdapter = TypeAdapter(CauseException)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+cause_exception_model_schema = json.loads(
+ r"""{
+ "required" : [ "message", "stacktrace", "type" ],
+ "type" : "object",
+ "properties" : {
+ "type" : {
+ "title" : "Exception Type",
+ "type" : "string"
+ },
+ "message" : {
+ "title" : "Exception Message",
+ "type" : "string"
+ },
+ "stacktrace" : {
+ "title" : "Stack Trace",
+ "type" : "array",
+ "items" : {
+ "type" : "string"
+ }
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Describes the exception that caused a message."
+}
+""",
+ object_hook=with_example_provider,
+)
+cause_exception_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+cause_exception_faker = JSF(cause_exception_model_schema, allow_none_optionals=1)
+
+
+class CauseExceptionStub:
+ """CauseException unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return cause_exception_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "CauseException":
+ """Create CauseException stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ CauseExceptionAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return CauseExceptionAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
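+
+
+# Illustrative only (hand-written, not generated): a minimal payload with the three
+# required properties of the CauseException schema above; all values are made up.
+EXAMPLE_CAUSE_EXCEPTION_JSON = {
+    "type": "ValueError",
+    "message": "invalid aggregation method",
+    "stacktrace": ["query.py, line 12, in resolve", "ValueError: invalid aggregation method"],
+}
+
+
+def _example_validate_cause_exception():
+    """Validate the example payload when the models package is installed."""
+    if not MODELS_AVAILABLE:
+        raise ImportError("Models must be installed to run this example")
+    return CauseExceptionAdapter.validate_python(
+        EXAMPLE_CAUSE_EXCEPTION_JSON, context={"skip_validation": True}
+    )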
diff --git a/test/types/column_data_set_data_axis_stub.py b/test/types/column_data_set_data_axis_stub.py
new file mode 100644
index 0000000..cd85c8b
--- /dev/null
+++ b/test/types/column_data_set_data_axis_stub.py
@@ -0,0 +1,69 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.column_data_set_data_axis import (
+ ColumnDataSetDataAxis,
+ )
+
+ ColumnDataSetDataAxisAdapter = TypeAdapter(ColumnDataSetDataAxis)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+column_data_set_data_axis_model_schema = json.loads(
+ r"""{
+ "title" : "ColumnDataSet_data_axis",
+ "type" : "string",
+ "default" : "row",
+ "enum" : [ "row" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+column_data_set_data_axis_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+column_data_set_data_axis_faker = JSF(
+ column_data_set_data_axis_model_schema, allow_none_optionals=1
+)
+
+
+class ColumnDataSetDataAxisStub:
+ """ColumnDataSetDataAxis unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return column_data_set_data_axis_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "ColumnDataSetDataAxis":
+ """Create ColumnDataSetDataAxis stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ ColumnDataSetDataAxisAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return ColumnDataSetDataAxisAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/column_data_set_stub.py b/test/types/column_data_set_stub.py
new file mode 100644
index 0000000..9e3d668
--- /dev/null
+++ b/test/types/column_data_set_stub.py
@@ -0,0 +1,93 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.column_data_set import ColumnDataSet
+
+ ColumnDataSetAdapter = TypeAdapter(ColumnDataSet)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+column_data_set_model_schema = json.loads(
+ r"""{
+ "required" : [ "data", "rows" ],
+ "type" : "object",
+ "properties" : {
+ "attributes" : {
+ "$ref" : "#/components/schemas/DataSetAttributes"
+ },
+ "window_spec" : {
+ "$ref" : "#/components/schemas/DataSetWindow"
+ },
+ "data_axis" : {
+ "$ref" : "#/components/schemas/ColumnDataSet_data_axis"
+ },
+ "rows" : {
+ "title" : "Row Headers",
+ "type" : "array",
+ "description" : "Header Attributes for the index data.\n\nThe initial string-valued headers (normally `resource`, `metric`,`aggregation`) indicate that row to contain series attributes.\n\nThe remaining object-valued row headers contain the index data.",
+ "items" : {
+ "$ref" : "#/components/schemas/Row_Headers_inner"
+ }
+ },
+ "data" : {
+ "title" : "Series",
+ "type" : "array",
+ "description" : "All metric observation values for a single series. Prefixed by the series attributes.",
+ "items" : {
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/Datum"
+ }
+ }
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Column-oriented dataset with rows header.\n\nTimeseries data layout with a rows header containing\nthe index data.\nThe data array contains series data prefixed by series attributes.\nThe `rows` index is prefix by the names of these series attributes.\nResult for render options `data_axis=row` and `header_array=column`."
+}
+""",
+ object_hook=with_example_provider,
+)
+column_data_set_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+column_data_set_faker = JSF(column_data_set_model_schema, allow_none_optionals=1)
+
+
+class ColumnDataSetStub:
+ """ColumnDataSet unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return column_data_set_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "ColumnDataSet":
+ """Create ColumnDataSet stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ ColumnDataSetAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return ColumnDataSetAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/column_header_stub.py b/test/types/column_header_stub.py
new file mode 100644
index 0000000..ae011cc
--- /dev/null
+++ b/test/types/column_header_stub.py
@@ -0,0 +1,77 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.column_header import ColumnHeader
+
+ ColumnHeaderAdapter = TypeAdapter(ColumnHeader)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+column_header_model_schema = json.loads(
+ r"""{
+ "required" : [ "metric", "resource" ],
+ "type" : "object",
+ "properties" : {
+ "resource" : {
+ "title" : "Series resource id",
+ "type" : "string"
+ },
+ "metric" : {
+ "title" : "Series metric",
+ "type" : "string"
+ },
+ "aggregation" : {
+ "title" : "Aggregation applied to the series.",
+ "type" : "string"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Column attributes.\n\nAttributes that identify and describe the data in this column."
+}
+""",
+ object_hook=with_example_provider,
+)
+column_header_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+column_header_faker = JSF(column_header_model_schema, allow_none_optionals=1)
+
+
+class ColumnHeaderStub:
+ """ColumnHeader unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return column_header_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "ColumnHeader":
+ """Create ColumnHeader stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ ColumnHeaderAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return ColumnHeaderAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/column_headers_inner_stub.py b/test/types/column_headers_inner_stub.py
new file mode 100644
index 0000000..bdf14a0
--- /dev/null
+++ b/test/types/column_headers_inner_stub.py
@@ -0,0 +1,67 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.column_headers_inner import ColumnHeadersInner
+
+ ColumnHeadersInnerAdapter = TypeAdapter(ColumnHeadersInner)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+column_headers_inner_model_schema = json.loads(
+ r"""{
+ "title" : "Column_Headers_inner",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/RowIndexColumnHeader"
+ }, {
+ "$ref" : "#/components/schemas/ColumnHeader"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+column_headers_inner_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+column_headers_inner_faker = JSF(
+ column_headers_inner_model_schema, allow_none_optionals=1
+)
+
+
+class ColumnHeadersInnerStub:
+ """ColumnHeadersInner unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return column_headers_inner_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "ColumnHeadersInner":
+ """Create ColumnHeadersInner stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ ColumnHeadersInnerAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return ColumnHeadersInnerAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/data_axis_option_stub.py b/test/types/data_axis_option_stub.py
new file mode 100644
index 0000000..a362d11
--- /dev/null
+++ b/test/types/data_axis_option_stub.py
@@ -0,0 +1,62 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.data_axis_option import DataAxisOption
+
+ DataAxisOptionAdapter = TypeAdapter(DataAxisOption)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+data_axis_option_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Allowed values for the render.data_axis option.",
+ "enum" : [ "row", "column" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+data_axis_option_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+data_axis_option_faker = JSF(data_axis_option_model_schema, allow_none_optionals=1)
+
+
+class DataAxisOptionStub:
+ """DataAxisOption unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return data_axis_option_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "DataAxisOption":
+ """Create DataAxisOption stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ DataAxisOptionAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return DataAxisOptionAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/data_set_attributes_stub.py b/test/types/data_set_attributes_stub.py
new file mode 100644
index 0000000..0258bc6
--- /dev/null
+++ b/test/types/data_set_attributes_stub.py
@@ -0,0 +1,70 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.data_set_attributes import DataSetAttributes
+
+ DataSetAttributesAdapter = TypeAdapter(DataSetAttributes)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+data_set_attributes_model_schema = json.loads(
+ r"""{
+ "title" : "DataSetAttributes",
+ "type" : "object",
+ "properties" : {
+ "role" : {
+ "$ref" : "#/components/schemas/Role"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Data Set Attributes.\n\nData attributes that apply to all data in this set."
+}
+""",
+ object_hook=with_example_provider,
+)
+data_set_attributes_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+data_set_attributes_faker = JSF(
+ data_set_attributes_model_schema, allow_none_optionals=1
+)
+
+
+class DataSetAttributesStub:
+ """DataSetAttributes unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return data_set_attributes_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "DataSetAttributes":
+ """Create DataSetAttributes stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ DataSetAttributesAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return DataSetAttributesAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/data_set_window_stub.py b/test/types/data_set_window_stub.py
new file mode 100644
index 0000000..2558bef
--- /dev/null
+++ b/test/types/data_set_window_stub.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.data_set_window import DataSetWindow
+
+ DataSetWindowAdapter = TypeAdapter(DataSetWindow)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+data_set_window_model_schema = json.loads(
+ r"""{
+ "title" : "DataSetWindow",
+ "required" : [ "freq", "until", "window" ],
+ "type" : "object",
+ "properties" : {
+ "until" : {
+ "title" : "Time Axis End",
+ "type" : "integer",
+ "description" : "Exclusive higher bound of the time axis in unix epoch milliseconds."
+ },
+ "window" : {
+ "title" : "Time Axis Length",
+ "type" : "string",
+ "description" : "Time axis length as ISO8601 period.",
+ "format" : "period"
+ },
+ "freq" : {
+ "title" : "Frequency",
+ "type" : "string",
+ "description" : "Time axis aggregation interval as an ISO8601 period .",
+ "format" : "period"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Data Window.\n\nStatistics of the time axis of a data set.\nPresent with render option `include_window_spec=true`.\","
+}
+""",
+ object_hook=with_example_provider,
+)
+data_set_window_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+data_set_window_faker = JSF(data_set_window_model_schema, allow_none_optionals=1)
+
+
+class DataSetWindowStub:
+ """DataSetWindow unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return data_set_window_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "DataSetWindow":
+ """Create DataSetWindow stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ DataSetWindowAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return DataSetWindowAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/datum_stub.py b/test/types/datum_stub.py
new file mode 100644
index 0000000..1365a38
--- /dev/null
+++ b/test/types/datum_stub.py
@@ -0,0 +1,67 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.datum import Datum
+
+ DatumAdapter = TypeAdapter(Datum)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+datum_model_schema = json.loads(
+ r"""{
+ "title" : "Datum",
+ "description" : "A single metric value for a timeseries.\n\nA null value indicates that no (aggregated/interpolated) value exists for the corresponding timestamp.",
+ "oneOf" : [ {
+ "type" : "number",
+ "nullable" : true
+ }, {
+ "type" : "string",
+ "nullable" : true
+ }, {
+ "type" : "boolean",
+ "nullable" : true
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+datum_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+datum_faker = JSF(datum_model_schema, allow_none_optionals=1)
+
+
+class DatumStub:
+ """Datum unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return datum_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "Datum":
+ """Create Datum stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(DatumAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return DatumAdapter.validate_python(json, context={"skip_validation": True})
diff --git a/test/types/default_aggregation_stub.py b/test/types/default_aggregation_stub.py
new file mode 100644
index 0000000..6120d1f
--- /dev/null
+++ b/test/types/default_aggregation_stub.py
@@ -0,0 +1,90 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.default_aggregation import DefaultAggregation
+
+ DefaultAggregationAdapter = TypeAdapter(DefaultAggregation)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+default_aggregation_model_schema = json.loads(
+ r"""{
+ "title" : "Default Aggregation",
+ "description" : "Default aggregation method(s) for the series in the query.",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/AggregationMethod"
+ }, {
+ "title" : "Aggregations",
+ "type" : "array",
+ "description" : "Aggregation methods, leading to sepearate series.",
+ "nullable" : true,
+ "items" : {
+ "$ref" : "#/components/schemas/Aggregations_inner"
+ }
+ }, {
+ "title" : "Aggregation by Resource or Metric",
+ "type" : "object",
+ "additionalProperties" : {
+ "$ref" : "#/components/schemas/Aggregation_by_Resource_or_Metric"
+ },
+ "description" : "Aggregation methods specified per resource or metric.",
+ "nullable" : true
+ }, {
+ "title" : "Aggregation by Resource and Metric",
+ "type" : "object",
+ "additionalProperties" : {
+ "$ref" : "#/components/schemas/Aggregation_by_Resource_and_Metric"
+ },
+ "description" : "Aggregation methods specified per resource and metric.",
+ "nullable" : true
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+default_aggregation_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+default_aggregation_faker = JSF(
+ default_aggregation_model_schema, allow_none_optionals=1
+)
+
+
+class DefaultAggregationStub:
+ """DefaultAggregation unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return default_aggregation_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "DefaultAggregation":
+ """Create DefaultAggregation stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ DefaultAggregationAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return DefaultAggregationAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/default_interpolation_stub.py b/test/types/default_interpolation_stub.py
new file mode 100644
index 0000000..1d9c1c8
--- /dev/null
+++ b/test/types/default_interpolation_stub.py
@@ -0,0 +1,72 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.default_interpolation import (
+ DefaultInterpolation,
+ )
+
+ DefaultInterpolationAdapter = TypeAdapter(DefaultInterpolation)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+default_interpolation_model_schema = json.loads(
+ r"""{
+ "title" : "Default Interpolation",
+ "description" : "Default Interpolation method for the series (if aggregated).",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/InterpolationMethod"
+ }, {
+ "$ref" : "#/components/schemas/InterpolationSpec"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+default_interpolation_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+default_interpolation_faker = JSF(
+ default_interpolation_model_schema, allow_none_optionals=1
+)
+
+
+class DefaultInterpolationStub:
+ """DefaultInterpolation unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return default_interpolation_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "DefaultInterpolation":
+ """Create DefaultInterpolation stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ DefaultInterpolationAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return DefaultInterpolationAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/delete_response_stub.py b/test/types/delete_response_stub.py
new file mode 100644
index 0000000..8ec8137
--- /dev/null
+++ b/test/types/delete_response_stub.py
@@ -0,0 +1,87 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.delete_response import DeleteResponse
+
+ DeleteResponseAdapter = TypeAdapter(DeleteResponse)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+delete_response_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "messages" : {
+ "title" : "Messages",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/Message"
+ }
+ },
+ "_links" : {
+ "title" : " Links",
+ "type" : "object",
+ "additionalProperties" : {
+ "$ref" : "#/components/schemas/_Links"
+ },
+ "description" : "HAL links, indexed by link relation."
+ },
+ "_embeddings" : {
+ "title" : " Embeddings",
+ "type" : "object",
+ "additionalProperties" : {
+ "$ref" : "#/components/schemas/_Embeddings"
+ },
+ "description" : "Hal embeddings, indexed by relation."
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Confirmation of a delete request."
+}
+""",
+ object_hook=with_example_provider,
+)
+delete_response_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+delete_response_faker = JSF(delete_response_model_schema, allow_none_optionals=1)
+
+
+class DeleteResponseStub:
+ """DeleteResponse unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return delete_response_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "DeleteResponse":
+ """Create DeleteResponse stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ DeleteResponseAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return DeleteResponseAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/embeddings_stub.py b/test/types/embeddings_stub.py
new file mode 100644
index 0000000..fa4ff67
--- /dev/null
+++ b/test/types/embeddings_stub.py
@@ -0,0 +1,66 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.embeddings import Embeddings
+
+ EmbeddingsAdapter = TypeAdapter(Embeddings)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+embeddings_model_schema = json.loads(
+ r"""{
+ "title" : "_Embeddings",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/HALEmbedding"
+ }, {
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/HALEmbedding"
+ }
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+embeddings_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+embeddings_faker = JSF(embeddings_model_schema, allow_none_optionals=1)
+
+
+class EmbeddingsStub:
+ """Embeddings unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return embeddings_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "Embeddings":
+ """Create Embeddings stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(EmbeddingsAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return EmbeddingsAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/from_override_stub.py b/test/types/from_override_stub.py
new file mode 100644
index 0000000..6700d95
--- /dev/null
+++ b/test/types/from_override_stub.py
@@ -0,0 +1,82 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.from_override import FromOverride
+
+ FromOverrideAdapter = TypeAdapter(FromOverride)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+from_override__model_schema = json.loads(
+ r"""{
+ "title" : "From Override.",
+ "type" : "string",
+ "oneOf" : [ {
+ "title" : "ISO8601 absolute timestamp",
+ "pattern" : "[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?",
+ "type" : "string",
+ "description" : "A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations) format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)",
+ "format" : "date-time",
+ "example" : "2018-03-21T12:23:00+01:00"
+ }, {
+ "title" : "UNIX epoch milliseconds",
+ "minimum" : 0,
+ "type" : "integer",
+ "description" : "Absolute timestamp milliseconds in unix epoch since 1970-01-01.",
+ "example" : 1534836422284
+ }, {
+ "title" : "ISO8601 Period Before Now",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "Specifies a timestamp before _now_ as a period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+from_override__model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+from_override__faker = JSF(from_override__model_schema, allow_none_optionals=1)
+
+
+class FromOverrideStub:
+ """FromOverride unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return from_override__faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "FromOverride":
+ """Create FromOverride stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ FromOverrideAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return FromOverrideAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/grouping_interval_override_one_of_stub.py b/test/types/grouping_interval_override_one_of_stub.py
new file mode 100644
index 0000000..65ddf4b
--- /dev/null
+++ b/test/types/grouping_interval_override_one_of_stub.py
@@ -0,0 +1,71 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.grouping_interval_override_one_of import (
+ GroupingIntervalOverrideOneOf,
+ )
+
+ GroupingIntervalOverrideOneOfAdapter = TypeAdapter(GroupingIntervalOverrideOneOf)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+grouping_interval_override_one_of_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "When `inferred` is specified, the frequency of aggregation will be inferred from the main/first time series. This can be used to regularize the time series",
+ "enum" : [ "inferred" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+grouping_interval_override_one_of_model_schema.update({
+ "definitions": MODEL_DEFINITIONS
+})
+
+grouping_interval_override_one_of_faker = JSF(
+ grouping_interval_override_one_of_model_schema, allow_none_optionals=1
+)
+
+
+class GroupingIntervalOverrideOneOfStub:
+ """GroupingIntervalOverrideOneOf unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return grouping_interval_override_one_of_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "GroupingIntervalOverrideOneOf":
+ """Create GroupingIntervalOverrideOneOf stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ GroupingIntervalOverrideOneOfAdapter.json_schema(),
+ allow_none_optionals=1,
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return GroupingIntervalOverrideOneOfAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/grouping_interval_override_stub.py b/test/types/grouping_interval_override_stub.py
new file mode 100644
index 0000000..3b34ce4
--- /dev/null
+++ b/test/types/grouping_interval_override_stub.py
@@ -0,0 +1,78 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.grouping_interval_override import (
+ GroupingIntervalOverride,
+ )
+
+ GroupingIntervalOverrideAdapter = TypeAdapter(GroupingIntervalOverride)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+grouping_interval_override_model_schema = json.loads(
+ r"""{
+ "title" : "Grouping Interval Override",
+ "type" : "string",
+ "description" : "Override for the `freq` query attribute.",
+ "oneOf" : [ {
+ "title" : "ISO8601 period ",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ }, {
+ "$ref" : "#/components/schemas/Grouping_Interval_Override_oneOf"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+grouping_interval_override_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+grouping_interval_override_faker = JSF(
+ grouping_interval_override_model_schema, allow_none_optionals=1
+)
+
+
+class GroupingIntervalOverrideStub:
+ """GroupingIntervalOverride unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return grouping_interval_override_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "GroupingIntervalOverride":
+ """Create GroupingIntervalOverride stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ GroupingIntervalOverrideAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return GroupingIntervalOverrideAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/grouping_interval_stub.py b/test/types/grouping_interval_stub.py
new file mode 100644
index 0000000..0e84994
--- /dev/null
+++ b/test/types/grouping_interval_stub.py
@@ -0,0 +1,72 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.grouping_interval import GroupingInterval
+
+ GroupingIntervalAdapter = TypeAdapter(GroupingInterval)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+grouping_interval_model_schema = json.loads(
+ r"""{
+ "title" : "Grouping interval",
+ "type" : "string",
+ "description" : "Interval used to aggregate or regularize data. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.",
+ "oneOf" : [ {
+ "title" : "ISO8601 period ",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ }, {
+ "$ref" : "#/components/schemas/Grouping_Interval_Override_oneOf"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+grouping_interval_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+grouping_interval_faker = JSF(grouping_interval_model_schema, allow_none_optionals=1)
+
+
+class GroupingIntervalStub:
+ """GroupingInterval unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return grouping_interval_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "GroupingInterval":
+ """Create GroupingInterval stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ GroupingIntervalAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return GroupingIntervalAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/hal_link_method_stub.py b/test/types/hal_link_method_stub.py
new file mode 100644
index 0000000..2a0c0dd
--- /dev/null
+++ b/test/types/hal_link_method_stub.py
@@ -0,0 +1,63 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.hal_link_method import HALLinkMethod
+
+ HALLinkMethodAdapter = TypeAdapter(HALLinkMethod)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+hal_link_method_model_schema = json.loads(
+ r"""{
+ "title" : "HALLinkMethod",
+ "type" : "string",
+ "description" : "An http method that can be specified in a HAL link.",
+ "enum" : [ "GET", "POST", "PUT", "DELETE", "PATCH" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+hal_link_method_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+hal_link_method_faker = JSF(hal_link_method_model_schema, allow_none_optionals=1)
+
+
+class HALLinkMethodStub:
+ """HALLinkMethod unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return hal_link_method_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "HALLinkMethod":
+ """Create HALLinkMethod stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ HALLinkMethodAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return HALLinkMethodAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/hal_link_role_stub.py b/test/types/hal_link_role_stub.py
new file mode 100644
index 0000000..f0ab95c
--- /dev/null
+++ b/test/types/hal_link_role_stub.py
@@ -0,0 +1,60 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.hal_link_role import HALLinkRole
+
+ HALLinkRoleAdapter = TypeAdapter(HALLinkRole)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+hal_link_role_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Supported link and embedding roles in HAL representations.",
+ "enum" : [ "self", "first", "prev", "next", "last", "execute" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+hal_link_role_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+hal_link_role_faker = JSF(hal_link_role_model_schema, allow_none_optionals=1)
+
+
+class HALLinkRoleStub:
+ """HALLinkRole unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return hal_link_role_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "HALLinkRole":
+ """Create HALLinkRole stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(HALLinkRoleAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return HALLinkRoleAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/hal_link_stub.py b/test/types/hal_link_stub.py
new file mode 100644
index 0000000..222255c
--- /dev/null
+++ b/test/types/hal_link_stub.py
@@ -0,0 +1,74 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.hal_link import HALLink
+
+ HALLinkAdapter = TypeAdapter(HALLink)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+hal_link_model_schema = json.loads(
+ r"""{
+ "required" : [ "href" ],
+ "type" : "object",
+ "properties" : {
+ "href" : {
+ "title" : "Link URL",
+ "type" : "string",
+ "description" : "Target url for this link."
+ },
+ "type" : {
+ "title" : "Link type",
+ "type" : "string",
+ "description" : "Type of the resource referenced by this link."
+ },
+ "method" : {
+ "$ref" : "#/components/schemas/HALLinkMethod"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "A link target in a HAL response."
+}
+""",
+ object_hook=with_example_provider,
+)
+hal_link_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+hal_link_faker = JSF(hal_link_model_schema, allow_none_optionals=1)
+
+
+class HALLinkStub:
+ """HALLink unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return hal_link_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "HALLink":
+ """Create HALLink stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(HALLinkAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return HALLinkAdapter.validate_python(json, context={"skip_validation": True})
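All of these stubs funnel their fake JSON through a pydantic v2 TypeAdapter; a minimal round-trip sketch of that adapter API follows. The example href is made up, and the optional models package is assumed to be installed.

from pydantic import TypeAdapter

from waylay.services.queries.models.hal_link import HALLink

adapter = TypeAdapter(HALLink)

# Validate a plain dict into the typed HALLink model ("method" must be a valid HALLinkMethod value).
link = adapter.validate_python(
    {"href": "https://example.test/queries/v1/data", "method": "GET"}
)

# Dump it back to JSON-compatible data; the required "href" field round-trips unchanged.
dumped = adapter.dump_python(link, by_alias=True)
assert dumped["href"] == "https://example.test/queries/v1/data"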
diff --git a/test/types/header_array_option_stub.py b/test/types/header_array_option_stub.py
new file mode 100644
index 0000000..f14b5c7
--- /dev/null
+++ b/test/types/header_array_option_stub.py
@@ -0,0 +1,64 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.header_array_option import HeaderArrayOption
+
+ HeaderArrayOptionAdapter = TypeAdapter(HeaderArrayOption)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+header_array_option_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Allowed values for the render.header_array option.",
+ "enum" : [ "row", "column" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+header_array_option_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+header_array_option_faker = JSF(
+ header_array_option_model_schema, allow_none_optionals=1
+)
+
+
+class HeaderArrayOptionStub:
+ """HeaderArrayOption unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return header_array_option_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "HeaderArrayOption":
+ """Create HeaderArrayOption stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ HeaderArrayOptionAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return HeaderArrayOptionAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
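The enum-only stubs in this series (HALLinkMethod, HALLinkRole, HeaderArrayOption, the InterpolationMethodOneOf* variants) all rely on JSF drawing values from the schema's enum. A tiny standalone sketch of that behaviour, independent of the generated models; the schema literal below is a reduced copy of the header_array_option schema above.

from jsf import JSF

# Same shape as the render.header_array option schema, reduced to its essentials.
schema = {"type": "string", "enum": ["row", "column"]}
faker = JSF(schema)

# Every generated value is drawn from the enum.
assert all(faker.generate() in ("row", "column") for _ in range(10))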
diff --git a/test/types/hierarchical_stub.py b/test/types/hierarchical_stub.py
new file mode 100644
index 0000000..05d952e
--- /dev/null
+++ b/test/types/hierarchical_stub.py
@@ -0,0 +1,69 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.hierarchical import Hierarchical
+
+ HierarchicalAdapter = TypeAdapter(Hierarchical)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+hierarchical_model_schema = json.loads(
+ r"""{
+ "title" : "Hierarchical",
+ "description" : "if true, use hierarchical objects to represent multiple row (or column) dimensions, otherwise multi-keys get concatenated with a dot-delimiter. If the value is a list, only these levels are kept as separate levels, while remaining levels get concatenated keys",
+ "anyOf" : [ {
+ "type" : "boolean"
+ }, {
+ "type" : "array",
+ "items" : {
+ "type" : "string"
+ }
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+hierarchical_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+hierarchical_faker = JSF(hierarchical_model_schema, allow_none_optionals=1)
+
+
+class HierarchicalStub:
+ """Hierarchical unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return hierarchical_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "Hierarchical":
+ """Create Hierarchical stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ HierarchicalAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return HierarchicalAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/http_validation_error_stub.py b/test/types/http_validation_error_stub.py
new file mode 100644
index 0000000..8cffe43
--- /dev/null
+++ b/test/types/http_validation_error_stub.py
@@ -0,0 +1,73 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.http_validation_error import HTTPValidationError
+
+ HTTPValidationErrorAdapter = TypeAdapter(HTTPValidationError)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+http_validation_error_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "detail" : {
+ "title" : "Detail",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/ValidationError"
+ }
+ }
+ }
+}
+""",
+ object_hook=with_example_provider,
+)
+http_validation_error_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+http_validation_error_faker = JSF(
+ http_validation_error_model_schema, allow_none_optionals=1
+)
+
+
+class HTTPValidationErrorStub:
+ """HTTPValidationError unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return http_validation_error_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "HTTPValidationError":
+ """Create HTTPValidationError stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ HTTPValidationErrorAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return HTTPValidationErrorAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of10_stub.py b/test/types/interpolation_method_one_of10_stub.py
new file mode 100644
index 0000000..e15b602
--- /dev/null
+++ b/test/types/interpolation_method_one_of10_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of10 import (
+ InterpolationMethodOneOf10,
+ )
+
+ InterpolationMethodOneOf10Adapter = TypeAdapter(InterpolationMethodOneOf10)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_10_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a spline function of a user-specified order.",
+ "enum" : [ "spline" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_10_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_10_faker = JSF(
+ interpolation_method_one_of_10_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOf10Stub:
+ """InterpolationMethodOneOf10 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_10_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf10":
+ """Create InterpolationMethodOneOf10 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOf10Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOf10Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of11_stub.py b/test/types/interpolation_method_one_of11_stub.py
new file mode 100644
index 0000000..a4c040a
--- /dev/null
+++ b/test/types/interpolation_method_one_of11_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of11 import (
+ InterpolationMethodOneOf11,
+ )
+
+ InterpolationMethodOneOf11Adapter = TypeAdapter(InterpolationMethodOneOf11)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_11_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with the derivative of order 1.",
+ "enum" : [ "from_derivatives" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_11_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_11_faker = JSF(
+ interpolation_method_one_of_11_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOf11Stub:
+ """InterpolationMethodOneOf11 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_11_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf11":
+ """Create InterpolationMethodOneOf11 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOf11Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOf11Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of12_stub.py b/test/types/interpolation_method_one_of12_stub.py
new file mode 100644
index 0000000..c5ae03b
--- /dev/null
+++ b/test/types/interpolation_method_one_of12_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of12 import (
+ InterpolationMethodOneOf12,
+ )
+
+ InterpolationMethodOneOf12Adapter = TypeAdapter(InterpolationMethodOneOf12)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_12_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a piecewise cubic spline function.",
+ "enum" : [ "pchip" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_12_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_12_faker = JSF(
+ interpolation_method_one_of_12_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOf12Stub:
+ """InterpolationMethodOneOf12 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_12_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf12":
+ """Create InterpolationMethodOneOf12 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOf12Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOf12Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of13_stub.py b/test/types/interpolation_method_one_of13_stub.py
new file mode 100644
index 0000000..fadd44a
--- /dev/null
+++ b/test/types/interpolation_method_one_of13_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of13 import (
+ InterpolationMethodOneOf13,
+ )
+
+ InterpolationMethodOneOf13Adapter = TypeAdapter(InterpolationMethodOneOf13)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_13_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a non-smoothing spline of order 2, called Akima interpolation.",
+ "enum" : [ "akima" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_13_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_13_faker = JSF(
+ interpolation_method_one_of_13_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOf13Stub:
+ """InterpolationMethodOneOf13 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_13_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf13":
+ """Create InterpolationMethodOneOf13 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOf13Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOf13Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of1_stub.py b/test/types/interpolation_method_one_of1_stub.py
new file mode 100644
index 0000000..d36706d
--- /dev/null
+++ b/test/types/interpolation_method_one_of1_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of1 import (
+ InterpolationMethodOneOf1,
+ )
+
+ InterpolationMethodOneOf1Adapter = TypeAdapter(InterpolationMethodOneOf1)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_1_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a fixed, user-specified value. This method also extrapolates.",
+ "enum" : [ "fixed" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_1_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_1_faker = JSF(
+ interpolation_method_one_of_1_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOf1Stub:
+ """InterpolationMethodOneOf1 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_1_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf1":
+ """Create InterpolationMethodOneOf1 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOf1Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOf1Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of2_stub.py b/test/types/interpolation_method_one_of2_stub.py
new file mode 100644
index 0000000..9caefdb
--- /dev/null
+++ b/test/types/interpolation_method_one_of2_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of2 import (
+ InterpolationMethodOneOf2,
+ )
+
+ InterpolationMethodOneOf2Adapter = TypeAdapter(InterpolationMethodOneOf2)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_2_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Same as pad, but using the last observed value. This method also extrapolates",
+ "enum" : [ "backfill" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_2_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_2_faker = JSF(
+ interpolation_method_one_of_2_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOf2Stub:
+ """InterpolationMethodOneOf2 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_2_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf2":
+ """Create InterpolationMethodOneOf2 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOf2Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOf2Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of3_stub.py b/test/types/interpolation_method_one_of3_stub.py
new file mode 100644
index 0000000..4cff7b1
--- /dev/null
+++ b/test/types/interpolation_method_one_of3_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of3 import (
+ InterpolationMethodOneOf3,
+ )
+
+ InterpolationMethodOneOf3Adapter = TypeAdapter(InterpolationMethodOneOf3)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_3_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Linearly go from the first observed value of the gap to the last observed oneThis method also extrapolates",
+ "enum" : [ "linear" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_3_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_3_faker = JSF(
+ interpolation_method_one_of_3_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOf3Stub:
+ """InterpolationMethodOneOf3 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_3_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf3":
+ """Create InterpolationMethodOneOf3 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOf3Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOf3Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of4_stub.py b/test/types/interpolation_method_one_of4_stub.py
new file mode 100644
index 0000000..b667c57
--- /dev/null
+++ b/test/types/interpolation_method_one_of4_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of4 import (
+ InterpolationMethodOneOf4,
+ )
+
+ InterpolationMethodOneOf4Adapter = TypeAdapter(InterpolationMethodOneOf4)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_4_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Use the value that is closest in time.",
+ "enum" : [ "nearest" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_4_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_4_faker = JSF(
+ interpolation_method_one_of_4_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOf4Stub:
+ """InterpolationMethodOneOf4 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_4_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf4":
+ """Create InterpolationMethodOneOf4 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOf4Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOf4Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of5_stub.py b/test/types/interpolation_method_one_of5_stub.py
new file mode 100644
index 0000000..487a46e
--- /dev/null
+++ b/test/types/interpolation_method_one_of5_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of5 import (
+ InterpolationMethodOneOf5,
+ )
+
+ InterpolationMethodOneOf5Adapter = TypeAdapter(InterpolationMethodOneOf5)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_5_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a spline function of order 0, which is a piecewise polynomial.",
+ "enum" : [ "zero" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_5_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_5_faker = JSF(
+ interpolation_method_one_of_5_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOf5Stub:
+ """InterpolationMethodOneOf5 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_5_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf5":
+ """Create InterpolationMethodOneOf5 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOf5Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOf5Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of6_stub.py b/test/types/interpolation_method_one_of6_stub.py
new file mode 100644
index 0000000..8f9a186
--- /dev/null
+++ b/test/types/interpolation_method_one_of6_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of6 import (
+ InterpolationMethodOneOf6,
+ )
+
+ InterpolationMethodOneOf6Adapter = TypeAdapter(InterpolationMethodOneOf6)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_6_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a spline function of order 1, which is a piecewise polynomial.",
+ "enum" : [ "slinear" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_6_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_6_faker = JSF(
+ interpolation_method_one_of_6_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOf6Stub:
+ """InterpolationMethodOneOf6 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_6_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf6":
+ """Create InterpolationMethodOneOf6 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOf6Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOf6Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of7_stub.py b/test/types/interpolation_method_one_of7_stub.py
new file mode 100644
index 0000000..48653d1
--- /dev/null
+++ b/test/types/interpolation_method_one_of7_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of7 import (
+ InterpolationMethodOneOf7,
+ )
+
+ InterpolationMethodOneOf7Adapter = TypeAdapter(InterpolationMethodOneOf7)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_7_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a spline function of order 2, which is a piecewise polynomial.",
+ "enum" : [ "quadratic" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_7_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_7_faker = JSF(
+ interpolation_method_one_of_7_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOf7Stub:
+ """InterpolationMethodOneOf7 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_7_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf7":
+ """Create InterpolationMethodOneOf7 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOf7Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOf7Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of8_stub.py b/test/types/interpolation_method_one_of8_stub.py
new file mode 100644
index 0000000..6c1c7c4
--- /dev/null
+++ b/test/types/interpolation_method_one_of8_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of8 import (
+ InterpolationMethodOneOf8,
+ )
+
+ InterpolationMethodOneOf8Adapter = TypeAdapter(InterpolationMethodOneOf8)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_8_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with a spline function of order 3, which is a piecewise polynomial.",
+ "enum" : [ "cubic" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_8_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_8_faker = JSF(
+ interpolation_method_one_of_8_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOf8Stub:
+ """InterpolationMethodOneOf8 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_8_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf8":
+ """Create InterpolationMethodOneOf8 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOf8Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOf8Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of9_stub.py b/test/types/interpolation_method_one_of9_stub.py
new file mode 100644
index 0000000..a8fd7d8
--- /dev/null
+++ b/test/types/interpolation_method_one_of9_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of9 import (
+ InterpolationMethodOneOf9,
+ )
+
+ InterpolationMethodOneOf9Adapter = TypeAdapter(InterpolationMethodOneOf9)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_9_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+  "description" : "Interpolate with a polynomial of the lowest possible degree passing through the data points.",
+ "enum" : [ "polynomial" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_9_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_9_faker = JSF(
+ interpolation_method_one_of_9_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOf9Stub:
+ """InterpolationMethodOneOf9 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_9_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf9":
+ """Create InterpolationMethodOneOf9 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOf9Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOf9Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_one_of_stub.py b/test/types/interpolation_method_one_of_stub.py
new file mode 100644
index 0000000..5b8da57
--- /dev/null
+++ b/test/types/interpolation_method_one_of_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method_one_of import (
+ InterpolationMethodOneOf,
+ )
+
+ InterpolationMethodOneOfAdapter = TypeAdapter(InterpolationMethodOneOf)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_one_of_model_schema = json.loads(
+ r"""{
+ "type" : "string",
+ "description" : "Interpolate with the value of the first observed point. This method also extrapolates.",
+ "enum" : [ "pad" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_one_of_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_one_of_faker = JSF(
+ interpolation_method_one_of_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodOneOfStub:
+ """InterpolationMethodOneOf unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_one_of_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethodOneOf":
+ """Create InterpolationMethodOneOf stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodOneOfAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodOneOfAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_method_stub.py b/test/types/interpolation_method_stub.py
new file mode 100644
index 0000000..9d44b8f
--- /dev/null
+++ b/test/types/interpolation_method_stub.py
@@ -0,0 +1,92 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_method import InterpolationMethod
+
+ InterpolationMethodAdapter = TypeAdapter(InterpolationMethod)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_method_model_schema = json.loads(
+ r"""{
+ "title" : "Interpolation method",
+ "type" : "string",
+ "oneOf" : [ {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_1"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_2"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_3"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_4"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_5"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_6"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_7"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_8"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_9"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_10"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_11"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_12"
+ }, {
+ "$ref" : "#/components/schemas/Interpolation_method_oneOf_13"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_method_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_method_faker = JSF(
+ interpolation_method_model_schema, allow_none_optionals=1
+)
+
+
+class InterpolationMethodStub:
+ """InterpolationMethod unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_method_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationMethod":
+ """Create InterpolationMethod stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationMethodAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationMethodAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_spec_stub.py b/test/types/interpolation_spec_stub.py
new file mode 100644
index 0000000..a874323
--- /dev/null
+++ b/test/types/interpolation_spec_stub.py
@@ -0,0 +1,78 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation_spec import InterpolationSpec
+
+ InterpolationSpecAdapter = TypeAdapter(InterpolationSpec)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_spec_model_schema = json.loads(
+ r"""{
+ "required" : [ "method" ],
+ "type" : "object",
+ "properties" : {
+ "method" : {
+ "$ref" : "#/components/schemas/Interpolation_method"
+ },
+ "value" : {
+ "title" : "Interpolation parameter",
+ "type" : "integer",
+ "description" : "Optional parameter value for the interpolation method (see method description)."
+ },
+ "order" : {
+ "title" : "Interpolation order",
+ "type" : "integer",
+ "description" : "Optional order parameter for the interpolation method (see method description)."
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Defines whether, and how to treat missing values.\n\nThis can occur in two circumstances when aggregating (setting a sample frequency):\n* missing values: if there are missing (or invalid) values stored for\na given freq-interval,\n\"interpolation\" specifies how to compute these.\n* down-sampling: when the specified freq is smaller than the series’\nactual frequency.\n\"interpolation\" specifies how to compute intermediate values."
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_spec_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_spec_faker = JSF(interpolation_spec_model_schema, allow_none_optionals=1)
+
+
+class InterpolationSpecStub:
+ """InterpolationSpec unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_spec_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "InterpolationSpec":
+ """Create InterpolationSpec stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationSpecAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationSpecAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/interpolation_stub.py b/test/types/interpolation_stub.py
new file mode 100644
index 0000000..7ff4667
--- /dev/null
+++ b/test/types/interpolation_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.interpolation import Interpolation
+
+ InterpolationAdapter = TypeAdapter(Interpolation)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+interpolation_model_schema = json.loads(
+ r"""{
+ "title" : "Interpolation",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/InterpolationMethod"
+ }, {
+ "$ref" : "#/components/schemas/InterpolationSpec"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+interpolation_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+interpolation_faker = JSF(interpolation_model_schema, allow_none_optionals=1)
+
+
+class InterpolationStub:
+ """Interpolation unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return interpolation_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "Interpolation":
+ """Create Interpolation stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ InterpolationAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return InterpolationAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
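
Because `Interpolation` is an `anyOf` of the bare interpolation-method string and the richer `InterpolationSpec` object, the same adapter should accept both shapes. A minimal sketch, assuming the types package is installed and the `test.types` module path implied by the file layout; the `"pad"` value is taken from the method schema earlier in this diff:

# illustrative sketch, not part of the generated files
from test.types.interpolation_stub import MODELS_AVAILABLE, InterpolationAdapter

if MODELS_AVAILABLE:
    # bare interpolation-method string (one of the single-value enums)
    as_method = InterpolationAdapter.validate_python("pad")
    # full InterpolationSpec object; `value`/`order` are optional integers
    as_spec = InterpolationAdapter.validate_python({"method": "pad"})
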
diff --git a/test/types/links_stub.py b/test/types/links_stub.py
new file mode 100644
index 0000000..a1760fb
--- /dev/null
+++ b/test/types/links_stub.py
@@ -0,0 +1,64 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.links import Links
+
+ LinksAdapter = TypeAdapter(Links)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+links_model_schema = json.loads(
+ r"""{
+ "title" : "_Links",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/HALLink"
+ }, {
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/HALLink"
+ }
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+links_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+links_faker = JSF(links_model_schema, allow_none_optionals=1)
+
+
+class LinksStub:
+ """Links unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return links_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "Links":
+ """Create Links stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(LinksAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return LinksAdapter.validate_python(json, context={"skip_validation": True})
diff --git a/test/types/location_inner_stub.py b/test/types/location_inner_stub.py
new file mode 100644
index 0000000..21fcb7d
--- /dev/null
+++ b/test/types/location_inner_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.location_inner import LocationInner
+
+ LocationInnerAdapter = TypeAdapter(LocationInner)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+location_inner_model_schema = json.loads(
+ r"""{
+ "title" : "Location_inner",
+ "anyOf" : [ {
+ "type" : "string"
+ }, {
+ "type" : "integer"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+location_inner_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+location_inner_faker = JSF(location_inner_model_schema, allow_none_optionals=1)
+
+
+class LocationInnerStub:
+ """LocationInner unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return location_inner_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "LocationInner":
+ """Create LocationInner stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ LocationInnerAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return LocationInnerAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/message_arguments_stub.py b/test/types/message_arguments_stub.py
new file mode 100644
index 0000000..f745ad1
--- /dev/null
+++ b/test/types/message_arguments_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.message_arguments import MessageArguments
+
+ MessageArgumentsAdapter = TypeAdapter(MessageArguments)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+message_arguments_model_schema = json.loads(
+ r"""{
+ "title" : "Message Arguments",
+ "anyOf" : [ {
+ "type" : "string"
+ }, {
+ "$ref" : "#/components/schemas/MessageProperties"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+message_arguments_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+message_arguments_faker = JSF(message_arguments_model_schema, allow_none_optionals=1)
+
+
+class MessageArgumentsStub:
+ """MessageArguments unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return message_arguments_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "MessageArguments":
+ """Create MessageArguments stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ MessageArgumentsAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return MessageArgumentsAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/message_level_stub.py b/test/types/message_level_stub.py
new file mode 100644
index 0000000..b76b5cc
--- /dev/null
+++ b/test/types/message_level_stub.py
@@ -0,0 +1,63 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.message_level import MessageLevel
+
+ MessageLevelAdapter = TypeAdapter(MessageLevel)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+message_level_model_schema = json.loads(
+ r"""{
+ "title" : "Message_level",
+ "type" : "string",
+ "default" : "info",
+ "enum" : [ "debug", "info", "warning", "error", "fatal" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+message_level_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+message_level_faker = JSF(message_level_model_schema, allow_none_optionals=1)
+
+
+class MessageLevelStub:
+ """MessageLevel unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return message_level_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "MessageLevel":
+ """Create MessageLevel stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ MessageLevelAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return MessageLevelAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/message_properties_stub.py b/test/types/message_properties_stub.py
new file mode 100644
index 0000000..bd8d231
--- /dev/null
+++ b/test/types/message_properties_stub.py
@@ -0,0 +1,72 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.message_properties import MessageProperties
+
+ MessagePropertiesAdapter = TypeAdapter(MessageProperties)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+message_properties_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "resource" : {
+ "title" : "Series resource id",
+ "type" : "string"
+ },
+ "metric" : {
+ "title" : "Series metric",
+ "type" : "string"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Additional message arguments."
+}
+""",
+ object_hook=with_example_provider,
+)
+message_properties_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+message_properties_faker = JSF(message_properties_model_schema, allow_none_optionals=1)
+
+
+class MessagePropertiesStub:
+ """MessageProperties unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return message_properties_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "MessageProperties":
+ """Create MessageProperties stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ MessagePropertiesAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return MessagePropertiesAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/message_stub.py b/test/types/message_stub.py
new file mode 100644
index 0000000..305c968
--- /dev/null
+++ b/test/types/message_stub.py
@@ -0,0 +1,78 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.message import Message
+
+ MessageAdapter = TypeAdapter(Message)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+message_model_schema = json.loads(
+ r"""{
+ "title" : "Message",
+ "required" : [ "message" ],
+ "type" : "object",
+ "properties" : {
+ "code" : {
+ "title" : "code",
+ "type" : "string",
+ "nullable" : true
+ },
+ "message" : {
+ "title" : "Message",
+ "type" : "string"
+ },
+ "level" : {
+ "$ref" : "#/components/schemas/Message_level"
+ },
+ "args" : {
+ "title" : "args",
+ "type" : "object",
+ "nullable" : true
+ }
+ },
+ "description" : "Individual (info/warning/error) message in a response."
+}
+""",
+ object_hook=with_example_provider,
+)
+message_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+message_faker = JSF(message_model_schema, allow_none_optionals=1)
+
+
+class MessageStub:
+ """Message unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return message_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "Message":
+ """Create Message stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(MessageAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return MessageAdapter.validate_python(json, context={"skip_validation": True})
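
For `Message`, only the `message` property is required; `level` defaults to `info` and `code`/`args` are nullable. A hand-written payload should therefore validate against the generated adapter as well as the faker output. A sketch with made-up field values, assuming the types package is installed:

# illustrative sketch, not part of the generated files
from test.types.message_stub import MODELS_AVAILABLE, MessageAdapter

if MODELS_AVAILABLE:
    msg = MessageAdapter.validate_python(
        {
            "message": "series not found",     # the only required property
            "level": "warning",                # debug/info/warning/error/fatal
            "args": {"resource": "device-1"},  # free-form, nullable object
        }
    )
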
diff --git a/test/types/object_data_set_stub.py b/test/types/object_data_set_stub.py
new file mode 100644
index 0000000..ee88b87
--- /dev/null
+++ b/test/types/object_data_set_stub.py
@@ -0,0 +1,78 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.object_data_set import ObjectDataSet
+
+ ObjectDataSetAdapter = TypeAdapter(ObjectDataSet)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+object_data_set_model_schema = json.loads(
+ r"""{
+ "required" : [ "data" ],
+ "type" : "object",
+ "properties" : {
+ "attributes" : {
+ "$ref" : "#/components/schemas/DataSetAttributes"
+ },
+ "window_spec" : {
+ "$ref" : "#/components/schemas/DataSetWindow"
+ },
+ "data" : {
+ "title" : "Data",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/ObjectData"
+ }
+ }
+ },
+ "additionalProperties" : true,
+  "description" : "Data result in object format.\n\nResult item when render option `render.header_array` is not set.\n\nThe data values are keyed by their attributes (`resource`, `metric`, `aggregation`),\naccording to the render options:\n* _hierarchical_: for each level, a sub-object is created\n (e.g. `render.mode=hier_dict`)\n* _flattened_: the key is a '.'-separated concatenation\n of the attributes (e.g. `render.mode=flat_dict`)\n* _mixed_: (e.g. `render.mode=metric_flat_dict`) a single level\n (e.g. `metric`) is used as main key, any remaining levels\n (`resource`,`aggregation`) are indicated with a flattened subkey.\n\nWhen `render.rollup=true`, the attribute levels that are the same for all series are\nnot used as key, but reported as a data or table attribute."
+}
+""",
+ object_hook=with_example_provider,
+)
+object_data_set_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+object_data_set_faker = JSF(object_data_set_model_schema, allow_none_optionals=1)
+
+
+class ObjectDataSetStub:
+ """ObjectDataSet unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return object_data_set_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "ObjectDataSet":
+ """Create ObjectDataSet stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ ObjectDataSetAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return ObjectDataSetAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/object_data_stub.py b/test/types/object_data_stub.py
new file mode 100644
index 0000000..c905491
--- /dev/null
+++ b/test/types/object_data_stub.py
@@ -0,0 +1,98 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.object_data import ObjectData
+
+ ObjectDataAdapter = TypeAdapter(ObjectData)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+object_data_model_schema = json.loads(
+ r"""{
+ "required" : [ "timestamp" ],
+ "type" : "object",
+ "properties" : {
+ "timestamp" : {
+ "$ref" : "#/components/schemas/Timestamp"
+ },
+ "timestamp_iso" : {
+ "$ref" : "#/components/schemas/TimestampIso"
+ },
+ "role" : {
+ "$ref" : "#/components/schemas/Role"
+ },
+ "resource" : {
+ "title" : "Resource",
+ "type" : "string",
+ "description" : "Series resource id, if applicable for all values."
+ },
+ "metric" : {
+ "title" : "Metric",
+ "type" : "string",
+ "description" : "Series metric, if applicable for all values."
+ },
+ "aggregation" : {
+ "title" : "Aggregation",
+ "type" : "string",
+ "description" : "Series aggregation, if applicable for all values."
+ },
+ "levels" : {
+ "title" : "Hierarchical Levels",
+ "type" : "array",
+ "description" : "Attribute level names used to key the values for this observation.\n\nLevels that are flattened have a dot-separated key.\n\nIf all observations have the same attribute for a level, that level might be omitted.",
+ "example" : [ "resource", "metric", "aggregation" ],
+ "items" : {
+ "type" : "string"
+ }
+ }
+ },
+ "additionalProperties" : {
+ "$ref" : "#/components/schemas/ObjectData_value"
+ },
+ "description" : "Result data for a timestamp in object format."
+}
+""",
+ object_hook=with_example_provider,
+)
+object_data_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+object_data_faker = JSF(object_data_model_schema, allow_none_optionals=1)
+
+
+class ObjectDataStub:
+ """ObjectData unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return object_data_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "ObjectData":
+ """Create ObjectData stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(ObjectDataAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return ObjectDataAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/object_data_value_stub.py b/test/types/object_data_value_stub.py
new file mode 100644
index 0000000..a260eed
--- /dev/null
+++ b/test/types/object_data_value_stub.py
@@ -0,0 +1,67 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.object_data_value import ObjectDataValue
+
+ ObjectDataValueAdapter = TypeAdapter(ObjectDataValue)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+object_data_value_model_schema = json.loads(
+ r"""{
+ "title" : "Data ",
+ "oneOf" : [ {
+ "title" : "Hierarchical Data",
+ "type" : "object",
+ "description" : "Values for the series whose attributes corresponds with the key. Keyed by sub-levels."
+ }, {
+ "$ref" : "#/components/schemas/Datum"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+object_data_value_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+object_data_value_faker = JSF(object_data_value_model_schema, allow_none_optionals=1)
+
+
+class ObjectDataValueStub:
+ """ObjectDataValue unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return object_data_value_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "ObjectDataValue":
+ """Create ObjectDataValue stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ ObjectDataValueAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return ObjectDataValueAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/queries_list_response_stub.py b/test/types/queries_list_response_stub.py
new file mode 100644
index 0000000..9d33b9d
--- /dev/null
+++ b/test/types/queries_list_response_stub.py
@@ -0,0 +1,107 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.queries_list_response import QueriesListResponse
+
+ QueriesListResponseAdapter = TypeAdapter(QueriesListResponse)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+queries_list_response_model_schema = json.loads(
+ r"""{
+ "required" : [ "_links", "count", "limit", "offset", "queries" ],
+ "type" : "object",
+ "properties" : {
+ "messages" : {
+ "title" : "Messages",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/Message"
+ }
+ },
+ "queries" : {
+ "title" : "Query item list",
+ "type" : "array",
+ "description" : "One page of matching query definitions.",
+ "items" : {
+ "$ref" : "#/components/schemas/QueryListItem"
+ }
+ },
+ "count" : {
+ "title" : "Current page size",
+ "type" : "integer",
+ "description" : "Number of query definitions returned in the current response."
+ },
+ "offset" : {
+ "title" : "Page offset",
+ "type" : "integer",
+ "description" : "Offset in the full listing (skipped definitions)."
+ },
+ "limit" : {
+ "title" : "Page size limit",
+ "type" : "integer",
+ "description" : "Maximal number of query definitions returned in one response."
+ },
+ "total_count" : {
+ "title" : "Total count",
+ "type" : "integer",
+ "description" : "Total number of query definitions matching the filter."
+ },
+ "_links" : {
+ "$ref" : "#/components/schemas/QueryListHALLinks"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Listing of named queries, with paging links."
+}
+""",
+ object_hook=with_example_provider,
+)
+queries_list_response_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+queries_list_response_faker = JSF(
+ queries_list_response_model_schema, allow_none_optionals=1
+)
+
+
+class QueriesListResponseStub:
+ """QueriesListResponse unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return queries_list_response_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "QueriesListResponse":
+ """Create QueriesListResponse stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ QueriesListResponseAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return QueriesListResponseAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/query_definition_stub.py b/test/types/query_definition_stub.py
new file mode 100644
index 0000000..00e58a9
--- /dev/null
+++ b/test/types/query_definition_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.query_definition import QueryDefinition
+
+ QueryDefinitionAdapter = TypeAdapter(QueryDefinition)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+query_definition_model_schema = json.loads(
+ r"""{
+ "title" : "Query Definition",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/QueryUpdateInput"
+ }, {
+ "$ref" : "#/components/schemas/Query-Input"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+query_definition_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+query_definition_faker = JSF(query_definition_model_schema, allow_none_optionals=1)
+
+
+class QueryDefinitionStub:
+ """QueryDefinition unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return query_definition_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "QueryDefinition":
+ """Create QueryDefinition stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ QueryDefinitionAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return QueryDefinitionAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/query_entity_input_stub.py b/test/types/query_entity_input_stub.py
new file mode 100644
index 0000000..96a2b21
--- /dev/null
+++ b/test/types/query_entity_input_stub.py
@@ -0,0 +1,78 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.query_entity_input import QueryEntityInput
+
+ QueryEntityInputAdapter = TypeAdapter(QueryEntityInput)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+query_entity_input_model_schema = json.loads(
+ r"""{
+ "required" : [ "name", "query" ],
+ "type" : "object",
+ "properties" : {
+ "name" : {
+ "title" : "Query name",
+ "type" : "string",
+ "description" : "Name of the stored query definition."
+ },
+ "meta" : {
+ "title" : "Query metadata",
+ "type" : "object",
+ "description" : "User metadata for the query definition."
+ },
+ "query" : {
+ "$ref" : "#/components/schemas/Query-Input"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Input data to create a query definition."
+}
+""",
+ object_hook=with_example_provider,
+)
+query_entity_input_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+query_entity_input_faker = JSF(query_entity_input_model_schema, allow_none_optionals=1)
+
+
+class QueryEntityInputStub:
+ """QueryEntityInput unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return query_entity_input_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "QueryEntityInput":
+ """Create QueryEntityInput stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ QueryEntityInputAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return QueryEntityInputAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/query_execution_message_level_stub.py b/test/types/query_execution_message_level_stub.py
new file mode 100644
index 0000000..0645514
--- /dev/null
+++ b/test/types/query_execution_message_level_stub.py
@@ -0,0 +1,68 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.query_execution_message_level import (
+ QueryExecutionMessageLevel,
+ )
+
+ QueryExecutionMessageLevelAdapter = TypeAdapter(QueryExecutionMessageLevel)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+query_execution_message_level_model_schema = json.loads(
+ r"""{
+ "title" : "QueryExecutionMessage_level",
+ "type" : "string",
+ "enum" : [ "debug", "info", "warning", "error" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+query_execution_message_level_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+query_execution_message_level_faker = JSF(
+ query_execution_message_level_model_schema, allow_none_optionals=1
+)
+
+
+class QueryExecutionMessageLevelStub:
+ """QueryExecutionMessageLevel unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return query_execution_message_level_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "QueryExecutionMessageLevel":
+ """Create QueryExecutionMessageLevel stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ QueryExecutionMessageLevelAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return QueryExecutionMessageLevelAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/query_execution_message_stub.py b/test/types/query_execution_message_stub.py
new file mode 100644
index 0000000..e3aa654
--- /dev/null
+++ b/test/types/query_execution_message_stub.py
@@ -0,0 +1,101 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.query_execution_message import (
+ QueryExecutionMessage,
+ )
+
+ QueryExecutionMessageAdapter = TypeAdapter(QueryExecutionMessage)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+query_execution_message_model_schema = json.loads(
+ r"""{
+ "required" : [ "action", "category", "level", "message", "timestamp" ],
+ "type" : "object",
+ "properties" : {
+ "message" : {
+ "title" : "Message",
+ "type" : "string",
+ "description" : "A human readable message."
+ },
+ "level" : {
+ "$ref" : "#/components/schemas/QueryExecutionMessage_level"
+ },
+ "timestamp" : {
+ "title" : "Timestamp",
+ "type" : "string",
+ "format" : "date-time"
+ },
+ "action" : {
+ "title" : "Action",
+ "type" : "string",
+ "description" : "The request action that caused this message."
+ },
+ "category" : {
+ "title" : "Message Category",
+ "type" : "string",
+ "description" : "The subsystem that issued this message.",
+ "example" : "data"
+ },
+ "properties" : {
+ "$ref" : "#/components/schemas/Message_Arguments"
+ },
+ "exception" : {
+ "$ref" : "#/components/schemas/CauseException"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "A message object that informs or warns about a query execution issue."
+}
+""",
+ object_hook=with_example_provider,
+)
+query_execution_message_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+query_execution_message_faker = JSF(
+ query_execution_message_model_schema, allow_none_optionals=1
+)
+
+
+class QueryExecutionMessageStub:
+ """QueryExecutionMessage unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return query_execution_message_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "QueryExecutionMessage":
+ """Create QueryExecutionMessage stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ QueryExecutionMessageAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return QueryExecutionMessageAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/query_hal_links_stub.py b/test/types/query_hal_links_stub.py
new file mode 100644
index 0000000..dcbdb4c
--- /dev/null
+++ b/test/types/query_hal_links_stub.py
@@ -0,0 +1,71 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.query_hal_links import QueryHALLinks
+
+ QueryHALLinksAdapter = TypeAdapter(QueryHALLinks)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+query_hal_links_model_schema = json.loads(
+ r"""{
+ "required" : [ "execute", "self" ],
+ "type" : "object",
+ "properties" : {
+ "self" : {
+ "$ref" : "#/components/schemas/HALLink"
+ },
+ "execute" : {
+ "$ref" : "#/components/schemas/HALLink"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "HAL Links for a query entity."
+}
+""",
+ object_hook=with_example_provider,
+)
+query_hal_links_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+query_hal_links_faker = JSF(query_hal_links_model_schema, allow_none_optionals=1)
+
+
+class QueryHALLinksStub:
+ """QueryHALLinks unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return query_hal_links_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "QueryHALLinks":
+ """Create QueryHALLinks stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ QueryHALLinksAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return QueryHALLinksAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/query_input_stub.py b/test/types/query_input_stub.py
new file mode 100644
index 0000000..d148a11
--- /dev/null
+++ b/test/types/query_input_stub.py
@@ -0,0 +1,109 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.query_input import QueryInput
+
+ QueryInputAdapter = TypeAdapter(QueryInput)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+query_input_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "resource" : {
+ "title" : "Default Resource",
+ "type" : "string",
+ "description" : "Default resource for the series in the query."
+ },
+ "metric" : {
+ "title" : "Default Metric",
+ "type" : "string",
+ "description" : "Default metric for the series in the query."
+ },
+ "aggregation" : {
+ "$ref" : "#/components/schemas/Default_Aggregation"
+ },
+ "interpolation" : {
+ "$ref" : "#/components/schemas/Default_Interpolation"
+ },
+ "freq" : {
+ "$ref" : "#/components/schemas/Grouping_interval"
+ },
+ "from" : {
+ "$ref" : "#/components/schemas/Time_Window_From"
+ },
+ "until" : {
+ "$ref" : "#/components/schemas/Time_Window_Until"
+ },
+ "window" : {
+ "$ref" : "#/components/schemas/Window"
+ },
+ "periods" : {
+ "title" : "Periods",
+ "type" : "integer",
+ "description" : "The size of the time window in number of `freq` units. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers."
+ },
+ "align" : {
+ "$ref" : "#/components/schemas/Alignment"
+ },
+ "data" : {
+ "title" : "Series specifications",
+ "type" : "array",
+      "description" : "List of series specifications. When not specified, a single default series specification is assumed (`[{}]`, using the default `metric`, `resource`, ...).",
+ "items" : {
+ "$ref" : "#/components/schemas/SeriesSpec"
+ }
+ },
+ "render" : {
+ "$ref" : "#/components/schemas/Render"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Query definition for a Waylay analytics query.\n\nSee also [api docs](https://docs.waylay.io/#/api/query/?id=data-query-json-representation)."
+}
+""",
+ object_hook=with_example_provider,
+)
+query_input_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+query_input_faker = JSF(query_input_model_schema, allow_none_optionals=1)
+
+
+class QueryInputStub:
+ """QueryInput unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return query_input_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "QueryInput":
+ """Create QueryInput stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(QueryInputAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return QueryInputAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
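
`QueryInput` declares no required properties (its description notes that a single default series specification `[{}]` is assumed), so even an empty object is a valid query. A short sketch with illustrative values, assuming the types package is installed:

# illustrative sketch, not part of the generated files
from test.types.query_input_stub import MODELS_AVAILABLE, QueryInputAdapter

if MODELS_AVAILABLE:
    # no property is required, so the empty query already validates
    empty_query = QueryInputAdapter.validate_python({})
    # default resource/metric and a `periods` window can be set at the top level
    scoped_query = QueryInputAdapter.validate_python(
        {"resource": "device-1", "metric": "temperature", "periods": 12}
    )
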
diff --git a/test/types/query_list_hal_links_stub.py b/test/types/query_list_hal_links_stub.py
new file mode 100644
index 0000000..36aa88a
--- /dev/null
+++ b/test/types/query_list_hal_links_stub.py
@@ -0,0 +1,83 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.query_list_hal_links import QueryListHALLinks
+
+ QueryListHALLinksAdapter = TypeAdapter(QueryListHALLinks)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+query_list_hal_links_model_schema = json.loads(
+ r"""{
+ "title" : "QueryListHALLinks",
+ "required" : [ "self" ],
+ "type" : "object",
+ "properties" : {
+ "self" : {
+ "$ref" : "#/components/schemas/HALLink"
+ },
+ "first" : {
+ "$ref" : "#/components/schemas/HALLink"
+ },
+ "prev" : {
+ "$ref" : "#/components/schemas/HALLink"
+ },
+ "next" : {
+ "$ref" : "#/components/schemas/HALLink"
+ },
+ "last" : {
+ "$ref" : "#/components/schemas/HALLink"
+ }
+ },
+ "additionalProperties" : true,
+  "description" : "HAL Links for a query definition listing."
+}
+""",
+ object_hook=with_example_provider,
+)
+query_list_hal_links_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+query_list_hal_links_faker = JSF(
+ query_list_hal_links_model_schema, allow_none_optionals=1
+)
+
+
+class QueryListHALLinksStub:
+ """QueryListHALLinks unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return query_list_hal_links_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "QueryListHALLinks":
+ """Create QueryListHALLinks stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ QueryListHALLinksAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return QueryListHALLinksAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/query_list_item_stub.py b/test/types/query_list_item_stub.py
new file mode 100644
index 0000000..bb4f070
--- /dev/null
+++ b/test/types/query_list_item_stub.py
@@ -0,0 +1,84 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.query_list_item import QueryListItem
+
+ QueryListItemAdapter = TypeAdapter(QueryListItem)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+query_list_item_model_schema = json.loads(
+ r"""{
+ "title" : "QueryListItem",
+ "required" : [ "_links", "attrs", "name" ],
+ "type" : "object",
+ "properties" : {
+ "_links" : {
+ "$ref" : "#/components/schemas/QueryHALLinks"
+ },
+ "attrs" : {
+ "title" : "Query attributes",
+ "type" : "object",
+ "description" : "System provided metadata for the query definition."
+ },
+ "name" : {
+ "title" : "Query name",
+ "type" : "string",
+ "description" : "Name of the stored query definition."
+ },
+ "meta" : {
+ "title" : "Query metadata",
+ "type" : "object",
+ "description" : "User metadata for the query definition."
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Listing of a query definition item."
+}
+""",
+ object_hook=with_example_provider,
+)
+query_list_item_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+query_list_item_faker = JSF(query_list_item_model_schema, allow_none_optionals=1)
+
+
+class QueryListItemStub:
+ """QueryListItem unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return query_list_item_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "QueryListItem":
+ """Create QueryListItem stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ QueryListItemAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return QueryListItemAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/query_output_stub.py b/test/types/query_output_stub.py
new file mode 100644
index 0000000..7ac72d2
--- /dev/null
+++ b/test/types/query_output_stub.py
@@ -0,0 +1,109 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.query_output import QueryOutput
+
+ QueryOutputAdapter = TypeAdapter(QueryOutput)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+query_output_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "resource" : {
+ "title" : "Default Resource",
+ "type" : "string",
+ "description" : "Default resource for the series in the query."
+ },
+ "metric" : {
+ "title" : "Default Metric",
+ "type" : "string",
+ "description" : "Default metric for the series in the query."
+ },
+ "aggregation" : {
+ "$ref" : "#/components/schemas/Default_Aggregation"
+ },
+ "interpolation" : {
+ "$ref" : "#/components/schemas/Default_Interpolation"
+ },
+ "freq" : {
+ "$ref" : "#/components/schemas/Grouping_interval"
+ },
+ "from" : {
+ "$ref" : "#/components/schemas/Time_Window_From"
+ },
+ "until" : {
+ "$ref" : "#/components/schemas/Time_Window_Until"
+ },
+ "window" : {
+ "$ref" : "#/components/schemas/Window"
+ },
+ "periods" : {
+ "title" : "Periods",
+ "type" : "integer",
+ "description" : "The size of the time window in number of `freq` units. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers."
+ },
+ "align" : {
+ "$ref" : "#/components/schemas/Alignment"
+ },
+ "data" : {
+ "title" : "Series specifications",
+ "type" : "array",
+      "description" : "List of series specifications. When not specified, a single default series specification is assumed (`[{}]`, using the default `metric`,`resource`, ... ).",
+ "items" : {
+ "$ref" : "#/components/schemas/SeriesSpec"
+ }
+ },
+ "render" : {
+ "$ref" : "#/components/schemas/Render"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Query definition for a Waylay analytics query.\n\nSee also [api docs](https://docs.waylay.io/#/api/query/?id=data-query-json-representation)."
+}
+""",
+ object_hook=with_example_provider,
+)
+query_output_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+query_output_faker = JSF(query_output_model_schema, allow_none_optionals=1)
+
+
+class QueryOutputStub:
+ """QueryOutput unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return query_output_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "QueryOutput":
+ """Create QueryOutput stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(QueryOutputAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return QueryOutputAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/query_response_stub.py b/test/types/query_response_stub.py
new file mode 100644
index 0000000..3069839
--- /dev/null
+++ b/test/types/query_response_stub.py
@@ -0,0 +1,93 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.query_response import QueryResponse
+
+ QueryResponseAdapter = TypeAdapter(QueryResponse)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+query_response_model_schema = json.loads(
+ r"""{
+ "required" : [ "_links", "attrs", "name", "query" ],
+ "type" : "object",
+ "properties" : {
+ "_links" : {
+ "$ref" : "#/components/schemas/QueryHALLinks"
+ },
+ "attrs" : {
+ "title" : "Query attributes",
+ "type" : "object",
+ "description" : "System provided metadata for the query definition."
+ },
+ "name" : {
+ "title" : "Query name",
+ "type" : "string",
+ "description" : "Name of the stored query definition."
+ },
+ "meta" : {
+ "title" : "Query metadata",
+ "type" : "object",
+ "description" : "User metadata for the query definition."
+ },
+ "query" : {
+ "$ref" : "#/components/schemas/Query-Output"
+ },
+ "messages" : {
+ "title" : "Messages",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/Message"
+ }
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Represents a single named query."
+}
+""",
+ object_hook=with_example_provider,
+)
+query_response_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+query_response_faker = JSF(query_response_model_schema, allow_none_optionals=1)
+
+
+class QueryResponseStub:
+ """QueryResponse unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return query_response_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "QueryResponse":
+ """Create QueryResponse stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ QueryResponseAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return QueryResponseAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/query_result_stub.py b/test/types/query_result_stub.py
new file mode 100644
index 0000000..927e672
--- /dev/null
+++ b/test/types/query_result_stub.py
@@ -0,0 +1,89 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.query_result import QueryResult
+
+ QueryResultAdapter = TypeAdapter(QueryResult)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+query_result_model_schema = json.loads(
+ r"""{
+ "required" : [ "data", "messages", "query" ],
+ "type" : "object",
+ "properties" : {
+ "data" : {
+ "title" : "Response Data Sets",
+ "type" : "array",
+ "description" : "A list of data sets, each with their own time axis. There will be one dataset for each `role` specified in the query (by default a single `input` role).\n\nThe data is represented according to the `render` options in the query (default `COMPACT_WS`).",
+ "items" : {
+ "$ref" : "#/components/schemas/Response_Data_Set"
+ }
+ },
+ "query" : {
+ "$ref" : "#/components/schemas/Query-Input"
+ },
+ "messages" : {
+ "title" : "Messages and Warnings",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/QueryExecutionMessage"
+ }
+ }
+ },
+ "additionalProperties" : true,
+  "description" : "A json data response.\n\nUses the format as specified by the\n`render` options of the request (defaults to `COMPACT_WS`).",
+ "example" : {
+ "data" : [ ],
+ "query" : {
+ "resource" : "R",
+ "metric" : "temperature"
+ },
+ "messages" : [ ]
+ }
+}
+""",
+ object_hook=with_example_provider,
+)
+query_result_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+query_result_faker = JSF(query_result_model_schema, allow_none_optionals=1)
+
+
+class QueryResultStub:
+ """QueryResult unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return query_result_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "QueryResult":
+ """Create QueryResult stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(QueryResultAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return QueryResultAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/query_update_input_stub.py b/test/types/query_update_input_stub.py
new file mode 100644
index 0000000..6388a1a
--- /dev/null
+++ b/test/types/query_update_input_stub.py
@@ -0,0 +1,72 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.query_update_input import QueryUpdateInput
+
+ QueryUpdateInputAdapter = TypeAdapter(QueryUpdateInput)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+query_update_input_model_schema = json.loads(
+ r"""{
+ "type" : "object",
+ "properties" : {
+ "meta" : {
+ "title" : "Query metadata",
+ "type" : "object",
+ "description" : "User metadata for the query definition."
+ },
+ "query" : {
+ "$ref" : "#/components/schemas/Query-Input"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Input data to update a query definition."
+}
+""",
+ object_hook=with_example_provider,
+)
+query_update_input_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+query_update_input_faker = JSF(query_update_input_model_schema, allow_none_optionals=1)
+
+
+class QueryUpdateInputStub:
+ """QueryUpdateInput unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return query_update_input_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "QueryUpdateInput":
+ """Create QueryUpdateInput stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ QueryUpdateInputAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return QueryUpdateInputAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/render1_stub.py b/test/types/render1_stub.py
new file mode 100644
index 0000000..a68a152
--- /dev/null
+++ b/test/types/render1_stub.py
@@ -0,0 +1,61 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.render1 import Render1
+
+ Render1Adapter = TypeAdapter(Render1)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+render_1_model_schema = json.loads(
+ r"""{
+ "title" : "Render",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/_RenderMode"
+ }, {
+ "$ref" : "#/components/schemas/Render"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+render_1_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+render_1_faker = JSF(render_1_model_schema, allow_none_optionals=1)
+
+
+class Render1Stub:
+ """Render1 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return render_1_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "Render1":
+ """Create Render1 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(Render1Adapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return Render1Adapter.validate_python(json, context={"skip_validation": True})
diff --git a/test/types/render_mode_one_of1_stub.py b/test/types/render_mode_one_of1_stub.py
new file mode 100644
index 0000000..bb649e8
--- /dev/null
+++ b/test/types/render_mode_one_of1_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.render_mode_one_of1 import RenderModeOneOf1
+
+ RenderModeOneOf1Adapter = TypeAdapter(RenderModeOneOf1)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+render_mode_one_of_1_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_1",
+ "type" : "string",
+ "description" : "Render rows of timestamp and values. Show column headers.\n\n###### options\n- `iso_timestamp`: `False`\n- `header_array`: `row`\n- `roll_up`: `False`\n- `data_axis`: `column`",
+ "enum" : [ "COMPACT" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+render_mode_one_of_1_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+render_mode_one_of_1_faker = JSF(
+ render_mode_one_of_1_model_schema, allow_none_optionals=1
+)
+
+
+class RenderModeOneOf1Stub:
+ """RenderModeOneOf1 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return render_mode_one_of_1_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RenderModeOneOf1":
+ """Create RenderModeOneOf1 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ RenderModeOneOf1Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RenderModeOneOf1Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/render_mode_one_of2_stub.py b/test/types/render_mode_one_of2_stub.py
new file mode 100644
index 0000000..8d66ae4
--- /dev/null
+++ b/test/types/render_mode_one_of2_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.render_mode_one_of2 import RenderModeOneOf2
+
+ RenderModeOneOf2Adapter = TypeAdapter(RenderModeOneOf2)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+render_mode_one_of_2_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_2",
+ "type" : "string",
+ "description" : "Render rows of timestamp and values. Show column headers. Show the time window attributes.\n\n###### options\n- `iso_timestamp`: `False`\n- `header_array`: `row`\n- `roll_up`: `False`\n- `data_axis`: `column`\n- `include_window_spec`: `True`",
+ "enum" : [ "COMPACT_WS" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+render_mode_one_of_2_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+render_mode_one_of_2_faker = JSF(
+ render_mode_one_of_2_model_schema, allow_none_optionals=1
+)
+
+
+class RenderModeOneOf2Stub:
+ """RenderModeOneOf2 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return render_mode_one_of_2_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RenderModeOneOf2":
+ """Create RenderModeOneOf2 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ RenderModeOneOf2Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RenderModeOneOf2Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/render_mode_one_of3_stub.py b/test/types/render_mode_one_of3_stub.py
new file mode 100644
index 0000000..ae34083
--- /dev/null
+++ b/test/types/render_mode_one_of3_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.render_mode_one_of3 import RenderModeOneOf3
+
+ RenderModeOneOf3Adapter = TypeAdapter(RenderModeOneOf3)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+render_mode_one_of_3_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_3",
+ "type" : "string",
+ "description" : "Render timestamps and each series (column) as a values array. Show column headers.\n\n###### options\n- `iso_timestamp`: `False`\n- `header_array`: `row`\n- `data_axis`: `row`\n- `roll_up`: `True`\n- `include_window_spec`: `True`",
+ "enum" : [ "SERIES" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+render_mode_one_of_3_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+render_mode_one_of_3_faker = JSF(
+ render_mode_one_of_3_model_schema, allow_none_optionals=1
+)
+
+
+class RenderModeOneOf3Stub:
+ """RenderModeOneOf3 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return render_mode_one_of_3_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RenderModeOneOf3":
+ """Create RenderModeOneOf3 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ RenderModeOneOf3Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RenderModeOneOf3Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/render_mode_one_of4_stub.py b/test/types/render_mode_one_of4_stub.py
new file mode 100644
index 0000000..23b906f
--- /dev/null
+++ b/test/types/render_mode_one_of4_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.render_mode_one_of4 import RenderModeOneOf4
+
+ RenderModeOneOf4Adapter = TypeAdapter(RenderModeOneOf4)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+render_mode_one_of_4_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_4",
+ "type" : "string",
+  "description" : "Renders row index in `rows`, and each series as a values array.\n\nThe series are prefixed by their series attributes. The `rows` index is prefixed by the labels for these attributes.\n\n###### options\n- `iso_timestamp`: `True`\n- `header_array`: `column`\n- `roll_up`: `False`\n- `data_axis`: `row`",
+ "enum" : [ "HEADER_COLUMN" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+render_mode_one_of_4_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+render_mode_one_of_4_faker = JSF(
+ render_mode_one_of_4_model_schema, allow_none_optionals=1
+)
+
+
+class RenderModeOneOf4Stub:
+ """RenderModeOneOf4 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return render_mode_one_of_4_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RenderModeOneOf4":
+ """Create RenderModeOneOf4 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ RenderModeOneOf4Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RenderModeOneOf4Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/render_mode_one_of5_stub.py b/test/types/render_mode_one_of5_stub.py
new file mode 100644
index 0000000..b367e30
--- /dev/null
+++ b/test/types/render_mode_one_of5_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.render_mode_one_of5 import RenderModeOneOf5
+
+ RenderModeOneOf5Adapter = TypeAdapter(RenderModeOneOf5)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+render_mode_one_of_5_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_5",
+ "type" : "string",
+ "description" : "Render an object for each observation. Uses flattened keys.\n\n###### options\n- `iso_timestamp`: `True`\n- `hierarchical`: `False`\n- `show_levels`: `True`\n- `roll_up`: `False`",
+ "enum" : [ "FLAT_DICT" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+render_mode_one_of_5_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+render_mode_one_of_5_faker = JSF(
+ render_mode_one_of_5_model_schema, allow_none_optionals=1
+)
+
+
+class RenderModeOneOf5Stub:
+ """RenderModeOneOf5 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return render_mode_one_of_5_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RenderModeOneOf5":
+ """Create RenderModeOneOf5 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ RenderModeOneOf5Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RenderModeOneOf5Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/render_mode_one_of6_stub.py b/test/types/render_mode_one_of6_stub.py
new file mode 100644
index 0000000..f87f8ca
--- /dev/null
+++ b/test/types/render_mode_one_of6_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.render_mode_one_of6 import RenderModeOneOf6
+
+ RenderModeOneOf6Adapter = TypeAdapter(RenderModeOneOf6)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+render_mode_one_of_6_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_6",
+ "type" : "string",
+ "description" : "Render an hierarchical object for each observation. Shows an iso timestamp.\n\n###### options\n- `iso_timestamp`: `True`\n- `hierarchical`: `True`\n- `show_levels`: `True`\n- `roll_up`: `True`",
+ "enum" : [ "HIER_DICT" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+render_mode_one_of_6_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+render_mode_one_of_6_faker = JSF(
+ render_mode_one_of_6_model_schema, allow_none_optionals=1
+)
+
+
+class RenderModeOneOf6Stub:
+ """RenderModeOneOf6 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return render_mode_one_of_6_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RenderModeOneOf6":
+ """Create RenderModeOneOf6 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ RenderModeOneOf6Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RenderModeOneOf6Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/render_mode_one_of7_stub.py b/test/types/render_mode_one_of7_stub.py
new file mode 100644
index 0000000..225b042
--- /dev/null
+++ b/test/types/render_mode_one_of7_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.render_mode_one_of7 import RenderModeOneOf7
+
+ RenderModeOneOf7Adapter = TypeAdapter(RenderModeOneOf7)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+render_mode_one_of_7_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_7",
+ "type" : "string",
+ "description" : "Render an object with metric keys for each observation. Shows an iso timestamp.\n\n###### options\n- `iso_timestamp`: `True`\n- `hierarchical`: `['metric']`\n- `show_levels`: `False`\n- `roll_up`: `True`\n- `key_skip_empty`: `True`",
+ "enum" : [ "METRIC_FLAT_DICT" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+render_mode_one_of_7_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+render_mode_one_of_7_faker = JSF(
+ render_mode_one_of_7_model_schema, allow_none_optionals=1
+)
+
+
+class RenderModeOneOf7Stub:
+ """RenderModeOneOf7 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return render_mode_one_of_7_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RenderModeOneOf7":
+ """Create RenderModeOneOf7 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ RenderModeOneOf7Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RenderModeOneOf7Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/render_mode_one_of8_stub.py b/test/types/render_mode_one_of8_stub.py
new file mode 100644
index 0000000..92c2f2b
--- /dev/null
+++ b/test/types/render_mode_one_of8_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.render_mode_one_of8 import RenderModeOneOf8
+
+ RenderModeOneOf8Adapter = TypeAdapter(RenderModeOneOf8)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+render_mode_one_of_8_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_8",
+ "type" : "string",
+ "description" : "Render in an object format compatible with the `/data/v1/events` upload.\n\n###### options\n- `iso_timestamp`: `False`\n- `hierarchical`: `False`\n- `show_levels`: `False`\n- `roll_up`: `True`",
+ "enum" : [ "UPLOAD" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+render_mode_one_of_8_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+render_mode_one_of_8_faker = JSF(
+ render_mode_one_of_8_model_schema, allow_none_optionals=1
+)
+
+
+class RenderModeOneOf8Stub:
+ """RenderModeOneOf8 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return render_mode_one_of_8_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RenderModeOneOf8":
+ """Create RenderModeOneOf8 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ RenderModeOneOf8Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RenderModeOneOf8Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/render_mode_one_of9_stub.py b/test/types/render_mode_one_of9_stub.py
new file mode 100644
index 0000000..3c5d397
--- /dev/null
+++ b/test/types/render_mode_one_of9_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.render_mode_one_of9 import RenderModeOneOf9
+
+ RenderModeOneOf9Adapter = TypeAdapter(RenderModeOneOf9)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+render_mode_one_of_9_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf_9",
+ "type" : "string",
+ "description" : "Render in csv format with row headers.\n\n###### options\n- `iso_timestamp`: `False`",
+ "enum" : [ "CSV" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+render_mode_one_of_9_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+render_mode_one_of_9_faker = JSF(
+ render_mode_one_of_9_model_schema, allow_none_optionals=1
+)
+
+
+class RenderModeOneOf9Stub:
+ """RenderModeOneOf9 unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return render_mode_one_of_9_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RenderModeOneOf9":
+ """Create RenderModeOneOf9 stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ RenderModeOneOf9Adapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RenderModeOneOf9Adapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/render_mode_one_of_stub.py b/test/types/render_mode_one_of_stub.py
new file mode 100644
index 0000000..77040d4
--- /dev/null
+++ b/test/types/render_mode_one_of_stub.py
@@ -0,0 +1,63 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.render_mode_one_of import RenderModeOneOf
+
+ RenderModeOneOfAdapter = TypeAdapter(RenderModeOneOf)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+render_mode_one_of_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode_oneOf",
+ "type" : "string",
+ "description" : "Render rows of timestamp and values. Show column headers. Includes an iso timestamp.\n\n###### options\n- `iso_timestamp`: `True`\n- `header_array`: `row`\n- `roll_up`: `False`\n- `data_axis`: `column`",
+ "enum" : [ "HEADER_ROW" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+render_mode_one_of_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+render_mode_one_of_faker = JSF(render_mode_one_of_model_schema, allow_none_optionals=1)
+
+
+class RenderModeOneOfStub:
+ """RenderModeOneOf unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return render_mode_one_of_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RenderModeOneOf":
+ """Create RenderModeOneOf stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ RenderModeOneOfAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RenderModeOneOfAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/render_mode_stub.py b/test/types/render_mode_stub.py
new file mode 100644
index 0000000..4cb8c9e
--- /dev/null
+++ b/test/types/render_mode_stub.py
@@ -0,0 +1,81 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.render_mode import RenderMode
+
+ RenderModeAdapter = TypeAdapter(RenderMode)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+render_mode_model_schema = json.loads(
+ r"""{
+ "title" : "_RenderMode",
+ "type" : "string",
+ "description" : "Render mode configuration keys.",
+ "oneOf" : [ {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_1"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_2"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_3"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_4"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_5"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_6"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_7"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_8"
+ }, {
+ "$ref" : "#/components/schemas/_RenderMode_oneOf_9"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+render_mode_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+render_mode_faker = JSF(render_mode_model_schema, allow_none_optionals=1)
+
+
+class RenderModeStub:
+ """RenderMode unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return render_mode_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RenderMode":
+ """Create RenderMode stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(RenderModeAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RenderModeAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/render_stub.py b/test/types/render_stub.py
new file mode 100644
index 0000000..dc62937
--- /dev/null
+++ b/test/types/render_stub.py
@@ -0,0 +1,118 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.render import Render
+
+ RenderAdapter = TypeAdapter(Render)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+render_model_schema = json.loads(
+ r"""{
+ "title" : "Render",
+ "type" : "object",
+ "properties" : {
+ "mode" : {
+ "$ref" : "#/components/schemas/_RenderMode"
+ },
+ "roll_up" : {
+ "title" : "Roll Up",
+ "type" : "boolean",
+ "description" : "move up attributes on rows (or columns) that are the same for all rows (or columns) to a table attribute. Levels enumerated in 'hierarchical' are excluded."
+ },
+ "hierarchical" : {
+ "$ref" : "#/components/schemas/Hierarchical"
+ },
+ "value_key" : {
+ "title" : "Value Key",
+ "type" : "string",
+ "description" : "if set, use this key in the value object to report data values"
+ },
+ "show_levels" : {
+ "title" : "Show Levels",
+ "type" : "boolean",
+ "description" : "if set, report the levels used in the data values (either hierarchical or flat)"
+ },
+ "iso_timestamp" : {
+ "title" : "Iso Timestamp",
+ "type" : "boolean",
+ "description" : "if set, render timestamps in a row or column index with both epoch and iso representations"
+ },
+ "row_key" : {
+ "title" : "Row Key",
+ "type" : "string",
+ "description" : "if set, use this key as name of the row-dimension for single-dimensional rows"
+ },
+ "column_key" : {
+ "title" : "Column Key",
+ "type" : "string",
+ "description" : "if set, use this key as name of the column-dimension for single-dimensional columns"
+ },
+ "header_array" : {
+ "$ref" : "#/components/schemas/HeaderArrayOption"
+ },
+ "data_axis" : {
+ "$ref" : "#/components/schemas/DataAxisOption"
+ },
+ "key_seperator" : {
+ "title" : "Key Seperator",
+ "type" : "string",
+ "description" : "character used to concatenate multi-key columns or rows when required"
+ },
+ "key_skip_empty" : {
+ "title" : "Key Skip Empty",
+ "type" : "boolean",
+ "description" : "skip empty values in concatenating multi-key column or row headers"
+ },
+ "include_window_spec" : {
+ "title" : "Include Window Spec",
+ "type" : "boolean",
+ "description" : "if set, include window specification in render modes that support it"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Configures the representation of data sets returned by the query API."
+}
+""",
+ object_hook=with_example_provider,
+)
+render_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+render_faker = JSF(render_model_schema, allow_none_optionals=1)
+
+
+class RenderStub:
+ """Render unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return render_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "Render":
+ """Create Render stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(RenderAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RenderAdapter.validate_python(json, context={"skip_validation": True})
diff --git a/test/types/response_data_set_stub.py b/test/types/response_data_set_stub.py
new file mode 100644
index 0000000..dc370f5
--- /dev/null
+++ b/test/types/response_data_set_stub.py
@@ -0,0 +1,70 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.response_data_set import ResponseDataSet
+
+ ResponseDataSetAdapter = TypeAdapter(ResponseDataSet)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+response_data_set_model_schema = json.loads(
+ r"""{
+ "title" : "Response Data Set",
+ "description" : "Result timeseries data set, with one time dimension.",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/RowDataSet"
+ }, {
+ "$ref" : "#/components/schemas/SeriesDataSet"
+ }, {
+ "$ref" : "#/components/schemas/ColumnDataSet"
+ }, {
+ "$ref" : "#/components/schemas/ObjectDataSet"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+response_data_set_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+response_data_set_faker = JSF(response_data_set_model_schema, allow_none_optionals=1)
+
+
+class ResponseDataSetStub:
+ """ResponseDataSet unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return response_data_set_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "ResponseDataSet":
+ """Create ResponseDataSet stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ ResponseDataSetAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return ResponseDataSetAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/row_data_set_data_axis_stub.py b/test/types/row_data_set_data_axis_stub.py
new file mode 100644
index 0000000..6c650be
--- /dev/null
+++ b/test/types/row_data_set_data_axis_stub.py
@@ -0,0 +1,67 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.row_data_set_data_axis import RowDataSetDataAxis
+
+ RowDataSetDataAxisAdapter = TypeAdapter(RowDataSetDataAxis)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+row_data_set_data_axis_model_schema = json.loads(
+ r"""{
+ "title" : "RowDataSet_data_axis",
+ "type" : "string",
+ "default" : "column",
+ "enum" : [ "column" ]
+}
+""",
+ object_hook=with_example_provider,
+)
+row_data_set_data_axis_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+row_data_set_data_axis_faker = JSF(
+ row_data_set_data_axis_model_schema, allow_none_optionals=1
+)
+
+
+class RowDataSetDataAxisStub:
+ """RowDataSetDataAxis unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return row_data_set_data_axis_faker.generate(
+ use_defaults=True, use_examples=True
+ )
+
+ @classmethod
+ def create_instance(cls) -> "RowDataSetDataAxis":
+ """Create RowDataSetDataAxis stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ RowDataSetDataAxisAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RowDataSetDataAxisAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/row_data_set_stub.py b/test/types/row_data_set_stub.py
new file mode 100644
index 0000000..0705e38
--- /dev/null
+++ b/test/types/row_data_set_stub.py
@@ -0,0 +1,99 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.row_data_set import RowDataSet
+
+ RowDataSetAdapter = TypeAdapter(RowDataSet)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+row_data_set_model_schema = json.loads(
+ r"""{
+ "required" : [ "columns", "data" ],
+ "type" : "object",
+ "properties" : {
+ "attributes" : {
+ "$ref" : "#/components/schemas/DataSetAttributes"
+ },
+ "window_spec" : {
+ "$ref" : "#/components/schemas/DataSetWindow"
+ },
+ "data_axis" : {
+ "$ref" : "#/components/schemas/RowDataSet_data_axis"
+ },
+ "columns" : {
+ "title" : "Column Headers",
+ "type" : "array",
+ "description" : "Header Attributes for the column data.\n\nThe initial string-valued headers (normally a single `timestamp`) indicate that column to contain row index data (i.e. timestamps).\n\nThe remaining object-valued column headers identify and describe the actual series data.",
+ "items" : {
+ "$ref" : "#/components/schemas/Column_Headers_inner"
+ },
+ "x-prefixItems" : [ {
+ "const" : "timestamp",
+ "title" : "Unix epoch milliseconds timestamp."
+ } ]
+ },
+ "data" : {
+ "title" : "Data",
+ "type" : "array",
+ "items" : {
+ "title" : "Observation",
+ "type" : "array",
+ "description" : "Row index data (timestamp), and a value for each of the series.",
+ "items" : {
+ "$ref" : "#/components/schemas/Datum"
+ },
+ "x-prefixItems" : [ {
+ "$ref" : "#/components/schemas/Timestamp"
+ } ]
+ }
+ }
+ },
+ "additionalProperties" : true,
+  "description" : "Row-oriented dataset.\n\nTimeseries data layout with a column header and a data row per timestamp.\nResult for render options `data_axis=column` and `header_array=row`."
+}
+""",
+ object_hook=with_example_provider,
+)
+row_data_set_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+row_data_set_faker = JSF(row_data_set_model_schema, allow_none_optionals=1)
+
+
+class RowDataSetStub:
+ """RowDataSet unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return row_data_set_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RowDataSet":
+ """Create RowDataSet stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(RowDataSetAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RowDataSetAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/row_header_stub.py b/test/types/row_header_stub.py
new file mode 100644
index 0000000..09f414b
--- /dev/null
+++ b/test/types/row_header_stub.py
@@ -0,0 +1,67 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.row_header import RowHeader
+
+ RowHeaderAdapter = TypeAdapter(RowHeader)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+row_header_model_schema = json.loads(
+ r"""{
+ "required" : [ "timestamp" ],
+ "type" : "object",
+ "properties" : {
+ "timestamp" : {
+ "$ref" : "#/components/schemas/Timestamp"
+ },
+ "timestamp_iso" : {
+ "$ref" : "#/components/schemas/TimestampIso"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Index entry attributes.\n\nAttributes for a timestamp index entry."
+}
+""",
+ object_hook=with_example_provider,
+)
+row_header_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+row_header_faker = JSF(row_header_model_schema, allow_none_optionals=1)
+
+
+class RowHeaderStub:
+ """RowHeader unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return row_header_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RowHeader":
+ """Create RowHeader stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(RowHeaderAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RowHeaderAdapter.validate_python(json, context={"skip_validation": True})
diff --git a/test/types/row_headers_inner_stub.py b/test/types/row_headers_inner_stub.py
new file mode 100644
index 0000000..112cf5e
--- /dev/null
+++ b/test/types/row_headers_inner_stub.py
@@ -0,0 +1,65 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.row_headers_inner import RowHeadersInner
+
+ RowHeadersInnerAdapter = TypeAdapter(RowHeadersInner)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+row_headers_inner_model_schema = json.loads(
+ r"""{
+ "title" : "Row_Headers_inner",
+ "anyOf" : [ {
+ "$ref" : "#/components/schemas/ColumnIndexRowHeader"
+ }, {
+ "$ref" : "#/components/schemas/RowHeader"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+row_headers_inner_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+row_headers_inner_faker = JSF(row_headers_inner_model_schema, allow_none_optionals=1)
+
+
+class RowHeadersInnerStub:
+ """RowHeadersInner unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return row_headers_inner_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "RowHeadersInner":
+ """Create RowHeadersInner stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ RowHeadersInnerAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return RowHeadersInnerAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/series_data_set_stub.py b/test/types/series_data_set_stub.py
new file mode 100644
index 0000000..167fd5e
--- /dev/null
+++ b/test/types/series_data_set_stub.py
@@ -0,0 +1,106 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.series_data_set import SeriesDataSet
+
+ SeriesDataSetAdapter = TypeAdapter(SeriesDataSet)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+series_data_set_model_schema = json.loads(
+ r"""{
+ "required" : [ "columns", "data" ],
+ "type" : "object",
+ "properties" : {
+ "attributes" : {
+ "$ref" : "#/components/schemas/DataSetAttributes"
+ },
+ "window_spec" : {
+ "$ref" : "#/components/schemas/DataSetWindow"
+ },
+ "data_axis" : {
+ "$ref" : "#/components/schemas/ColumnDataSet_data_axis"
+ },
+ "columns" : {
+ "title" : "Column Headers",
+ "type" : "array",
+ "description" : "Header Attributes for the column data.\n\nThe initial string-valued headers (normally a single `timestamp`) indicate that column to contain row index data (i.e. timestamps).\n\nThe remaining object-valued column headers identify and describe the actual series data.",
+ "items" : {
+ "$ref" : "#/components/schemas/Column_Headers_inner"
+ },
+ "x-prefixItems" : [ {
+ "const" : "timestamp",
+ "title" : "Unix epoch milliseconds timestamp."
+ } ]
+ },
+ "data" : {
+ "title" : "Data",
+ "type" : "array",
+ "items" : {
+ "title" : "Series",
+ "type" : "array",
+ "description" : "All metric observation values for a single series.",
+ "items" : {
+ "$ref" : "#/components/schemas/Datum"
+ }
+ },
+ "x-prefixItems" : [ {
+ "items" : {
+ "$ref" : "#/components/schemas/Timestamp"
+ },
+ "type" : "array",
+ "title" : "Timestamp Index",
+ "description" : "The timestamp index for this result data."
+ } ]
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Column-oriented dataset.\n\nTimeseries data layout with a column header\nand a seperate data array for the time index and each series.\nResult for render options `data_axis=row` and `header_array=row`."
+}
+""",
+ object_hook=with_example_provider,
+)
+series_data_set_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+series_data_set_faker = JSF(series_data_set_model_schema, allow_none_optionals=1)
+
+
+class SeriesDataSetStub:
+ """SeriesDataSet unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return series_data_set_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "SeriesDataSet":
+ """Create SeriesDataSet stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ SeriesDataSetAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return SeriesDataSetAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/series_spec_stub.py b/test/types/series_spec_stub.py
new file mode 100644
index 0000000..b199e50
--- /dev/null
+++ b/test/types/series_spec_stub.py
@@ -0,0 +1,87 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.series_spec import SeriesSpec
+
+ SeriesSpecAdapter = TypeAdapter(SeriesSpec)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+series_spec_model_schema = json.loads(
+ r"""{
+ "title" : "SeriesSpec",
+ "type" : "object",
+ "properties" : {
+ "name" : {
+ "title" : "Name",
+ "type" : "string",
+ "description" : "Optional alias name for the series. This name is used when exporting the dataset to CSV format.",
+ "example" : "demoQuery"
+ },
+ "resource" : {
+ "title" : "Resource",
+ "type" : "string",
+ "description" : "Resource id for the series, required unless it is specified as a query default.",
+ "example" : "13efb488-75ac-4dac-828a-d49c5c2ebbfc"
+ },
+ "metric" : {
+ "title" : "Metric",
+ "type" : "string",
+ "description" : "Metric name for the series, required unless it is specified as a query default.",
+ "example" : "temperature"
+ },
+ "aggregration" : {
+ "$ref" : "#/components/schemas/AggregationMethod"
+ },
+ "interpolation" : {
+ "$ref" : "#/components/schemas/Interpolation"
+ }
+ },
+ "additionalProperties" : true,
+ "description" : "Query specification for a single series."
+}
+""",
+ object_hook=with_example_provider,
+)
+series_spec_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+series_spec_faker = JSF(series_spec_model_schema, allow_none_optionals=1)
+
+
+class SeriesSpecStub:
+ """SeriesSpec unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return series_spec_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "SeriesSpec":
+ """Create SeriesSpec stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(SeriesSpecAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return SeriesSpecAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/time_window_from_stub.py b/test/types/time_window_from_stub.py
new file mode 100644
index 0000000..8a1098e
--- /dev/null
+++ b/test/types/time_window_from_stub.py
@@ -0,0 +1,82 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.time_window_from import TimeWindowFrom
+
+ TimeWindowFromAdapter = TypeAdapter(TimeWindowFrom)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+time_window_from_model_schema = json.loads(
+ r"""{
+ "title" : "Time Window From",
+ "description" : "The start of the time window for which results will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.",
+ "oneOf" : [ {
+ "title" : "ISO8601 absolute timestamp",
+ "pattern" : "[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?",
+ "type" : "string",
+ "description" : "A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations) format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)",
+ "format" : "date-time",
+ "example" : "2018-03-21T12:23:00+01:00"
+ }, {
+ "title" : "UNIX epoch milliseconds",
+ "minimum" : 0,
+ "type" : "integer",
+ "description" : "Absolute timestamp milliseconds in unix epoch since 1970-01-01.",
+ "example" : 1534836422284
+ }, {
+ "title" : "ISO8601 Period Before Now",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "Specifies a timestamp before _now_ as a period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+time_window_from_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+time_window_from_faker = JSF(time_window_from_model_schema, allow_none_optionals=1)
+
+
+class TimeWindowFromStub:
+ """TimeWindowFrom unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return time_window_from_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "TimeWindowFrom":
+ """Create TimeWindowFrom stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ TimeWindowFromAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return TimeWindowFromAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/time_window_until_stub.py b/test/types/time_window_until_stub.py
new file mode 100644
index 0000000..7248de1
--- /dev/null
+++ b/test/types/time_window_until_stub.py
@@ -0,0 +1,82 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.time_window_until import TimeWindowUntil
+
+ TimeWindowUntilAdapter = TypeAdapter(TimeWindowUntil)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+time_window_until_model_schema = json.loads(
+ r"""{
+ "title" : "Time Window Until",
+ "description" : "The end (not-inclusive) of the time window for which results will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties)specifiers.",
+ "oneOf" : [ {
+ "title" : "ISO8601 absolute timestamp",
+ "pattern" : "[0-9]{4}-[0-9]{2}-[0-9]{2}(T.*)?",
+ "type" : "string",
+ "description" : "A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations) format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)",
+ "format" : "date-time",
+ "example" : "2018-03-21T12:23:00+01:00"
+ }, {
+ "title" : "UNIX epoch milliseconds",
+ "minimum" : 0,
+ "type" : "integer",
+ "description" : "Absolute timestamp milliseconds in unix epoch since 1970-01-01.",
+ "example" : 1534836422284
+ }, {
+ "title" : "ISO8601 Period Before Now",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "Specifies a timestamp before _now_ as a period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+time_window_until_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+time_window_until_faker = JSF(time_window_until_model_schema, allow_none_optionals=1)
+
+
+class TimeWindowUntilStub:
+ """TimeWindowUntil unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return time_window_until_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "TimeWindowUntil":
+ """Create TimeWindowUntil stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ TimeWindowUntilAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return TimeWindowUntilAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/validation_error_stub.py b/test/types/validation_error_stub.py
new file mode 100644
index 0000000..dea8956
--- /dev/null
+++ b/test/types/validation_error_stub.py
@@ -0,0 +1,79 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.validation_error import ValidationError
+
+ ValidationErrorAdapter = TypeAdapter(ValidationError)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+validation_error_model_schema = json.loads(
+ r"""{
+ "title" : "ValidationError",
+ "required" : [ "loc", "msg", "type" ],
+ "type" : "object",
+ "properties" : {
+ "loc" : {
+ "title" : "Location",
+ "type" : "array",
+ "items" : {
+ "$ref" : "#/components/schemas/Location_inner"
+ }
+ },
+ "msg" : {
+ "title" : "Message",
+ "type" : "string"
+ },
+ "type" : {
+ "title" : "Error Type",
+ "type" : "string"
+ }
+ }
+}
+""",
+ object_hook=with_example_provider,
+)
+validation_error_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+validation_error_faker = JSF(validation_error_model_schema, allow_none_optionals=1)
+
+
+class ValidationErrorStub:
+ """ValidationError unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return validation_error_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "ValidationError":
+ """Create ValidationError stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ ValidationErrorAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return ValidationErrorAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/window_override_stub.py b/test/types/window_override_stub.py
new file mode 100644
index 0000000..5877d21
--- /dev/null
+++ b/test/types/window_override_stub.py
@@ -0,0 +1,69 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.window_override import WindowOverride
+
+ WindowOverrideAdapter = TypeAdapter(WindowOverride)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+window_override__model_schema = json.loads(
+ r"""{
+ "title" : "Window Override.",
+ "type" : "string",
+ "oneOf" : [ {
+ "title" : "ISO8601 period ",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+window_override__model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+window_override__faker = JSF(window_override__model_schema, allow_none_optionals=1)
+
+
+class WindowOverrideStub:
+ """WindowOverride unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return window_override__faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "WindowOverride":
+ """Create WindowOverride stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(
+ WindowOverrideAdapter.json_schema(), allow_none_optionals=1
+ )
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return WindowOverrideAdapter.validate_python(
+ json, context={"skip_validation": True}
+ )
diff --git a/test/types/window_stub.py b/test/types/window_stub.py
new file mode 100644
index 0000000..ba258f1
--- /dev/null
+++ b/test/types/window_stub.py
@@ -0,0 +1,66 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) model tests.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+import json
+
+from jsf import JSF
+from pydantic import TypeAdapter
+
+from ..openapi import MODEL_DEFINITIONS, with_example_provider
+
+try:
+ from waylay.services.queries.models.window import Window
+
+ WindowAdapter = TypeAdapter(Window)
+ MODELS_AVAILABLE = True
+except ImportError as exc:
+ MODELS_AVAILABLE = False
+
+window_model_schema = json.loads(
+ r"""{
+ "title" : "Window",
+ "type" : "string",
+ "description" : "The absolute size of the time window for which results will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.",
+ "oneOf" : [ {
+ "title" : "ISO8601 period ",
+ "pattern" : "^P([0-9]+Y)?([0-9]+M)?([0-9]+W)?([0-9]+D)?(T([0-9]+H)?([0-9]+M)?([0-9]+(\\.[0-9]*)?S)?)?$",
+ "type" : "string",
+ "description" : "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ "format" : "period",
+ "example" : "PT3H15M"
+ } ]
+}
+""",
+ object_hook=with_example_provider,
+)
+window_model_schema.update({"definitions": MODEL_DEFINITIONS})
+
+window_faker = JSF(window_model_schema, allow_none_optionals=1)
+
+
+class WindowStub:
+ """Window unit test stubs."""
+
+ @classmethod
+ def create_json(cls):
+ """Create a dict stub instance."""
+ return window_faker.generate(use_defaults=True, use_examples=True)
+
+ @classmethod
+ def create_instance(cls) -> "Window":
+ """Create Window stub instance."""
+ if not MODELS_AVAILABLE:
+ raise ImportError("Models must be installed to create class stubs")
+ json = cls.create_json()
+ if not json:
+ # use backup example based on the pydantic model schema
+ backup_faker = JSF(WindowAdapter.json_schema(), allow_none_optionals=1)
+ json = backup_faker.generate(use_defaults=True, use_examples=True)
+ return WindowAdapter.validate_python(json, context={"skip_validation": True})
diff --git a/waylay-sdk-queries-types/.openapi-generator/FILES b/waylay-sdk-queries-types/.openapi-generator/FILES
new file mode 100644
index 0000000..4ec7e1a
--- /dev/null
+++ b/waylay-sdk-queries-types/.openapi-generator/FILES
@@ -0,0 +1,115 @@
+LICENSE.txt
+pyproject.toml
+src/waylay/services/queries/models/__init__.py
+src/waylay/services/queries/models/aggregation_by_resource_and_metric.py
+src/waylay/services/queries/models/aggregation_by_resource_or_metric.py
+src/waylay/services/queries/models/aggregation_method.py
+src/waylay/services/queries/models/aggregation_method_one_of.py
+src/waylay/services/queries/models/aggregation_method_one_of1.py
+src/waylay/services/queries/models/aggregation_method_one_of2.py
+src/waylay/services/queries/models/aggregation_method_one_of3.py
+src/waylay/services/queries/models/aggregation_method_one_of4.py
+src/waylay/services/queries/models/aggregation_method_one_of5.py
+src/waylay/services/queries/models/aggregation_method_one_of6.py
+src/waylay/services/queries/models/aggregation_method_one_of7.py
+src/waylay/services/queries/models/aggregation_method_one_of8.py
+src/waylay/services/queries/models/aggregations_inner.py
+src/waylay/services/queries/models/align_at.py
+src/waylay/services/queries/models/align_shift.py
+src/waylay/services/queries/models/alignment.py
+src/waylay/services/queries/models/alignment_grid_interval.py
+src/waylay/services/queries/models/alignment_timezone.py
+src/waylay/services/queries/models/cause_exception.py
+src/waylay/services/queries/models/column_data_set.py
+src/waylay/services/queries/models/column_data_set_data_axis.py
+src/waylay/services/queries/models/column_header.py
+src/waylay/services/queries/models/column_headers_inner.py
+src/waylay/services/queries/models/data_axis_option.py
+src/waylay/services/queries/models/data_set_attributes.py
+src/waylay/services/queries/models/data_set_window.py
+src/waylay/services/queries/models/datum.py
+src/waylay/services/queries/models/default_aggregation.py
+src/waylay/services/queries/models/default_interpolation.py
+src/waylay/services/queries/models/delete_response.py
+src/waylay/services/queries/models/embeddings.py
+src/waylay/services/queries/models/from_override.py
+src/waylay/services/queries/models/grouping_interval.py
+src/waylay/services/queries/models/grouping_interval_override.py
+src/waylay/services/queries/models/grouping_interval_override_one_of.py
+src/waylay/services/queries/models/hal_link.py
+src/waylay/services/queries/models/hal_link_method.py
+src/waylay/services/queries/models/hal_link_role.py
+src/waylay/services/queries/models/header_array_option.py
+src/waylay/services/queries/models/hierarchical.py
+src/waylay/services/queries/models/http_validation_error.py
+src/waylay/services/queries/models/interpolation.py
+src/waylay/services/queries/models/interpolation_method.py
+src/waylay/services/queries/models/interpolation_method_one_of.py
+src/waylay/services/queries/models/interpolation_method_one_of1.py
+src/waylay/services/queries/models/interpolation_method_one_of10.py
+src/waylay/services/queries/models/interpolation_method_one_of11.py
+src/waylay/services/queries/models/interpolation_method_one_of12.py
+src/waylay/services/queries/models/interpolation_method_one_of13.py
+src/waylay/services/queries/models/interpolation_method_one_of2.py
+src/waylay/services/queries/models/interpolation_method_one_of3.py
+src/waylay/services/queries/models/interpolation_method_one_of4.py
+src/waylay/services/queries/models/interpolation_method_one_of5.py
+src/waylay/services/queries/models/interpolation_method_one_of6.py
+src/waylay/services/queries/models/interpolation_method_one_of7.py
+src/waylay/services/queries/models/interpolation_method_one_of8.py
+src/waylay/services/queries/models/interpolation_method_one_of9.py
+src/waylay/services/queries/models/interpolation_spec.py
+src/waylay/services/queries/models/links.py
+src/waylay/services/queries/models/location_inner.py
+src/waylay/services/queries/models/message.py
+src/waylay/services/queries/models/message_arguments.py
+src/waylay/services/queries/models/message_level.py
+src/waylay/services/queries/models/message_properties.py
+src/waylay/services/queries/models/object.py
+src/waylay/services/queries/models/object_data.py
+src/waylay/services/queries/models/object_data_set.py
+src/waylay/services/queries/models/object_data_value.py
+src/waylay/services/queries/models/py.typed
+src/waylay/services/queries/models/queries_list_response.py
+src/waylay/services/queries/models/query_definition.py
+src/waylay/services/queries/models/query_entity_input.py
+src/waylay/services/queries/models/query_execution_message.py
+src/waylay/services/queries/models/query_execution_message_level.py
+src/waylay/services/queries/models/query_hal_links.py
+src/waylay/services/queries/models/query_input.py
+src/waylay/services/queries/models/query_list_hal_links.py
+src/waylay/services/queries/models/query_list_item.py
+src/waylay/services/queries/models/query_output.py
+src/waylay/services/queries/models/query_response.py
+src/waylay/services/queries/models/query_result.py
+src/waylay/services/queries/models/query_update_input.py
+src/waylay/services/queries/models/render.py
+src/waylay/services/queries/models/render1.py
+src/waylay/services/queries/models/render_mode.py
+src/waylay/services/queries/models/render_mode_one_of.py
+src/waylay/services/queries/models/render_mode_one_of1.py
+src/waylay/services/queries/models/render_mode_one_of2.py
+src/waylay/services/queries/models/render_mode_one_of3.py
+src/waylay/services/queries/models/render_mode_one_of4.py
+src/waylay/services/queries/models/render_mode_one_of5.py
+src/waylay/services/queries/models/render_mode_one_of6.py
+src/waylay/services/queries/models/render_mode_one_of7.py
+src/waylay/services/queries/models/render_mode_one_of8.py
+src/waylay/services/queries/models/render_mode_one_of9.py
+src/waylay/services/queries/models/response_data_set.py
+src/waylay/services/queries/models/row_data_set.py
+src/waylay/services/queries/models/row_data_set_data_axis.py
+src/waylay/services/queries/models/row_header.py
+src/waylay/services/queries/models/row_headers_inner.py
+src/waylay/services/queries/models/series_data_set.py
+src/waylay/services/queries/models/series_spec.py
+src/waylay/services/queries/models/time_window_from.py
+src/waylay/services/queries/models/time_window_until.py
+src/waylay/services/queries/models/validation_error.py
+src/waylay/services/queries/models/window.py
+src/waylay/services/queries/models/window_override.py
+src/waylay/services/queries/queries/__init__.py
+src/waylay/services/queries/queries/execute_api.py
+src/waylay/services/queries/queries/manage_api.py
+src/waylay/services/queries/queries/py.typed
+src/waylay/services/queries/queries/status_api.py
diff --git a/waylay-sdk-queries-types/.openapi-generator/VERSION b/waylay-sdk-queries-types/.openapi-generator/VERSION
new file mode 100644
index 0000000..1985849
--- /dev/null
+++ b/waylay-sdk-queries-types/.openapi-generator/VERSION
@@ -0,0 +1 @@
+7.7.0
diff --git a/waylay-sdk-queries-types/LICENSE.txt b/waylay-sdk-queries-types/LICENSE.txt
new file mode 100644
index 0000000..2796771
--- /dev/null
+++ b/waylay-sdk-queries-types/LICENSE.txt
@@ -0,0 +1,13 @@
+ISC License (ISC)
+Copyright 2024, Waylay
+
+Permission to use, copy, modify, and/or distribute this software for any purpose
+with or without fee is hereby granted, provided that the above copyright notice
+and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
+OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
\ No newline at end of file
diff --git a/waylay-sdk-queries-types/README.md b/waylay-sdk-queries-types/README.md
new file mode 100644
index 0000000..a7b6f6d
--- /dev/null
+++ b/waylay-sdk-queries-types/README.md
@@ -0,0 +1,65 @@
+# Waylay Queries Service
+
+Execute and store queries on the Waylay timeseries.
+
+Protocol version: v1.
+
+This Python package is automatically generated based on the
+Waylay Queries OpenAPI specification (API version: 0.5.0).
+For more information, please visit [the openapi specification](https://docs.waylay.io/openapi/public/redocly/queries.html).
+
+It is an extension of the `waylay-sdk-queries` package: it contains the typed model classes for all path params, query params, body params and responses of the API methods in `waylay-sdk-queries`.
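+
+For example, with this types package installed, a series specification can be constructed and serialized as a plain pydantic model (a minimal sketch; the values are illustrative):
+
+```python
+from waylay.services.queries.models.series_spec import SeriesSpec
+
+# SeriesSpec is a generated pydantic model; unset optional fields are omitted on dump.
+spec = SeriesSpec(resource="13efb488-75ac-4dac-828a-d49c5c2ebbfc", metric="temperature")
+print(spec.model_dump(exclude_none=True))
+```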
+
+## Requirements
+This package requires Python 3.9+.
+
+## Installation
+
+Normally this package is installed together with support for other services using the [waylay-sdk](https://pypi.org/project/waylay-sdk/) umbrella package:
+* `pip install waylay-sdk` will install `waylay-sdk-queries` together with the SDK api packages for other services.
+* `pip install waylay-sdk[types-queries]` will additionally install the types package `waylay-sdk-queries-types`.
+* `pip install waylay-sdk[types]` will install the types packages for this and all other services.
+
+Alternatively, you can install support for this _queries_ service only, by installing or extending an existing [waylay-sdk-core](https://pypi.org/project/waylay-sdk-core/) installation:
+
+- `pip install waylay-sdk-queries` to only install api support for _queries_.
+- `pip install waylay-sdk-queries[types]` to additionally install type support for _queries_.
+
+## Usage
+
+```python
+from pprint import pprint
+
+# Import the waylay-client from the waylay-sdk-core package
+from waylay.sdk.client import WaylayClient
+from waylay.sdk.api.api_exceptions import ApiError
+
+# Initialize a waylay client instance
+waylay_client = WaylayClient.from_profile()
+
+# Note that the typed model classes for responses/parameters/... are only available when `waylay-sdk-queries-types` is installed
+from waylay.services.queries.models.query_input import QueryInput
+from waylay.services.queries.models.query_result import QueryResult
+try:
+ # Execute Query
+    # calls `POST /queries/v1/data`
+ api_response = await waylay_client.queries.execute.execute(
+ # query parameters:
+ query = {
+        'resource': '13efb488-75ac-4dac-828a-d49c5c2ebbfc',
+ 'metric': 'temperature'
+ },
+ # json data: use a generated model or a json-serializable python data structure (dict, list)
+        json = QueryInput(),  # QueryInput
+ headers = {
+ 'accept': 'accept_example',
+ },
+ )
+ print("The response of queries.execute.execute:\n")
+ pprint(api_response)
+except ApiError as e:
+ print("Exception when calling queries.execute.execute: %s\n" % e)
+```
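+
+The typed models are plain pydantic (v2) models, so payloads received as raw dicts can also be validated explicitly with a `TypeAdapter`, mirroring the pattern used by the unit test stubs in this repository (a minimal sketch; the payload is illustrative):
+
+```python
+from pydantic import TypeAdapter
+
+from waylay.services.queries.models.series_spec import SeriesSpec
+
+# Validate a raw payload against the generated SeriesSpec model.
+series_adapter = TypeAdapter(SeriesSpec)
+series = series_adapter.validate_python(
+    {"resource": "13efb488-75ac-4dac-828a-d49c5c2ebbfc", "metric": "temperature"}
+)
+print(series)
+```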
+
+
+For more information, please visit the [Waylay API documentation](https://docs.waylay.io/#/api/?id=software-development-kits).
diff --git a/waylay-sdk-queries-types/pyproject.toml b/waylay-sdk-queries-types/pyproject.toml
new file mode 100644
index 0000000..9c18c9d
--- /dev/null
+++ b/waylay-sdk-queries-types/pyproject.toml
@@ -0,0 +1,73 @@
+[build-system]
+requires = ["setuptools >= 61.0"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "waylay-sdk-queries-types"
+version = "0.5.0.20240802"
+description = "Waylay Query: timeseries queries (v1 protocol) Types "
+authors = [
+ { name = "Waylay", email = "info@waylay.io"}
+]
+keywords = ["Waylay Query: timeseries queries (v1 protocol)" , "Types"]
+requires-python = ">= 3.9"
+dependencies = [
+ "waylay-sdk-core ~= 0.2.3",
+ "waylay-sdk-queries == 0.5.0.20240802",
+ "pydantic ~= 2.6",
+ "typing-extensions ~= 4.10",
+ "eval-type-backport ~= 0.1.3; python_version < '3.10'",
+]
+readme = "README.md"
+license={file = "LICENSE.txt"}
+
+[project.urls]
+Homepage = "https://www.waylay.io/"
+Documentation = "https://docs.waylay.io/#/api/?id=software-development-kits"
+Repository = "https://github.com/waylayio/waylay-sdk-queries-py.git"
+"Openapi Specification" = "https://docs.waylay.io/openapi/public/redocly/queries.html"
+
+[project.optional-dependencies]
+dev = [
+ "mypy",
+ "ruff",
+ "types-python-jose",
+ "types-appdirs",
+ "types-python-dateutil",
+ "pytest",
+ "pytest-mock",
+ "pytest-httpx",
+ "pytest-asyncio",
+ "starlette",
+ "python-multipart",
+ "typeguard",
+ "pyyaml",
+ "jsf >= 0.11.1",
+]
+
+
+[tool.setuptools.packages.find]
+where = ["src/"]
+namespaces = true
+
+[tool.ruff]
+include = ["pyproject.toml", "src/**/*.py"]
+
+[tool.ruff.lint]
+# allow duplicate imports
+ignore=["F811"]
+# https://docs.astral.sh/ruff/rules
+select= [
+ "UP007", "FA102", # convert Union to | (pep-604)
+ "I001", "F401", # sort and remove unused imports
+ "PIE790", # remove unnecessary pass statements
+ "E303", # too many blank lines
+]
+
+[tool.ruff.lint.per-file-ignores]
+# do not touch imports here
+"__init__.py" = ["F401"]
+"conftest.py" = ["F401"]
+
+[tool.pytest.ini_options]
+asyncio_mode = "auto"
\ No newline at end of file
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__init__.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/__init__.py
new file mode 100644
index 0000000..73e2608
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/__init__.py
@@ -0,0 +1,231 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol): REST Models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+version: 0.5.0
+
+ Execute and store queries on the Waylay timeseries. Protocol version: v1.
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+__version__ = "0.5.0.20240802"
+
+# import models into model package
+from .aggregation_by_resource_and_metric import AggregationByResourceAndMetric
+from .aggregation_by_resource_or_metric import AggregationByResourceOrMetric
+from .aggregation_method import AggregationMethod
+from .aggregation_method_one_of import AggregationMethodOneOf
+from .aggregation_method_one_of1 import AggregationMethodOneOf1
+from .aggregation_method_one_of2 import AggregationMethodOneOf2
+from .aggregation_method_one_of3 import AggregationMethodOneOf3
+from .aggregation_method_one_of4 import AggregationMethodOneOf4
+from .aggregation_method_one_of5 import AggregationMethodOneOf5
+from .aggregation_method_one_of6 import AggregationMethodOneOf6
+from .aggregation_method_one_of7 import AggregationMethodOneOf7
+from .aggregation_method_one_of8 import AggregationMethodOneOf8
+from .aggregations_inner import AggregationsInner
+from .align_at import AlignAt
+from .align_shift import AlignShift
+from .alignment import Alignment
+from .alignment_grid_interval import AlignmentGridInterval
+from .alignment_timezone import AlignmentTimezone
+from .cause_exception import CauseException
+from .column_data_set import ColumnDataSet
+from .column_data_set_data_axis import ColumnDataSetDataAxis
+from .column_header import ColumnHeader
+from .column_headers_inner import ColumnHeadersInner
+from .data_axis_option import DataAxisOption
+from .data_set_attributes import DataSetAttributes
+from .data_set_window import DataSetWindow
+from .datum import Datum
+from .default_aggregation import DefaultAggregation
+from .default_interpolation import DefaultInterpolation
+from .delete_response import DeleteResponse
+from .embeddings import Embeddings
+from .from_override import FromOverride
+from .grouping_interval import GroupingInterval
+from .grouping_interval_override import GroupingIntervalOverride
+from .grouping_interval_override_one_of import GroupingIntervalOverrideOneOf
+from .hal_link import HALLink
+from .hal_link_method import HALLinkMethod
+from .hal_link_role import HALLinkRole
+from .header_array_option import HeaderArrayOption
+from .hierarchical import Hierarchical
+from .http_validation_error import HTTPValidationError
+from .interpolation import Interpolation
+from .interpolation_method import InterpolationMethod
+from .interpolation_method_one_of import InterpolationMethodOneOf
+from .interpolation_method_one_of1 import InterpolationMethodOneOf1
+from .interpolation_method_one_of2 import InterpolationMethodOneOf2
+from .interpolation_method_one_of3 import InterpolationMethodOneOf3
+from .interpolation_method_one_of4 import InterpolationMethodOneOf4
+from .interpolation_method_one_of5 import InterpolationMethodOneOf5
+from .interpolation_method_one_of6 import InterpolationMethodOneOf6
+from .interpolation_method_one_of7 import InterpolationMethodOneOf7
+from .interpolation_method_one_of8 import InterpolationMethodOneOf8
+from .interpolation_method_one_of9 import InterpolationMethodOneOf9
+from .interpolation_method_one_of10 import InterpolationMethodOneOf10
+from .interpolation_method_one_of11 import InterpolationMethodOneOf11
+from .interpolation_method_one_of12 import InterpolationMethodOneOf12
+from .interpolation_method_one_of13 import InterpolationMethodOneOf13
+from .interpolation_spec import InterpolationSpec
+from .links import Links
+from .location_inner import LocationInner
+from .message import Message
+from .message_arguments import MessageArguments
+from .message_level import MessageLevel
+from .message_properties import MessageProperties
+from .object_data import ObjectData
+from .object_data_set import ObjectDataSet
+from .object_data_value import ObjectDataValue
+from .queries_list_response import QueriesListResponse
+from .query_definition import QueryDefinition
+from .query_entity_input import QueryEntityInput
+from .query_execution_message import QueryExecutionMessage
+from .query_execution_message_level import QueryExecutionMessageLevel
+from .query_hal_links import QueryHALLinks
+from .query_input import QueryInput
+from .query_list_hal_links import QueryListHALLinks
+from .query_list_item import QueryListItem
+from .query_output import QueryOutput
+from .query_response import QueryResponse
+from .query_result import QueryResult
+from .query_update_input import QueryUpdateInput
+from .render import Render
+from .render1 import Render1
+from .render_mode import RenderMode
+from .render_mode_one_of import RenderModeOneOf
+from .render_mode_one_of1 import RenderModeOneOf1
+from .render_mode_one_of2 import RenderModeOneOf2
+from .render_mode_one_of3 import RenderModeOneOf3
+from .render_mode_one_of4 import RenderModeOneOf4
+from .render_mode_one_of5 import RenderModeOneOf5
+from .render_mode_one_of6 import RenderModeOneOf6
+from .render_mode_one_of7 import RenderModeOneOf7
+from .render_mode_one_of8 import RenderModeOneOf8
+from .render_mode_one_of9 import RenderModeOneOf9
+from .response_data_set import ResponseDataSet
+from .row_data_set import RowDataSet
+from .row_data_set_data_axis import RowDataSetDataAxis
+from .row_header import RowHeader
+from .row_headers_inner import RowHeadersInner
+from .series_data_set import SeriesDataSet
+from .series_spec import SeriesSpec
+from .time_window_from import TimeWindowFrom
+from .time_window_until import TimeWindowUntil
+from .validation_error import ValidationError
+from .window import Window
+from .window_override import WindowOverride
+
+__all__ = [
+ "__version__",
+ "AggregationByResourceAndMetric",
+ "AggregationByResourceOrMetric",
+ "AggregationMethod",
+ "AggregationMethodOneOf",
+ "AggregationMethodOneOf1",
+ "AggregationMethodOneOf2",
+ "AggregationMethodOneOf3",
+ "AggregationMethodOneOf4",
+ "AggregationMethodOneOf5",
+ "AggregationMethodOneOf6",
+ "AggregationMethodOneOf7",
+ "AggregationMethodOneOf8",
+ "AggregationsInner",
+ "AlignAt",
+ "AlignShift",
+ "Alignment",
+ "AlignmentGridInterval",
+ "AlignmentTimezone",
+ "CauseException",
+ "ColumnDataSet",
+ "ColumnDataSetDataAxis",
+ "ColumnHeader",
+ "ColumnHeadersInner",
+ "DataAxisOption",
+ "DataSetAttributes",
+ "DataSetWindow",
+ "Datum",
+ "DefaultAggregation",
+ "DefaultInterpolation",
+ "DeleteResponse",
+ "Embeddings",
+ "FromOverride",
+ "GroupingInterval",
+ "GroupingIntervalOverride",
+ "GroupingIntervalOverrideOneOf",
+ "HALLink",
+ "HALLinkMethod",
+ "HALLinkRole",
+ "HTTPValidationError",
+ "HeaderArrayOption",
+ "Hierarchical",
+ "Interpolation",
+ "InterpolationMethod",
+ "InterpolationMethodOneOf",
+ "InterpolationMethodOneOf1",
+ "InterpolationMethodOneOf10",
+ "InterpolationMethodOneOf11",
+ "InterpolationMethodOneOf12",
+ "InterpolationMethodOneOf13",
+ "InterpolationMethodOneOf2",
+ "InterpolationMethodOneOf3",
+ "InterpolationMethodOneOf4",
+ "InterpolationMethodOneOf5",
+ "InterpolationMethodOneOf6",
+ "InterpolationMethodOneOf7",
+ "InterpolationMethodOneOf8",
+ "InterpolationMethodOneOf9",
+ "InterpolationSpec",
+ "Links",
+ "LocationInner",
+ "Message",
+ "MessageArguments",
+ "MessageLevel",
+ "MessageProperties",
+ "ObjectData",
+ "ObjectDataSet",
+ "ObjectDataValue",
+ "QueriesListResponse",
+ "QueryDefinition",
+ "QueryEntityInput",
+ "QueryExecutionMessage",
+ "QueryExecutionMessageLevel",
+ "QueryHALLinks",
+ "QueryInput",
+ "QueryListHALLinks",
+ "QueryListItem",
+ "QueryOutput",
+ "QueryResponse",
+ "QueryResult",
+ "QueryUpdateInput",
+ "Render",
+ "Render1",
+ "RenderMode",
+ "RenderModeOneOf",
+ "RenderModeOneOf1",
+ "RenderModeOneOf2",
+ "RenderModeOneOf3",
+ "RenderModeOneOf4",
+ "RenderModeOneOf5",
+ "RenderModeOneOf6",
+ "RenderModeOneOf7",
+ "RenderModeOneOf8",
+ "RenderModeOneOf9",
+ "ResponseDataSet",
+ "RowDataSet",
+ "RowDataSetDataAxis",
+ "RowHeader",
+ "RowHeadersInner",
+ "SeriesDataSet",
+ "SeriesSpec",
+ "TimeWindowFrom",
+ "TimeWindowUntil",
+ "ValidationError",
+ "Window",
+ "WindowOverride",
+]
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/__init__.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..3e0039c
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/__init__.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_by_resource_and_metric.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_by_resource_and_metric.cpython-311.pyc
new file mode 100644
index 0000000..1ed0172
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_by_resource_and_metric.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_by_resource_or_metric.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_by_resource_or_metric.cpython-311.pyc
new file mode 100644
index 0000000..66fcd1a
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_by_resource_or_metric.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method.cpython-311.pyc
new file mode 100644
index 0000000..6070321
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of.cpython-311.pyc
new file mode 100644
index 0000000..1ea8073
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of1.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of1.cpython-311.pyc
new file mode 100644
index 0000000..441250a
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of1.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of2.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of2.cpython-311.pyc
new file mode 100644
index 0000000..bb2306c
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of2.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of3.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of3.cpython-311.pyc
new file mode 100644
index 0000000..c433ff2
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of3.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of4.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of4.cpython-311.pyc
new file mode 100644
index 0000000..59e3e30
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of4.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of5.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of5.cpython-311.pyc
new file mode 100644
index 0000000..00aa56d
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of5.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of6.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of6.cpython-311.pyc
new file mode 100644
index 0000000..4736aaf
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of6.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of7.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of7.cpython-311.pyc
new file mode 100644
index 0000000..25586ae
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of7.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of8.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of8.cpython-311.pyc
new file mode 100644
index 0000000..b837462
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregation_method_one_of8.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregations_inner.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregations_inner.cpython-311.pyc
new file mode 100644
index 0000000..b595458
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/aggregations_inner.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/align_at.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/align_at.cpython-311.pyc
new file mode 100644
index 0000000..2c1013e
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/align_at.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/align_shift.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/align_shift.cpython-311.pyc
new file mode 100644
index 0000000..3e7e2b5
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/align_shift.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/alignment.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/alignment.cpython-311.pyc
new file mode 100644
index 0000000..0876a18
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/alignment.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/alignment_grid_interval.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/alignment_grid_interval.cpython-311.pyc
new file mode 100644
index 0000000..dbfe5f4
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/alignment_grid_interval.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/alignment_timezone.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/alignment_timezone.cpython-311.pyc
new file mode 100644
index 0000000..20de493
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/alignment_timezone.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/cause_exception.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/cause_exception.cpython-311.pyc
new file mode 100644
index 0000000..c38f9fc
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/cause_exception.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/column_data_set.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/column_data_set.cpython-311.pyc
new file mode 100644
index 0000000..c3e2356
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/column_data_set.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/column_data_set_data_axis.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/column_data_set_data_axis.cpython-311.pyc
new file mode 100644
index 0000000..19837ae
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/column_data_set_data_axis.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/column_header.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/column_header.cpython-311.pyc
new file mode 100644
index 0000000..e9556fd
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/column_header.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/column_headers_inner.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/column_headers_inner.cpython-311.pyc
new file mode 100644
index 0000000..9e8a47d
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/column_headers_inner.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/data_axis_option.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/data_axis_option.cpython-311.pyc
new file mode 100644
index 0000000..653eaa3
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/data_axis_option.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/data_set_attributes.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/data_set_attributes.cpython-311.pyc
new file mode 100644
index 0000000..39ac627
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/data_set_attributes.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/data_set_window.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/data_set_window.cpython-311.pyc
new file mode 100644
index 0000000..6dc64dd
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/data_set_window.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/datum.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/datum.cpython-311.pyc
new file mode 100644
index 0000000..6aa6e3c
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/datum.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/default_aggregation.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/default_aggregation.cpython-311.pyc
new file mode 100644
index 0000000..65160d4
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/default_aggregation.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/default_interpolation.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/default_interpolation.cpython-311.pyc
new file mode 100644
index 0000000..a29e5b9
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/default_interpolation.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/delete_response.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/delete_response.cpython-311.pyc
new file mode 100644
index 0000000..0b50137
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/delete_response.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/embeddings.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/embeddings.cpython-311.pyc
new file mode 100644
index 0000000..34209f1
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/embeddings.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/from_override.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/from_override.cpython-311.pyc
new file mode 100644
index 0000000..1ef22f1
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/from_override.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/grouping_interval.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/grouping_interval.cpython-311.pyc
new file mode 100644
index 0000000..743fffe
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/grouping_interval.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/grouping_interval_override.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/grouping_interval_override.cpython-311.pyc
new file mode 100644
index 0000000..5ffc839
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/grouping_interval_override.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/grouping_interval_override_one_of.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/grouping_interval_override_one_of.cpython-311.pyc
new file mode 100644
index 0000000..8ffa160
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/grouping_interval_override_one_of.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/hal_link.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/hal_link.cpython-311.pyc
new file mode 100644
index 0000000..bfc669a
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/hal_link.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/hal_link_method.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/hal_link_method.cpython-311.pyc
new file mode 100644
index 0000000..743b2dc
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/hal_link_method.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/hal_link_role.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/hal_link_role.cpython-311.pyc
new file mode 100644
index 0000000..39826f3
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/hal_link_role.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/header_array_option.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/header_array_option.cpython-311.pyc
new file mode 100644
index 0000000..70623b0
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/header_array_option.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/hierarchical.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/hierarchical.cpython-311.pyc
new file mode 100644
index 0000000..f396d59
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/hierarchical.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/http_validation_error.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/http_validation_error.cpython-311.pyc
new file mode 100644
index 0000000..61b348c
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/http_validation_error.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation.cpython-311.pyc
new file mode 100644
index 0000000..adeff9e
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method.cpython-311.pyc
new file mode 100644
index 0000000..ded3935
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of.cpython-311.pyc
new file mode 100644
index 0000000..1ece0c6
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of1.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of1.cpython-311.pyc
new file mode 100644
index 0000000..fac5e88
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of1.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of10.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of10.cpython-311.pyc
new file mode 100644
index 0000000..edec2ca
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of10.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of11.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of11.cpython-311.pyc
new file mode 100644
index 0000000..06e7b95
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of11.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of12.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of12.cpython-311.pyc
new file mode 100644
index 0000000..ddf3c5c
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of12.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of13.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of13.cpython-311.pyc
new file mode 100644
index 0000000..f5cae8f
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of13.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of2.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of2.cpython-311.pyc
new file mode 100644
index 0000000..e649939
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of2.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of3.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of3.cpython-311.pyc
new file mode 100644
index 0000000..f9a1c1b
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of3.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of4.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of4.cpython-311.pyc
new file mode 100644
index 0000000..3d280f9
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of4.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of5.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of5.cpython-311.pyc
new file mode 100644
index 0000000..01a7c63
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of5.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of6.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of6.cpython-311.pyc
new file mode 100644
index 0000000..ad7c1e8
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of6.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of7.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of7.cpython-311.pyc
new file mode 100644
index 0000000..c310434
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of7.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of8.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of8.cpython-311.pyc
new file mode 100644
index 0000000..f9f25c3
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of8.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of9.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of9.cpython-311.pyc
new file mode 100644
index 0000000..249df59
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_method_one_of9.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_spec.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_spec.cpython-311.pyc
new file mode 100644
index 0000000..6366850
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/interpolation_spec.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/links.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/links.cpython-311.pyc
new file mode 100644
index 0000000..f88720b
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/links.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/location_inner.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/location_inner.cpython-311.pyc
new file mode 100644
index 0000000..3b5ea0b
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/location_inner.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/message.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/message.cpython-311.pyc
new file mode 100644
index 0000000..e00cc24
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/message.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/message_arguments.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/message_arguments.cpython-311.pyc
new file mode 100644
index 0000000..40dd11c
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/message_arguments.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/message_level.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/message_level.cpython-311.pyc
new file mode 100644
index 0000000..06bfa47
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/message_level.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/message_properties.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/message_properties.cpython-311.pyc
new file mode 100644
index 0000000..3c298d7
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/message_properties.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/object_data.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/object_data.cpython-311.pyc
new file mode 100644
index 0000000..6f554b3
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/object_data.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/object_data_set.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/object_data_set.cpython-311.pyc
new file mode 100644
index 0000000..2951264
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/object_data_set.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/object_data_value.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/object_data_value.cpython-311.pyc
new file mode 100644
index 0000000..e4890ff
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/object_data_value.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/queries_list_response.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/queries_list_response.cpython-311.pyc
new file mode 100644
index 0000000..3dacfc8
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/queries_list_response.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_definition.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_definition.cpython-311.pyc
new file mode 100644
index 0000000..cd4cf21
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_definition.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_entity_input.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_entity_input.cpython-311.pyc
new file mode 100644
index 0000000..cbd58d0
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_entity_input.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_execution_message.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_execution_message.cpython-311.pyc
new file mode 100644
index 0000000..ce6139b
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_execution_message.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_execution_message_level.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_execution_message_level.cpython-311.pyc
new file mode 100644
index 0000000..3559890
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_execution_message_level.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_hal_links.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_hal_links.cpython-311.pyc
new file mode 100644
index 0000000..8bb7cb2
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_hal_links.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_input.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_input.cpython-311.pyc
new file mode 100644
index 0000000..2044848
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_input.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_list_hal_links.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_list_hal_links.cpython-311.pyc
new file mode 100644
index 0000000..c064518
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_list_hal_links.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_list_item.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_list_item.cpython-311.pyc
new file mode 100644
index 0000000..d911f7f
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_list_item.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_output.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_output.cpython-311.pyc
new file mode 100644
index 0000000..7e995ff
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_output.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_response.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_response.cpython-311.pyc
new file mode 100644
index 0000000..2603335
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_response.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_result.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_result.cpython-311.pyc
new file mode 100644
index 0000000..cf0d911
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_result.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_update_input.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_update_input.cpython-311.pyc
new file mode 100644
index 0000000..94d29af
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/query_update_input.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render.cpython-311.pyc
new file mode 100644
index 0000000..08c764f
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render1.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render1.cpython-311.pyc
new file mode 100644
index 0000000..17464aa
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render1.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode.cpython-311.pyc
new file mode 100644
index 0000000..a4a7819
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of.cpython-311.pyc
new file mode 100644
index 0000000..e114024
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of1.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of1.cpython-311.pyc
new file mode 100644
index 0000000..abfcefd
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of1.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of2.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of2.cpython-311.pyc
new file mode 100644
index 0000000..e34b022
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of2.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of3.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of3.cpython-311.pyc
new file mode 100644
index 0000000..4dd706d
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of3.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of4.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of4.cpython-311.pyc
new file mode 100644
index 0000000..cf6067d
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of4.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of5.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of5.cpython-311.pyc
new file mode 100644
index 0000000..510336e
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of5.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of6.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of6.cpython-311.pyc
new file mode 100644
index 0000000..c020fd7
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of6.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of7.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of7.cpython-311.pyc
new file mode 100644
index 0000000..0b9111e
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of7.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of8.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of8.cpython-311.pyc
new file mode 100644
index 0000000..88c3784
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of8.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of9.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of9.cpython-311.pyc
new file mode 100644
index 0000000..f2b0e54
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/render_mode_one_of9.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/response_data_set.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/response_data_set.cpython-311.pyc
new file mode 100644
index 0000000..9549395
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/response_data_set.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/row_data_set.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/row_data_set.cpython-311.pyc
new file mode 100644
index 0000000..7881152
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/row_data_set.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/row_data_set_data_axis.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/row_data_set_data_axis.cpython-311.pyc
new file mode 100644
index 0000000..9aff669
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/row_data_set_data_axis.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/row_header.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/row_header.cpython-311.pyc
new file mode 100644
index 0000000..a7937a0
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/row_header.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/row_headers_inner.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/row_headers_inner.cpython-311.pyc
new file mode 100644
index 0000000..91e685a
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/row_headers_inner.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/series_data_set.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/series_data_set.cpython-311.pyc
new file mode 100644
index 0000000..1b8b41b
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/series_data_set.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/series_spec.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/series_spec.cpython-311.pyc
new file mode 100644
index 0000000..fbc8467
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/series_spec.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/time_window_from.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/time_window_from.cpython-311.pyc
new file mode 100644
index 0000000..304277c
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/time_window_from.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/time_window_until.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/time_window_until.cpython-311.pyc
new file mode 100644
index 0000000..af88beb
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/time_window_until.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/validation_error.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/validation_error.cpython-311.pyc
new file mode 100644
index 0000000..b0a8e34
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/validation_error.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/window.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/window.cpython-311.pyc
new file mode 100644
index 0000000..6c4904d
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/window.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/window_override.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/window_override.cpython-311.pyc
new file mode 100644
index 0000000..d09d41b
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/models/__pycache__/window_override.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_by_resource_and_metric.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_by_resource_and_metric.py
new file mode 100644
index 0000000..20fad80
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_by_resource_and_metric.py
@@ -0,0 +1,31 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Dict,
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.aggregation_by_resource_or_metric import AggregationByResourceOrMetric
+
+AggregationByResourceAndMetric = Union[
+ Annotated[
+ Dict[str, AggregationByResourceOrMetric],
+ "Aggregation methods specified per resource or metric.",
+ ]
+]
+"""AggregationByResourceAndMetric."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_by_resource_or_metric.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_by_resource_or_metric.py
new file mode 100644
index 0000000..cf7c3a2
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_by_resource_or_metric.py
@@ -0,0 +1,32 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ List,
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.aggregation_method import AggregationMethod
+from ..models.aggregations_inner import AggregationsInner
+
+AggregationByResourceOrMetric = Union[
+ Annotated[AggregationMethod, ""],
+ Annotated[
+ List[AggregationsInner], "Aggregation methods, leading to separate series."
+ ],
+]
+"""AggregationByResourceOrMetric."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method.py
new file mode 100644
index 0000000..aea55fa
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method.py
@@ -0,0 +1,43 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.aggregation_method_one_of import AggregationMethodOneOf
+from ..models.aggregation_method_one_of1 import AggregationMethodOneOf1
+from ..models.aggregation_method_one_of2 import AggregationMethodOneOf2
+from ..models.aggregation_method_one_of3 import AggregationMethodOneOf3
+from ..models.aggregation_method_one_of4 import AggregationMethodOneOf4
+from ..models.aggregation_method_one_of5 import AggregationMethodOneOf5
+from ..models.aggregation_method_one_of6 import AggregationMethodOneOf6
+from ..models.aggregation_method_one_of7 import AggregationMethodOneOf7
+from ..models.aggregation_method_one_of8 import AggregationMethodOneOf8
+
+AggregationMethod = Union[
+ Annotated[AggregationMethodOneOf, ""],
+ Annotated[AggregationMethodOneOf1, ""],
+ Annotated[AggregationMethodOneOf2, ""],
+ Annotated[AggregationMethodOneOf3, ""],
+ Annotated[AggregationMethodOneOf4, ""],
+ Annotated[AggregationMethodOneOf5, ""],
+ Annotated[AggregationMethodOneOf6, ""],
+ Annotated[AggregationMethodOneOf7, ""],
+ Annotated[AggregationMethodOneOf8, ""],
+]
+"""AggregationMethod."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of.py
new file mode 100644
index 0000000..1124f5f
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class AggregationMethodOneOf(str, Enum):
+ """Use the first value (in time) to represent all data for the sample interval.."""
+
+ FIRST = "first"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of1.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of1.py
new file mode 100644
index 0000000..60c63cf
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of1.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class AggregationMethodOneOf1(str, Enum):
+ """Use the last value (in time) to represent all data for the sample interval.."""
+
+ LAST = "last"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of2.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of2.py
new file mode 100644
index 0000000..97880a2
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of2.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class AggregationMethodOneOf2(str, Enum):
+ """Aggregate data by the mean value: The sum of values divided by number of observations.."""
+
+ MEAN = "mean"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of3.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of3.py
new file mode 100644
index 0000000..eb84db3
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of3.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class AggregationMethodOneOf3(str, Enum):
+ """Aggregate data by the median value: The n/2-th value when ordered, the average of the (n-1)/2-th and (n+1)/2-th value when n is uneven.."""
+
+ MEDIAN = "median"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of4.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of4.py
new file mode 100644
index 0000000..4b7debb
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of4.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class AggregationMethodOneOf4(str, Enum):
+ """The sum of all values summarizes the data for the sample interval.."""
+
+ SUM = "sum"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of5.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of5.py
new file mode 100644
index 0000000..68878e4
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of5.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class AggregationMethodOneOf5(str, Enum):
+ """Use the count of observations in the sample interval.."""
+
+ COUNT = "count"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of6.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of6.py
new file mode 100644
index 0000000..bce2635
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of6.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class AggregationMethodOneOf6(str, Enum):
+ """Use the standard deviation of all observations in the sample interval.."""
+
+ STD = "std"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of7.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of7.py
new file mode 100644
index 0000000..fcf428b
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of7.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class AggregationMethodOneOf7(str, Enum):
+ """Use the maximum of all values in the sample interval.."""
+
+ MAX = "max"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of8.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of8.py
new file mode 100644
index 0000000..b8cfa9d
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregation_method_one_of8.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class AggregationMethodOneOf8(str, Enum):
+ """Use the minimum of all values in the sample interval.."""
+
+ MIN = "min"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregations_inner.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregations_inner.py
new file mode 100644
index 0000000..f78b570
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/aggregations_inner.py
@@ -0,0 +1,25 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.aggregation_method import AggregationMethod
+
+AggregationsInner = Union[Annotated[AggregationMethod, ""]]
+"""AggregationsInner."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/align_at.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/align_at.py
new file mode 100644
index 0000000..b774886
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/align_at.py
@@ -0,0 +1,26 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class AlignAt(str, Enum):
+ """Possible values for `align.at`. * 'grid' Align to a fixed grid (possibly using timezone information) * 'from' Align a the `from` boundary * 'until' Align a the `until` boundary * 'boundary' Align a the `from` boundary if specified, otherwise the `until` boundary. When not specified, 'grid' is used.."""
+
+ GRID = "grid"
+ BOUNDARY = "boundary"
+ FROM = "from"
+ UNTIL = "until"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/align_shift.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/align_shift.py
new file mode 100644
index 0000000..917b43c
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/align_shift.py
@@ -0,0 +1,25 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class AlignShift(str, Enum):
+ """Possible values for `align.shift`. * 'backward': keep the window size of the original interval specification, shifting back. * 'forward': keep the window size of the original interval specification, shifting forward. * 'wrap': enlarge the window size to include all of the original interval. When not specified, 'backward' is used.."""
+
+ BACKWARD = "backward"
+ FORWARD = "forward"
+ WRAP = "wrap"
+
+ def __str__(self) -> str:
+ return str(self.value)
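`AlignAt` and `AlignShift` only enumerate the accepted keywords; the snippet below is a toy, seconds-based illustration of the documented shift semantics (keep the window size and move backward or forward, or wrap to enlarge it). It is not the service's implementation, which works on ISO8601 periods and timezones.

```python
# Illustrative only: a toy, seconds-based version of the documented align/shift
# semantics. The real service operates on ISO8601 periods and timezones.
def align_window(start: int, end: int, grid: int, shift: str = "backward") -> tuple:
    """Snap the window [start, end) to whole multiples of `grid` seconds."""
    size = end - start
    if shift == "backward":   # keep the window size, shift back onto the grid
        aligned_end = (end // grid) * grid
        return aligned_end - size, aligned_end
    if shift == "forward":    # keep the window size, shift forward onto the grid
        aligned_start = -(-start // grid) * grid  # ceiling division
        return aligned_start, aligned_start + size
    if shift == "wrap":       # enlarge the window to cover the original interval
        return (start // grid) * grid, -(-end // grid) * grid
    raise ValueError(f"unknown shift: {shift}")


print(align_window(130, 430, 60, "backward"))  # (120, 420)
print(align_window(130, 430, 60, "wrap"))      # (120, 480)
```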
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/alignment.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/alignment.py
new file mode 100644
index 0000000..57b6197
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/alignment.py
@@ -0,0 +1,35 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from pydantic import (
+ ConfigDict,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.align_at import AlignAt
+from ..models.align_shift import AlignShift
+from ..models.alignment_grid_interval import AlignmentGridInterval
+from ..models.alignment_timezone import AlignmentTimezone
+
+
+class Alignment(WaylayBaseModel):
+ """Aggregation Alignment Options. Specifies how the aggregation grid is aligned.."""
+
+ at: AlignAt | None = None
+ shift: AlignShift | None = None
+ freq: AlignmentGridInterval | None = None
+ timezone: AlignmentTimezone | None = None
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
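A minimal construction sketch for the `Alignment` model, assuming the generated `waylay-sdk-queries-types` package from this diff is installed and that `WaylayBaseModel` behaves like a pydantic v2 `BaseModel` (it is configured with pydantic's `ConfigDict`, so `model_dump` is assumed to be available). The field values are examples only.

```python
# Minimal usage sketch; assumes the generated packages from this diff are installed
# and that WaylayBaseModel exposes pydantic v2's model_dump().
from waylay.services.queries.models.align_at import AlignAt
from waylay.services.queries.models.align_shift import AlignShift
from waylay.services.queries.models.alignment import Alignment

alignment = Alignment(
    at=AlignAt.GRID,
    shift=AlignShift.BACKWARD,
    freq="PT1H",                 # ISO8601 period, one branch of AlignmentGridInterval
    timezone="Europe/Brussels",  # ICANN timezone identifier
)
print(alignment.model_dump(exclude_none=True))
```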
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/alignment_grid_interval.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/alignment_grid_interval.py
new file mode 100644
index 0000000..806de12
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/alignment_grid_interval.py
@@ -0,0 +1,31 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.grouping_interval_override_one_of import GroupingIntervalOverrideOneOf
+
+AlignmentGridInterval = Union[
+ Annotated[
+ str,
+ "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ ],
+ Annotated[GroupingIntervalOverrideOneOf, ""],
+]
+""" Defines the grid used to align the aggregation window. The window will align at whole-unit multiples of this interval. For intervals like `PT1D`, that are timezone-dependent, use the `align.timezone` to fix the absolute timestamp of the grid boundaries. If not specified, defaults to the `freq` aggregation interval. ."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/alignment_timezone.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/alignment_timezone.py
new file mode 100644
index 0000000..ee932f9
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/alignment_timezone.py
@@ -0,0 +1,29 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+AlignmentTimezone = Union[
+ Annotated[
+ str,
+ "[ICANN timezone identifier](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones)",
+ ],
+ Annotated[str, "[UTC offset](https://en.wikipedia.org/wiki/UTC_offset)"],
+]
+""" The timezone to use when shifting boundaries, especially at day granularity. Also affects the rendering of timestamps when `render.iso_timestamp` is enabled. When not specified, the `UTC` timezone is used. ."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/cause_exception.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/cause_exception.py
new file mode 100644
index 0000000..1739613
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/cause_exception.py
@@ -0,0 +1,32 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import (
+ ConfigDict,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+
+class CauseException(WaylayBaseModel):
+ """Describes the exception that caused a message.."""
+
+ type: StrictStr
+ message: StrictStr
+ stacktrace: List[StrictStr]
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
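A hypothetical helper (not part of the generated code) showing how a caught exception maps onto the `CauseException` fields above; the helper name and the plain-dict result are assumptions for illustration.

```python
# Hypothetical helper: map a caught exception onto the CauseException field names.
import traceback


def as_cause_exception(exc: BaseException) -> dict:
    return {
        "type": type(exc).__name__,
        "message": str(exc),
        "stacktrace": traceback.format_exception(type(exc), exc, exc.__traceback__),
    }


try:
    1 / 0
except ZeroDivisionError as err:
    print(as_cause_exception(err)["type"])  # ZeroDivisionError
```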
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/column_data_set.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/column_data_set.py
new file mode 100644
index 0000000..f439cc5
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/column_data_set.py
@@ -0,0 +1,44 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import (
+ ConfigDict,
+ Field,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.column_data_set_data_axis import ColumnDataSetDataAxis
+from ..models.data_set_attributes import DataSetAttributes
+from ..models.data_set_window import DataSetWindow
+from ..models.datum import Datum
+from ..models.row_headers_inner import RowHeadersInner
+
+
+class ColumnDataSet(WaylayBaseModel):
+ """Column-oriented dataset with rows header. Timeseries data layout with a rows header containing the index data. The data array contains series data prefixed by series attributes. The `rows` index is prefix by the names of these series attributes. Result for render options `data_axis=row` and `header_array=column`.."""
+
+ attributes: DataSetAttributes | None = None
+ window_spec: DataSetWindow | None = None
+ data_axis: ColumnDataSetDataAxis | None = ColumnDataSetDataAxis.ROW
+ rows: List[RowHeadersInner] = Field(
+ description="Header Attributes for the index data. The initial string-valued headers (normally `resource`, `metric`,`aggregation`) indicate that row to contain series attributes. The remaining object-valued row headers contain the index data."
+ )
+ data: List[List[Datum]] = Field(
+ description="All metric observation values for a single series. Prefixed by the series attributes."
+ )
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
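A plain-dict illustration of the layout the `ColumnDataSet` docstring describes (`data_axis=row`, `header_array=column`): the `rows` header names the series-attribute prefix and then carries the index, and each `data` entry is one series prefixed by those attributes. The resource/metric names and the `{"timestamp": ...}` shape of the object-valued headers are assumptions, and the dict is not validated against the generated model.

```python
# Illustrative payload shape only (not validated against the generated model).
column_data_set = {
    "data_axis": "row",
    "rows": [
        "resource", "metric", "aggregation",  # series-attribute prefix
        {"timestamp": 1672531200000},         # index entries (hypothetical shape)
        {"timestamp": 1672534800000},
    ],
    "data": [
        ["device-1", "temperature", "mean", 21.5, 21.7],
        ["device-1", "humidity", "mean", 54.0, 53.2],
    ],
}
```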
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/column_data_set_data_axis.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/column_data_set_data_axis.py
new file mode 100644
index 0000000..a7aeb4a
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/column_data_set_data_axis.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class ColumnDataSetDataAxis(str, Enum):
+ """ColumnDataSetDataAxis."""
+
+ ROW = "row"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/column_header.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/column_header.py
new file mode 100644
index 0000000..269b5dd
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/column_header.py
@@ -0,0 +1,30 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from pydantic import (
+ ConfigDict,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+
+class ColumnHeader(WaylayBaseModel):
+ """Column attributes. Attributes that identify and describe the data in this column.."""
+
+ resource: StrictStr
+ metric: StrictStr
+ aggregation: StrictStr | None = None
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/column_headers_inner.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/column_headers_inner.py
new file mode 100644
index 0000000..258fad1
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/column_headers_inner.py
@@ -0,0 +1,31 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.column_header import ColumnHeader
+
+ColumnHeadersInner = Union[
+ Annotated[
+ str,
+ "Header for a column containing a (representation of) the row index value. These headers precede the header attributes for row data.",
+ ],
+ Annotated[ColumnHeader, ""],
+]
+"""ColumnHeadersInner."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/data_axis_option.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/data_axis_option.py
new file mode 100644
index 0000000..d3c339d
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/data_axis_option.py
@@ -0,0 +1,24 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class DataAxisOption(str, Enum):
+ """Allowed values for the render.data_axis option.."""
+
+ ROW = "row"
+ COLUMN = "column"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/data_set_attributes.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/data_set_attributes.py
new file mode 100644
index 0000000..7ba134c
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/data_set_attributes.py
@@ -0,0 +1,32 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+
+class DataSetAttributes(WaylayBaseModel):
+ """Data Set Attributes. Data attributes that apply to all data in this set.."""
+
+ role: StrictStr | None = Field(
+ default=None,
+ description="The role of series specification that was used to compile this data set.",
+ )
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/data_set_window.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/data_set_window.py
new file mode 100644
index 0000000..c5f7720
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/data_set_window.py
@@ -0,0 +1,36 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictInt,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+
+class DataSetWindow(WaylayBaseModel):
+ """Data Window. Statistics of the time axis of a data set. Present with render option `include_window_spec=true`.\",."""
+
+ until: StrictInt = Field(
+ description="Exclusive higher bound of the time axis in unix epoch milliseconds."
+ )
+ window: StrictStr = Field(description="Time axis length as ISO8601 period.")
+ freq: StrictStr = Field(
+ description="Time axis aggregation interval as an ISO8601 period ."
+ )
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
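A minimal `DataSetWindow` construction sketch, assuming the generated package is installed; the values follow the field descriptions above (epoch milliseconds and ISO8601 periods) and are examples only.

```python
# Minimal sketch; assumes the generated waylay-sdk-queries-types package is installed.
from waylay.services.queries.models.data_set_window import DataSetWindow

window_spec = DataSetWindow(
    until=1672617600000,  # exclusive upper bound of the time axis, unix epoch ms
    window="P1D",         # time axis length as an ISO8601 period
    freq="PT1H",          # aggregation interval as an ISO8601 period
)
print(window_spec.window, window_spec.freq)
```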
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/datum.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/datum.py
new file mode 100644
index 0000000..5abb53f
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/datum.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+Datum = Union[Annotated[float, ""], Annotated[str, ""], Annotated[bool, ""]]
+"""A single metric value for a timeseries. A null value indicates that no (aggregated/interpolated) value exists for the corresponding timestamp.."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/default_aggregation.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/default_aggregation.py
new file mode 100644
index 0000000..145dd32
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/default_aggregation.py
@@ -0,0 +1,43 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Dict,
+ List,
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.aggregation_by_resource_and_metric import AggregationByResourceAndMetric
+from ..models.aggregation_by_resource_or_metric import AggregationByResourceOrMetric
+from ..models.aggregation_method import AggregationMethod
+from ..models.aggregations_inner import AggregationsInner
+
+DefaultAggregation = Union[
+ Annotated[AggregationMethod, ""],
+ Annotated[
+ List[AggregationsInner], "Aggregation methods, leading to separate series."
+ ],
+ Annotated[
+ Dict[str, AggregationByResourceOrMetric],
+ "Aggregation methods specified per resource or metric.",
+ ],
+ Annotated[
+ Dict[str, AggregationByResourceAndMetric],
+ "Aggregation methods specified per resource and metric.",
+ ],
+]
+"""Default aggregation method(s) for the series in the query.."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/default_interpolation.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/default_interpolation.py
new file mode 100644
index 0000000..576c90d
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/default_interpolation.py
@@ -0,0 +1,28 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.interpolation_method import InterpolationMethod
+from ..models.interpolation_spec import InterpolationSpec
+
+DefaultInterpolation = Union[
+ Annotated[InterpolationMethod, ""], Annotated[InterpolationSpec, ""]
+]
+"""Default Interpolation method for the series (if aggregated).."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/delete_response.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/delete_response.py
new file mode 100644
index 0000000..226aaa8
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/delete_response.py
@@ -0,0 +1,42 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import Dict, List
+
+from pydantic import (
+ ConfigDict,
+ Field,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.embeddings import Embeddings
+from ..models.links import Links
+from ..models.message import Message
+
+
+class DeleteResponse(WaylayBaseModel):
+ """Confirmation of a delete request.."""
+
+ messages: List[Message] | None = None
+ links: Dict[str, Links] | None = Field(
+ default=None, description="HAL links, indexed by link relation.", alias="_links"
+ )
+ embeddings: Dict[str, Embeddings] | None = Field(
+ default=None,
+ description="Hal embeddings, indexed by relation.",
+ alias="_embeddings",
+ )
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
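`DeleteResponse` shows the HAL-style aliases (`_links`, `_embeddings`) used across these models. A sketch with a hypothetical payload, assuming pydantic v2 semantics of the base model:

```python
from waylay.services.queries.models.delete_response import DeleteResponse

resp = DeleteResponse.model_validate(
    {
        "messages": [{"message": "query 'my-query' deleted"}],
        "_links": {"self": {"href": "/queries/v1/query/my-query"}},
    }
)
assert resp.links is not None                         # populated through the "_links" alias
assert resp.messages[0].message.startswith("query")
```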
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/embeddings.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/embeddings.py
new file mode 100644
index 0000000..12cd697
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/embeddings.py
@@ -0,0 +1,28 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Dict,
+ List,
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+Embeddings = Union[
+ Annotated[Dict[str, object], "Any embedded representation in a HAL response."],
+ Annotated[List[Dict[str, object]], ""],
+]
+"""Embeddings."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/from_override.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/from_override.py
new file mode 100644
index 0000000..87017aa
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/from_override.py
@@ -0,0 +1,34 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+FromOverride = Union[
+ Annotated[
+ datetime,
+ "A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations) format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)",
+ ],
+ Annotated[int, "Absolute timestamp milliseconds in unix epoch since 1970-01-01."],
+ Annotated[
+ str,
+ "Specifies a timestamp before _now_ as a period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ ],
+]
+"""FromOverride."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/grouping_interval.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/grouping_interval.py
new file mode 100644
index 0000000..85bf912
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/grouping_interval.py
@@ -0,0 +1,31 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.grouping_interval_override_one_of import GroupingIntervalOverrideOneOf
+
+GroupingInterval = Union[
+ Annotated[
+ str,
+ "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ ],
+ Annotated[GroupingIntervalOverrideOneOf, ""],
+]
+"""Interval used to aggregate or regularize data. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/grouping_interval_override.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/grouping_interval_override.py
new file mode 100644
index 0000000..1022229
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/grouping_interval_override.py
@@ -0,0 +1,31 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.grouping_interval_override_one_of import GroupingIntervalOverrideOneOf
+
+GroupingIntervalOverride = Union[
+ Annotated[
+ str,
+ "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ ],
+ Annotated[GroupingIntervalOverrideOneOf, ""],
+]
+"""Override for the `freq` query attribute.."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/grouping_interval_override_one_of.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/grouping_interval_override_one_of.py
new file mode 100644
index 0000000..b21da63
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/grouping_interval_override_one_of.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class GroupingIntervalOverrideOneOf(str, Enum):
+ """When `inferred` is specified, the frequency of aggregation will be inferred from the main/first time series. This can be used to regularize the time series."""
+
+ INFERRED = "inferred"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/hal_link.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/hal_link.py
new file mode 100644
index 0000000..b78704c
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/hal_link.py
@@ -0,0 +1,35 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.hal_link_method import HALLinkMethod
+
+
+class HALLink(WaylayBaseModel):
+ """A link target in a HAL response.."""
+
+ href: StrictStr = Field(description="Target url for this link.")
+ type: StrictStr | None = Field(
+ default=None, description="Type of the resource referenced by this link."
+ )
+ method: HALLinkMethod | None = None
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/hal_link_method.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/hal_link_method.py
new file mode 100644
index 0000000..4bf55f7
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/hal_link_method.py
@@ -0,0 +1,27 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class HALLinkMethod(str, Enum):
+ """An http method that can be specified in a HAL link.."""
+
+ GET = "GET"
+ POST = "POST"
+ PUT = "PUT"
+ DELETE = "DELETE"
+ PATCH = "PATCH"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/hal_link_role.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/hal_link_role.py
new file mode 100644
index 0000000..ca755a1
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/hal_link_role.py
@@ -0,0 +1,28 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class HALLinkRole(str, Enum):
+ """Supported link and embedding roles in HAL representations.."""
+
+ SELF = "self"
+ FIRST = "first"
+ PREV = "prev"
+ NEXT = "next"
+ LAST = "last"
+ EXECUTE = "execute"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/header_array_option.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/header_array_option.py
new file mode 100644
index 0000000..ad220a4
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/header_array_option.py
@@ -0,0 +1,24 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class HeaderArrayOption(str, Enum):
+ """Allowed values for the render.header_array option.."""
+
+ ROW = "row"
+ COLUMN = "column"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/hierarchical.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/hierarchical.py
new file mode 100644
index 0000000..9afb977
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/hierarchical.py
@@ -0,0 +1,24 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ List,
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+Hierarchical = Union[Annotated[bool, ""], Annotated[List[str], ""]]
+"""if true, use hierarchical objects to represent multiple row (or column) dimensions, otherwise multi-keys get concatenated with a dot-delimiter. If the value is a list, only these levels are kept as separate levels, while remaining levels get concatenated keys."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/http_validation_error.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/http_validation_error.py
new file mode 100644
index 0000000..22308a1
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/http_validation_error.py
@@ -0,0 +1,31 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import (
+ ConfigDict,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.validation_error import ValidationError
+
+
+class HTTPValidationError(WaylayBaseModel):
+ """HTTPValidationError."""
+
+ detail: List[ValidationError] | None = None
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="ignore"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation.py
new file mode 100644
index 0000000..a3adbf0
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation.py
@@ -0,0 +1,28 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.interpolation_method import InterpolationMethod
+from ..models.interpolation_spec import InterpolationSpec
+
+Interpolation = Union[
+ Annotated[InterpolationMethod, ""], Annotated[InterpolationSpec, ""]
+]
+"""Interpolation."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method.py
new file mode 100644
index 0000000..88015ef
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method.py
@@ -0,0 +1,53 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.interpolation_method_one_of import InterpolationMethodOneOf
+from ..models.interpolation_method_one_of1 import InterpolationMethodOneOf1
+from ..models.interpolation_method_one_of2 import InterpolationMethodOneOf2
+from ..models.interpolation_method_one_of3 import InterpolationMethodOneOf3
+from ..models.interpolation_method_one_of4 import InterpolationMethodOneOf4
+from ..models.interpolation_method_one_of5 import InterpolationMethodOneOf5
+from ..models.interpolation_method_one_of6 import InterpolationMethodOneOf6
+from ..models.interpolation_method_one_of7 import InterpolationMethodOneOf7
+from ..models.interpolation_method_one_of8 import InterpolationMethodOneOf8
+from ..models.interpolation_method_one_of9 import InterpolationMethodOneOf9
+from ..models.interpolation_method_one_of10 import InterpolationMethodOneOf10
+from ..models.interpolation_method_one_of11 import InterpolationMethodOneOf11
+from ..models.interpolation_method_one_of12 import InterpolationMethodOneOf12
+from ..models.interpolation_method_one_of13 import InterpolationMethodOneOf13
+
+InterpolationMethod = Union[
+ Annotated[InterpolationMethodOneOf, ""],
+ Annotated[InterpolationMethodOneOf1, ""],
+ Annotated[InterpolationMethodOneOf2, ""],
+ Annotated[InterpolationMethodOneOf3, ""],
+ Annotated[InterpolationMethodOneOf4, ""],
+ Annotated[InterpolationMethodOneOf5, ""],
+ Annotated[InterpolationMethodOneOf6, ""],
+ Annotated[InterpolationMethodOneOf7, ""],
+ Annotated[InterpolationMethodOneOf8, ""],
+ Annotated[InterpolationMethodOneOf9, ""],
+ Annotated[InterpolationMethodOneOf10, ""],
+ Annotated[InterpolationMethodOneOf11, ""],
+ Annotated[InterpolationMethodOneOf12, ""],
+ Annotated[InterpolationMethodOneOf13, ""],
+]
+"""InterpolationMethod."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of.py
new file mode 100644
index 0000000..3d20cad
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf(str, Enum):
+ """Interpolate with the value of the first observed point. This method also extrapolates.."""
+
+ PAD = "pad"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of1.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of1.py
new file mode 100644
index 0000000..f9d2eaf
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of1.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf1(str, Enum):
+ """Interpolate with a fixed, user-specified value. This method also extrapolates.."""
+
+ FIXED = "fixed"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of10.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of10.py
new file mode 100644
index 0000000..70f3251
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of10.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf10(str, Enum):
+ """Interpolate with a spline function of a user-specified order.."""
+
+ SPLINE = "spline"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of11.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of11.py
new file mode 100644
index 0000000..ef92d1c
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of11.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf11(str, Enum):
+ """Interpolate with the derivative of order 1.."""
+
+ FROM_DERIVATIVES = "from_derivatives"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of12.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of12.py
new file mode 100644
index 0000000..544ef7c
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of12.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf12(str, Enum):
+ """Interpolate with a piecewise cubic spline function.."""
+
+ PCHIP = "pchip"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of13.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of13.py
new file mode 100644
index 0000000..84e800f
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of13.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf13(str, Enum):
+ """Interpolate with a non-smoothing spline of order 2, called Akima interpolation.."""
+
+ AKIMA = "akima"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of2.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of2.py
new file mode 100644
index 0000000..65b89fa
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of2.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf2(str, Enum):
+ """Same as pad, but using the last observed value. This method also extrapolates."""
+
+ BACKFILL = "backfill"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of3.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of3.py
new file mode 100644
index 0000000..556e37e
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of3.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf3(str, Enum):
+ """Linearly go from the first observed value of the gap to the last observed oneThis method also extrapolates."""
+
+ LINEAR = "linear"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of4.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of4.py
new file mode 100644
index 0000000..c29bb4c
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of4.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf4(str, Enum):
+ """Use the value that is closest in time.."""
+
+ NEAREST = "nearest"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of5.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of5.py
new file mode 100644
index 0000000..edeb2f1
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of5.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf5(str, Enum):
+ """Interpolate with a spline function of order 0, which is a piecewise polynomial.."""
+
+ ZERO = "zero"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of6.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of6.py
new file mode 100644
index 0000000..5cef45a
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of6.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf6(str, Enum):
+ """Interpolate with a spline function of order 1, which is a piecewise polynomial.."""
+
+ SLINEAR = "slinear"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of7.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of7.py
new file mode 100644
index 0000000..44afd80
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of7.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf7(str, Enum):
+ """Interpolate with a spline function of order 2, which is a piecewise polynomial.."""
+
+ QUADRATIC = "quadratic"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of8.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of8.py
new file mode 100644
index 0000000..b6bef66
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of8.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf8(str, Enum):
+ """Interpolate with a spline function of order 3, which is a piecewise polynomial.."""
+
+ CUBIC = "cubic"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of9.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of9.py
new file mode 100644
index 0000000..10fef22
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_method_one_of9.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InterpolationMethodOneOf9(str, Enum):
+ """Interpolate with a polynomial of the lowest possible degree passing trough the data points.."""
+
+ POLYNOMIAL = "polynomial"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_spec.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_spec.py
new file mode 100644
index 0000000..2f6a0a0
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/interpolation_spec.py
@@ -0,0 +1,39 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictInt,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.interpolation_method import InterpolationMethod
+
+
+class InterpolationSpec(WaylayBaseModel):
+ """Defines whether, and how to treat missing values. This can occur in two circumstances when aggregating (setting a sample frequency): * missing values: if there are missing (or invalid) values stored for a given freq-interval, \"interpolation\" specifies how to compute these. * down-sampling: when the specified freq is smaller than the series’ actual frequency. \"interpolation\" specifies how to compute intermediate values.."""
+
+ method: InterpolationMethod
+ value: StrictInt | None = Field(
+ default=None,
+ description="Optional parameter value for the interpolation method (see method description).",
+ )
+ order: StrictInt | None = Field(
+ default=None,
+ description="Optional order parameter for the interpolation method (see method description).",
+ )
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
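A sketch of building an `InterpolationSpec` that combines a method with its optional `order` parameter, assuming pydantic v2 behaviour of the base model:

```python
from waylay.services.queries.models.interpolation_method_one_of10 import (
    InterpolationMethodOneOf10,
)
from waylay.services.queries.models.interpolation_spec import InterpolationSpec

spec = InterpolationSpec(method=InterpolationMethodOneOf10.SPLINE, order=3)
# mode="json" serializes the enum member back to its wire value
assert spec.model_dump(mode="json", exclude_none=True) == {"method": "spline", "order": 3}
```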
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/links.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/links.py
new file mode 100644
index 0000000..2e82141
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/links.py
@@ -0,0 +1,26 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ List,
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.hal_link import HALLink
+
+Links = Union[Annotated[HALLink, ""], Annotated[List[HALLink], ""]]
+"""Links."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/location_inner.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/location_inner.py
new file mode 100644
index 0000000..47bea4d
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/location_inner.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+LocationInner = Union[Annotated[str, ""], Annotated[int, ""]]
+"""LocationInner."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/message.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/message.py
new file mode 100644
index 0000000..7d2dc39
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/message.py
@@ -0,0 +1,35 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import Any, Dict
+
+from pydantic import (
+ ConfigDict,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.message_level import MessageLevel
+
+
+class Message(WaylayBaseModel):
+ """Individual (info/warning/error) message in a response.."""
+
+ code: StrictStr | None = None
+ message: StrictStr
+ level: MessageLevel | None = MessageLevel.INFO
+ args: Dict[str, Any] | None = None
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="ignore"
+ )
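Note that `Message` (like `HTTPValidationError`) uses `extra="ignore"`, unlike most models in this package which use `extra="allow"`. A sketch of the effect, with a hypothetical payload:

```python
from waylay.services.queries.models.message import Message

msg = Message.model_validate(
    {"message": "series truncated", "level": "warning", "unknown_key": 42}
)
assert str(msg.level) == "warning"
assert "unknown_key" not in msg.model_dump()   # dropped because extra="ignore"
```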
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/message_arguments.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/message_arguments.py
new file mode 100644
index 0000000..fa5d17d
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/message_arguments.py
@@ -0,0 +1,25 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.message_properties import MessageProperties
+
+MessageArguments = Union[Annotated[str, ""], Annotated[MessageProperties, ""]]
+"""MessageArguments."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/message_level.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/message_level.py
new file mode 100644
index 0000000..05f3671
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/message_level.py
@@ -0,0 +1,27 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class MessageLevel(str, Enum):
+ """MessageLevel."""
+
+ DEBUG = "debug"
+ INFO = "info"
+ WARNING = "warning"
+ ERROR = "error"
+ FATAL = "fatal"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/message_properties.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/message_properties.py
new file mode 100644
index 0000000..3bb7616
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/message_properties.py
@@ -0,0 +1,29 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from pydantic import (
+ ConfigDict,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+
+class MessageProperties(WaylayBaseModel):
+ """Additional message arguments.."""
+
+ resource: StrictStr | None = None
+ metric: StrictStr | None = None
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/object.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/object.py
new file mode 100644
index 0000000..6cbd270
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/object.py
@@ -0,0 +1,4 @@
+# workaround definition to resolve the import
+# from ..models.object import object
+# generated by `vendorExtensions.x-py-model-imports`
+object = object
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/object_data.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/object_data.py
new file mode 100644
index 0000000..514885c
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/object_data.py
@@ -0,0 +1,54 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import List
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictInt,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+
+class ObjectData(WaylayBaseModel):
+ """Result data for a timestamp in object format.."""
+
+ timestamp: StrictInt = Field(description="Unix epoch milliseconds timestamp.")
+ timestamp_iso: datetime | None = Field(
+ default=None,
+ description="ISO8601 rendering of the timestamp, present when `render.iso_timestamp=true`",
+ )
+ role: StrictStr | None = Field(
+ default=None,
+ description="The role of series specification that was used to compile this data set.",
+ )
+ resource: StrictStr | None = Field(
+ default=None, description="Series resource id, if applicable for all values."
+ )
+ metric: StrictStr | None = Field(
+ default=None, description="Series metric, if applicable for all values."
+ )
+ aggregation: StrictStr | None = Field(
+ default=None, description="Series aggregation, if applicable for all values."
+ )
+ levels: List[StrictStr] | None = Field(
+ default=None,
+ description="Attribute level names used to key the values for this observation. Levels that are flattened have a dot-separated key. If all observations have the same attribute for a level, that level might be omitted.",
+ )
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/object_data_set.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/object_data_set.py
new file mode 100644
index 0000000..75fff8e
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/object_data_set.py
@@ -0,0 +1,35 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import (
+ ConfigDict,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.data_set_attributes import DataSetAttributes
+from ..models.data_set_window import DataSetWindow
+from ..models.object_data import ObjectData
+
+
+class ObjectDataSet(WaylayBaseModel):
+ """Data result in object format. Result item when render option `render.header_array` is not set. The data values are keyed by their attributes (`resource`, `metric`, `aggregation`), according to the render options: * _hierachical_: for each level, a sub-object is created (e.g. `render.mode=hier_dict`) * _flattened_: the attributes are '.'-separated concatenation of the attributes (e.g `render.mode=flat_dict`) * _mixed_: (.e.g. `render.mode=metric_flat_dict`) a single level (e.g. `metric`) is used as main key, any remaining levels (`resource`,`aggregation`) are indicated with a flattened subkey. When `render.rollup=true`, the attribute levels that are the same for all series are not used as key, but reported as a data or table attribute.."""
+
+ attributes: DataSetAttributes | None = None
+ window_spec: DataSetWindow | None = None
+ data: List[ObjectData]
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/object_data_value.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/object_data_value.py
new file mode 100644
index 0000000..f51d477
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/object_data_value.py
@@ -0,0 +1,31 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.datum import Datum
+
+ObjectDataValue = Union[
+ Annotated[
+ object,
+ "Values for the series whose attributes corresponds with the key. Keyed by sub-levels.",
+ ],
+ Annotated[Datum, ""],
+]
+"""ObjectDataValue."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/py.typed b/waylay-sdk-queries-types/src/waylay/services/queries/models/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/queries_list_response.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/queries_list_response.py
new file mode 100644
index 0000000..c2048c5
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/queries_list_response.py
@@ -0,0 +1,52 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictInt,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.message import Message
+from ..models.query_list_hal_links import QueryListHALLinks
+from ..models.query_list_item import QueryListItem
+
+
+class QueriesListResponse(WaylayBaseModel):
+ """Listing of named queries, with paging links.."""
+
+ messages: List[Message] | None = None
+ queries: List[QueryListItem] = Field(
+ description="One page of matching query definitions."
+ )
+ count: StrictInt = Field(
+ description="Number of query definitions returned in the current response."
+ )
+ offset: StrictInt = Field(
+ description="Offset in the full listing (skipped definitions)."
+ )
+ limit: StrictInt = Field(
+ description="Maximal number of query definitions returned in one response."
+ )
+ total_count: StrictInt | None = Field(
+ default=None,
+ description="Total number of query definitions matching the filter.",
+ )
+ links: QueryListHALLinks = Field(alias="_links")
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/query_definition.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_definition.py
new file mode 100644
index 0000000..f4f763d
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_definition.py
@@ -0,0 +1,26 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.query_input import QueryInput
+from ..models.query_update_input import QueryUpdateInput
+
+QueryDefinition = Union[Annotated[QueryUpdateInput, ""], Annotated[QueryInput, ""]]
+"""QueryDefinition."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/query_entity_input.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_entity_input.py
new file mode 100644
index 0000000..040f6e4
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_entity_input.py
@@ -0,0 +1,37 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import Any, Dict
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.query_input import QueryInput
+
+
+class QueryEntityInput(WaylayBaseModel):
+ """Input data to create a query definition.."""
+
+ name: StrictStr = Field(description="Name of the stored query definition.")
+ meta: Dict[str, Any] | None = Field(
+ default=None, description="User metadata for the query definition."
+ )
+ query: QueryInput
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/query_execution_message.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_execution_message.py
new file mode 100644
index 0000000..1663981
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_execution_message.py
@@ -0,0 +1,43 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from datetime import datetime
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.cause_exception import CauseException
+from ..models.message_arguments import MessageArguments
+from ..models.query_execution_message_level import QueryExecutionMessageLevel
+
+
+class QueryExecutionMessage(WaylayBaseModel):
+ """A message object that informs or warns about a query execution issue.."""
+
+ message: StrictStr = Field(description="A human readable message.")
+ level: QueryExecutionMessageLevel
+ timestamp: datetime
+ action: StrictStr = Field(
+ description="The request action that caused this message."
+ )
+ category: StrictStr = Field(description="The subsystem that issued this message.")
+ properties: MessageArguments | None = None
+ exception: CauseException | None = None
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/query_execution_message_level.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_execution_message_level.py
new file mode 100644
index 0000000..9cf6928
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_execution_message_level.py
@@ -0,0 +1,26 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class QueryExecutionMessageLevel(str, Enum):
+ """QueryExecutionMessageLevel."""
+
+ DEBUG = "debug"
+ INFO = "info"
+ WARNING = "warning"
+ ERROR = "error"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/query_hal_links.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_hal_links.py
new file mode 100644
index 0000000..3307223
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_hal_links.py
@@ -0,0 +1,31 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from pydantic import (
+ ConfigDict,
+ Field,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.hal_link import HALLink
+
+
+class QueryHALLinks(WaylayBaseModel):
+ """HAL Links for a query entity.."""
+
+ var_self: HALLink = Field(alias="self")
+ execute: HALLink
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/query_input.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_input.py
new file mode 100644
index 0000000..291f029
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_input.py
@@ -0,0 +1,63 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictInt,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.alignment import Alignment
+from ..models.default_aggregation import DefaultAggregation
+from ..models.default_interpolation import DefaultInterpolation
+from ..models.grouping_interval import GroupingInterval
+from ..models.render import Render
+from ..models.series_spec import SeriesSpec
+from ..models.time_window_from import TimeWindowFrom
+from ..models.time_window_until import TimeWindowUntil
+from ..models.window import Window
+
+
+class QueryInput(WaylayBaseModel):
+ """Query definition for a Waylay analytics query. See also [api docs](https://docs.waylay.io/#/api/query/?id=data-query-json-representation).."""
+
+ resource: StrictStr | None = Field(
+ default=None, description="Default resource for the series in the query."
+ )
+ metric: StrictStr | None = Field(
+ default=None, description="Default metric for the series in the query."
+ )
+ aggregation: DefaultAggregation | None = None
+ interpolation: DefaultInterpolation | None = None
+ freq: GroupingInterval | None = None
+ var_from: TimeWindowFrom | None = Field(default=None, alias="from")
+ until: TimeWindowUntil | None = None
+ window: Window | None = None
+ periods: StrictInt | None = Field(
+ default=None,
+ description="The size of the time window in number of `freq` units. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.",
+ )
+ align: Alignment | None = None
+ data: List[SeriesSpec] | None = Field(
+ default=None,
+ description="List of series specifications. When not specified, a single default series specification is assumed(`[{}]`, using the default `metric`,`resource`, ... ).",
+ )
+ render: Render | None = None
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
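As an aside (not part of the generated diff), a minimal sketch of how this `QueryInput` model could be populated. The resource and metric values are placeholders, and it assumes the standard pydantic-v2 `model_dump` API on the Waylay base model:

```python
from waylay.services.queries.models.query_input import QueryInput
from waylay.services.queries.models.series_spec import SeriesSpec

# Hypothetical values; only the field names come from the generated model above.
query = QueryInput(
    resource="my-device",        # default resource for all series
    metric="temperature",        # default metric for all series
    window="P1D",                # window size as an ISO8601 duration
    data=[SeriesSpec(), SeriesSpec(metric="humidity")],
)
# `from` is a Python keyword, so the model exposes it as `var_from` with an alias.
print(query.model_dump(by_alias=True, exclude_none=True))
```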
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/query_list_hal_links.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_list_hal_links.py
new file mode 100644
index 0000000..4f8e349
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_list_hal_links.py
@@ -0,0 +1,34 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from pydantic import (
+ ConfigDict,
+ Field,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.hal_link import HALLink
+
+
+class QueryListHALLinks(WaylayBaseModel):
+ """HAL Links for a query entity.."""
+
+ var_self: HALLink = Field(alias="self")
+ first: HALLink | None = None
+ prev: HALLink | None = None
+ next: HALLink | None = None
+ last: HALLink | None = None
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/query_list_item.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_list_item.py
new file mode 100644
index 0000000..bf749da
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_list_item.py
@@ -0,0 +1,40 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import Any, Dict
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.query_hal_links import QueryHALLinks
+
+
+class QueryListItem(WaylayBaseModel):
+ """Listing of a query definition item.."""
+
+ links: QueryHALLinks = Field(alias="_links")
+ attrs: Dict[str, Any] = Field(
+ description="System provided metadata for the query definition."
+ )
+ name: StrictStr = Field(description="Name of the stored query definition.")
+ meta: Dict[str, Any] | None = Field(
+ default=None, description="User metadata for the query definition."
+ )
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/query_output.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_output.py
new file mode 100644
index 0000000..2bf8f09
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_output.py
@@ -0,0 +1,63 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictInt,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.alignment import Alignment
+from ..models.default_aggregation import DefaultAggregation
+from ..models.default_interpolation import DefaultInterpolation
+from ..models.grouping_interval import GroupingInterval
+from ..models.render import Render
+from ..models.series_spec import SeriesSpec
+from ..models.time_window_from import TimeWindowFrom
+from ..models.time_window_until import TimeWindowUntil
+from ..models.window import Window
+
+
+class QueryOutput(WaylayBaseModel):
+ """Query definition for a Waylay analytics query. See also [api docs](https://docs.waylay.io/#/api/query/?id=data-query-json-representation).."""
+
+ resource: StrictStr | None = Field(
+ default=None, description="Default resource for the series in the query."
+ )
+ metric: StrictStr | None = Field(
+ default=None, description="Default metric for the series in the query."
+ )
+ aggregation: DefaultAggregation | None = None
+ interpolation: DefaultInterpolation | None = None
+ freq: GroupingInterval | None = None
+ var_from: TimeWindowFrom | None = Field(default=None, alias="from")
+ until: TimeWindowUntil | None = None
+ window: Window | None = None
+ periods: StrictInt | None = Field(
+ default=None,
+ description="The size of the time window in number of `freq` units. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.",
+ )
+ align: Alignment | None = None
+ data: List[SeriesSpec] | None = Field(
+ default=None,
+ description="List of series specifications. When not specified, a single default series specification is assumed(`[{}]`, using the default `metric`,`resource`, ... ).",
+ )
+ render: Render | None = None
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/query_response.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_response.py
new file mode 100644
index 0000000..2283101
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_response.py
@@ -0,0 +1,44 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import Any, Dict, List
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.message import Message
+from ..models.query_hal_links import QueryHALLinks
+from ..models.query_output import QueryOutput
+
+
+class QueryResponse(WaylayBaseModel):
+ """Represents a single named query.."""
+
+ links: QueryHALLinks = Field(alias="_links")
+ attrs: Dict[str, Any] = Field(
+ description="System provided metadata for the query definition."
+ )
+ name: StrictStr = Field(description="Name of the stored query definition.")
+ meta: Dict[str, Any] | None = Field(
+ default=None, description="User metadata for the query definition."
+ )
+ query: QueryOutput
+ messages: List[Message] | None = None
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/query_result.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_result.py
new file mode 100644
index 0000000..90ed5c1
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_result.py
@@ -0,0 +1,38 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import (
+ ConfigDict,
+ Field,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.query_execution_message import QueryExecutionMessage
+from ..models.query_input import QueryInput
+from ..models.response_data_set import ResponseDataSet
+
+
+class QueryResult(WaylayBaseModel):
+ """A json data response. Uses the format as specified by the `render` options of the request (defaults to `COMPACT_WS`). '."""
+
+ data: List[ResponseDataSet] = Field(
+ description="A list of data sets, each with their own time axis. There will be one dataset for each `role` specified in the query (by default a single `input` role). The data is represented according to the `render` options in the query (default `COMPACT_WS`)."
+ )
+ query: QueryInput
+ messages: List[QueryExecutionMessage]
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/query_update_input.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_update_input.py
new file mode 100644
index 0000000..99550a3
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/query_update_input.py
@@ -0,0 +1,35 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import Any, Dict
+
+from pydantic import (
+ ConfigDict,
+ Field,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.query_input import QueryInput
+
+
+class QueryUpdateInput(WaylayBaseModel):
+ """Input data to update a query definition.."""
+
+ meta: Dict[str, Any] | None = Field(
+ default=None, description="User metadata for the query definition."
+ )
+ query: QueryInput | None = None
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/render.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/render.py
new file mode 100644
index 0000000..42855c4
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/render.py
@@ -0,0 +1,74 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictBool,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.data_axis_option import DataAxisOption
+from ..models.header_array_option import HeaderArrayOption
+from ..models.hierarchical import Hierarchical
+from ..models.render_mode import RenderMode
+
+
+class Render(WaylayBaseModel):
+ """Configures the representation of data sets returned by the query API.."""
+
+ mode: RenderMode | None = None
+ roll_up: StrictBool | None = Field(
+ default=None,
+ description="move up attributes on rows (or columns) that are the same for all rows (or columns) to a table attribute. Levels enumerated in 'hierarchical' are excluded.",
+ )
+ hierarchical: Hierarchical | None = None
+ value_key: StrictStr | None = Field(
+ default=None,
+ description="if set, use this key in the value object to report data values",
+ )
+ show_levels: StrictBool | None = Field(
+ default=None,
+ description="if set, report the levels used in the data values (either hierarchical or flat)",
+ )
+ iso_timestamp: StrictBool | None = Field(
+ default=None,
+ description="if set, render timestamps in a row or column index with both epoch and iso representations",
+ )
+ row_key: StrictStr | None = Field(
+ default=None,
+ description="if set, use this key as name of the row-dimension for single-dimensional rows",
+ )
+ column_key: StrictStr | None = Field(
+ default=None,
+ description="if set, use this key as name of the column-dimension for single-dimensional columns",
+ )
+ header_array: HeaderArrayOption | None = None
+ data_axis: DataAxisOption | None = None
+ key_seperator: StrictStr | None = Field(
+ default=None,
+ description="character used to concatenate multi-key columns or rows when required",
+ )
+ key_skip_empty: StrictBool | None = Field(
+ default=None,
+ description="skip empty values in concatenating multi-key column or row headers",
+ )
+ include_window_spec: StrictBool | None = Field(
+ default=None,
+ description="if set, include window specification in render modes that support it",
+ )
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
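A hedged sketch (not part of the generated diff) of combining a named render mode with explicit overrides, using the `Render` and `RenderModeOneOf2` models from this changeset; the chosen values are illustrative only:

```python
from waylay.services.queries.models.render import Render
from waylay.services.queries.models.render_mode_one_of2 import RenderModeOneOf2

render = Render(
    mode=RenderModeOneOf2.COMPACT_WS,  # rows of timestamp + values, incl. window spec
    iso_timestamp=True,                # also include ISO8601 timestamps
    roll_up=True,                      # lift attributes shared by all series to the table level
)
print(render.model_dump(exclude_none=True))
```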
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/render1.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/render1.py
new file mode 100644
index 0000000..87ba513
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/render1.py
@@ -0,0 +1,26 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.render import Render
+from ..models.render_mode import RenderMode
+
+Render1 = Union[Annotated[RenderMode, ""], Annotated[Render, ""]]
+"""Render1."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode.py
new file mode 100644
index 0000000..1de5aeb
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode.py
@@ -0,0 +1,45 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.render_mode_one_of import RenderModeOneOf
+from ..models.render_mode_one_of1 import RenderModeOneOf1
+from ..models.render_mode_one_of2 import RenderModeOneOf2
+from ..models.render_mode_one_of3 import RenderModeOneOf3
+from ..models.render_mode_one_of4 import RenderModeOneOf4
+from ..models.render_mode_one_of5 import RenderModeOneOf5
+from ..models.render_mode_one_of6 import RenderModeOneOf6
+from ..models.render_mode_one_of7 import RenderModeOneOf7
+from ..models.render_mode_one_of8 import RenderModeOneOf8
+from ..models.render_mode_one_of9 import RenderModeOneOf9
+
+RenderMode = Union[
+ Annotated[RenderModeOneOf, ""],
+ Annotated[RenderModeOneOf1, ""],
+ Annotated[RenderModeOneOf2, ""],
+ Annotated[RenderModeOneOf3, ""],
+ Annotated[RenderModeOneOf4, ""],
+ Annotated[RenderModeOneOf5, ""],
+ Annotated[RenderModeOneOf6, ""],
+ Annotated[RenderModeOneOf7, ""],
+ Annotated[RenderModeOneOf8, ""],
+ Annotated[RenderModeOneOf9, ""],
+]
+"""Render mode configuration keys.."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of.py
new file mode 100644
index 0000000..16303f4
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class RenderModeOneOf(str, Enum):
+ """Render rows of timestamp and values. Show column headers. Includes an iso timestamp. ###### options - `iso_timestamp`: `True` - `header_array`: `row` - `roll_up`: `False` - `data_axis`: `column`."""
+
+ HEADER_ROW = "HEADER_ROW"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of1.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of1.py
new file mode 100644
index 0000000..57e5c84
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of1.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class RenderModeOneOf1(str, Enum):
+ """Render rows of timestamp and values. Show column headers. ###### options - `iso_timestamp`: `False` - `header_array`: `row` - `roll_up`: `False` - `data_axis`: `column`."""
+
+ COMPACT = "COMPACT"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of2.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of2.py
new file mode 100644
index 0000000..d7f808f
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of2.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class RenderModeOneOf2(str, Enum):
+ """Render rows of timestamp and values. Show column headers. Show the time window attributes. ###### options - `iso_timestamp`: `False` - `header_array`: `row` - `roll_up`: `False` - `data_axis`: `column` - `include_window_spec`: `True`."""
+
+ COMPACT_WS = "COMPACT_WS"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of3.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of3.py
new file mode 100644
index 0000000..ce5946b
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of3.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class RenderModeOneOf3(str, Enum):
+ """Render timestamps and each series (column) as a values array. Show column headers. ###### options - `iso_timestamp`: `False` - `header_array`: `row` - `data_axis`: `row` - `roll_up`: `True` - `include_window_spec`: `True`."""
+
+ SERIES = "SERIES"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of4.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of4.py
new file mode 100644
index 0000000..5a61f8d
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of4.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class RenderModeOneOf4(str, Enum):
+ """Renders row index in `rows`, and each series as a values array. The series are prefixed by their series attributes.The `rows` index is prefixed by the labels for these attributes. ###### options - `iso_timestamp`: `True` - `header_array`: `column` - `roll_up`: `False` - `data_axis`: `row`."""
+
+ HEADER_COLUMN = "HEADER_COLUMN"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of5.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of5.py
new file mode 100644
index 0000000..a26a576
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of5.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class RenderModeOneOf5(str, Enum):
+ """Render an object for each observation. Uses flattened keys. ###### options - `iso_timestamp`: `True` - `hierarchical`: `False` - `show_levels`: `True` - `roll_up`: `False`."""
+
+ FLAT_DICT = "FLAT_DICT"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of6.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of6.py
new file mode 100644
index 0000000..c71b67d
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of6.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class RenderModeOneOf6(str, Enum):
+ """Render an hierarchical object for each observation. Shows an iso timestamp. ###### options - `iso_timestamp`: `True` - `hierarchical`: `True` - `show_levels`: `True` - `roll_up`: `True`."""
+
+ HIER_DICT = "HIER_DICT"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of7.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of7.py
new file mode 100644
index 0000000..2151afe
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of7.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class RenderModeOneOf7(str, Enum):
+ """Render an object with metric keys for each observation. Shows an iso timestamp. ###### options - `iso_timestamp`: `True` - `hierarchical`: `['metric']` - `show_levels`: `False` - `roll_up`: `True` - `key_skip_empty`: `True`."""
+
+ METRIC_FLAT_DICT = "METRIC_FLAT_DICT"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of8.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of8.py
new file mode 100644
index 0000000..b04bb87
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of8.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class RenderModeOneOf8(str, Enum):
+ """Render in an object format compatible with the `/data/v1/events` upload. ###### options - `iso_timestamp`: `False` - `hierarchical`: `False` - `show_levels`: `False` - `roll_up`: `True`."""
+
+ UPLOAD = "UPLOAD"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of9.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of9.py
new file mode 100644
index 0000000..97a77e8
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/render_mode_one_of9.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class RenderModeOneOf9(str, Enum):
+ """Render in csv format with row headers. ###### options - `iso_timestamp`: `False`."""
+
+ CSV = "CSV"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/response_data_set.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/response_data_set.py
new file mode 100644
index 0000000..95499cd
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/response_data_set.py
@@ -0,0 +1,33 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.column_data_set import ColumnDataSet
+from ..models.object_data_set import ObjectDataSet
+from ..models.row_data_set import RowDataSet
+from ..models.series_data_set import SeriesDataSet
+
+ResponseDataSet = Union[
+ Annotated[RowDataSet, ""],
+ Annotated[SeriesDataSet, ""],
+ Annotated[ColumnDataSet, ""],
+ Annotated[ObjectDataSet, ""],
+]
+"""Result timeseries data set, with one time dimension.."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/row_data_set.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/row_data_set.py
new file mode 100644
index 0000000..70a8be9
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/row_data_set.py
@@ -0,0 +1,42 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import (
+ ConfigDict,
+ Field,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.column_headers_inner import ColumnHeadersInner
+from ..models.data_set_attributes import DataSetAttributes
+from ..models.data_set_window import DataSetWindow
+from ..models.datum import Datum
+from ..models.row_data_set_data_axis import RowDataSetDataAxis
+
+
+class RowDataSet(WaylayBaseModel):
+ """Row-oriented dataset. Timeseries data layout with a column header and a data row per timestamp. Result for render options `data_axis=column` and `header_array=row`.\",."""
+
+ attributes: DataSetAttributes | None = None
+ window_spec: DataSetWindow | None = None
+ data_axis: RowDataSetDataAxis | None = RowDataSetDataAxis.COLUMN
+ columns: List[ColumnHeadersInner] = Field(
+ description="Header Attributes for the column data. The initial string-valued headers (normally a single `timestamp`) indicate that column to contain row index data (i.e. timestamps). The remaining object-valued column headers identify and describe the actual series data."
+ )
+ data: List[List[Datum]]
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
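Since the docstring above describes the row layout (leading string headers for the index, object-valued headers per series), here is a small illustrative helper, outside the generated SDK, that pairs each data value with its header:

```python
from typing import Any, Iterator, List, Tuple

from waylay.services.queries.models.row_data_set import RowDataSet


def iter_labelled_rows(data_set: RowDataSet) -> Iterator[List[Tuple[Any, Any]]]:
    """Pair every value in a data row with its column header.

    The leading string header(s) (normally a single 'timestamp') label index
    columns; the object-valued headers describe the series for that column.
    """
    for row in data_set.data:
        yield list(zip(data_set.columns, row))
```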
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/row_data_set_data_axis.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/row_data_set_data_axis.py
new file mode 100644
index 0000000..6fbba4f
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/row_data_set_data_axis.py
@@ -0,0 +1,23 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class RowDataSetDataAxis(str, Enum):
+ """RowDataSetDataAxis."""
+
+ COLUMN = "column"
+
+ def __str__(self) -> str:
+ return str(self.value)
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/row_header.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/row_header.py
new file mode 100644
index 0000000..107b5fb
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/row_header.py
@@ -0,0 +1,35 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from datetime import datetime
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictInt,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+
+class RowHeader(WaylayBaseModel):
+ """Index entry attributes. Attributes for a timestamp index entry.."""
+
+ timestamp: StrictInt = Field(description="Unix epoch milliseconds timestamp.")
+ timestamp_iso: datetime | None = Field(
+ default=None,
+ description="ISO8601 rendering of the timestamp, present when `render.iso_timestamp=true`",
+ )
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/row_headers_inner.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/row_headers_inner.py
new file mode 100644
index 0000000..13aa8b4
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/row_headers_inner.py
@@ -0,0 +1,27 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+from ..models.row_header import RowHeader
+
+RowHeadersInner = Union[
+ Annotated[str, "Label for a series attribute"], Annotated[RowHeader, ""]
+]
+"""RowHeadersInner."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/series_data_set.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/series_data_set.py
new file mode 100644
index 0000000..0866eb3
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/series_data_set.py
@@ -0,0 +1,42 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import (
+ ConfigDict,
+ Field,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.column_data_set_data_axis import ColumnDataSetDataAxis
+from ..models.column_headers_inner import ColumnHeadersInner
+from ..models.data_set_attributes import DataSetAttributes
+from ..models.data_set_window import DataSetWindow
+from ..models.datum import Datum
+
+
+class SeriesDataSet(WaylayBaseModel):
+ """Column-oriented dataset. Timeseries data layout with a column header and a seperate data array for the time index and each series. Result for render options `data_axis=row` and `header_array=row`.."""
+
+ attributes: DataSetAttributes | None = None
+ window_spec: DataSetWindow | None = None
+ data_axis: ColumnDataSetDataAxis | None = ColumnDataSetDataAxis.ROW
+ columns: List[ColumnHeadersInner] = Field(
+ description="Header Attributes for the column data. The initial string-valued headers (normally a single `timestamp`) indicate that column to contain row index data (i.e. timestamps). The remaining object-valued column headers identify and describe the actual series data."
+ )
+ data: List[List[Datum]]
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/series_spec.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/series_spec.py
new file mode 100644
index 0000000..5328d6e
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/series_spec.py
@@ -0,0 +1,45 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.aggregation_method import AggregationMethod
+from ..models.interpolation import Interpolation
+
+
+class SeriesSpec(WaylayBaseModel):
+ """Query specification for a single series.."""
+
+ name: StrictStr | None = Field(
+ default=None,
+ description="Optional alias name for the series. This name is used when exporting the dataset to CSV format.",
+ )
+ resource: StrictStr | None = Field(
+ default=None,
+ description="Resource id for the series, required unless it is specified as a query default.",
+ )
+ metric: StrictStr | None = Field(
+ default=None,
+ description="Metric name for the series, required unless it is specified as a query default.",
+ )
+ aggregration: AggregationMethod | None = None
+ interpolation: Interpolation | None = None
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="allow"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/time_window_from.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/time_window_from.py
new file mode 100644
index 0000000..e7d4adc
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/time_window_from.py
@@ -0,0 +1,34 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+TimeWindowFrom = Union[
+ Annotated[
+ datetime,
+ "A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations) format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)",
+ ],
+ Annotated[int, "Absolute timestamp milliseconds in unix epoch since 1970-01-01."],
+ Annotated[
+ str,
+ "Specifies a timestamp before _now_ as a period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ ],
+]
+"""The start of the time window for which results will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/time_window_until.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/time_window_until.py
new file mode 100644
index 0000000..4cc057b
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/time_window_until.py
@@ -0,0 +1,34 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+TimeWindowUntil = Union[
+ Annotated[
+ datetime,
+ "A date or date-time in [ISO8601](https://en.wikipedia.org/wiki/ISO_8601#Combined_date_and_time_representations) format. When no timezone is specified, the UTC timezone is assumed (`+00:00`)",
+ ],
+ Annotated[int, "Absolute timestamp milliseconds in unix epoch since 1970-01-01."],
+ Annotated[
+ str,
+ "Specifies a timestamp before _now_ as a period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ ],
+]
+"""The end (not-inclusive) of the time window for which results will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties)specifiers.."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/validation_error.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/validation_error.py
new file mode 100644
index 0000000..b0b3f26
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/validation_error.py
@@ -0,0 +1,34 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import (
+ ConfigDict,
+ StrictStr,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+from ..models.location_inner import LocationInner
+
+
+class ValidationError(WaylayBaseModel):
+ """ValidationError."""
+
+ loc: List[LocationInner]
+ msg: StrictStr
+ type: StrictStr
+
+ model_config = ConfigDict(
+ populate_by_name=True, protected_namespaces=(), extra="ignore"
+ )
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/window.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/window.py
new file mode 100644
index 0000000..abab347
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/window.py
@@ -0,0 +1,28 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+Window = Union[
+ Annotated[
+ str,
+ "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ ]
+]
+"""The absolute size of the time window for which results will be returned. One of the [time line](https://docs.waylay.io/#/api/query/?id=time-line-properties) specifiers.."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/models/window_override.py b/waylay-sdk-queries-types/src/waylay/services/queries/models/window_override.py
new file mode 100644
index 0000000..3e90693
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/models/window_override.py
@@ -0,0 +1,28 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) models.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+
+"""
+
+from __future__ import annotations
+
+from typing import (
+ Union,
+)
+
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+
+WindowOverride = Union[
+ Annotated[
+ str,
+ "A period in [ISO8601 duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format.",
+ ]
+]
+"""WindowOverride."""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/queries/__init__.py b/waylay-sdk-queries-types/src/waylay/services/queries/queries/__init__.py
new file mode 100644
index 0000000..3522154
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/queries/__init__.py
@@ -0,0 +1,13 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol): Query Parameters.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+version: 0.5.0
+
+ Execute and store queries on the Waylay timeseries. Protocol version: v1.
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/queries/__pycache__/__init__.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/queries/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..d71aeee
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/queries/__pycache__/__init__.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/queries/__pycache__/execute_api.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/queries/__pycache__/execute_api.cpython-311.pyc
new file mode 100644
index 0000000..e5308f3
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/queries/__pycache__/execute_api.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/queries/__pycache__/manage_api.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/queries/__pycache__/manage_api.cpython-311.pyc
new file mode 100644
index 0000000..504db06
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/queries/__pycache__/manage_api.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/queries/__pycache__/status_api.cpython-311.pyc b/waylay-sdk-queries-types/src/waylay/services/queries/queries/__pycache__/status_api.cpython-311.pyc
new file mode 100644
index 0000000..3ecc275
Binary files /dev/null and b/waylay-sdk-queries-types/src/waylay/services/queries/queries/__pycache__/status_api.cpython-311.pyc differ
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/queries/execute_api.py b/waylay-sdk-queries-types/src/waylay/services/queries/queries/execute_api.py
new file mode 100644
index 0000000..7fbfc87
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/queries/execute_api.py
@@ -0,0 +1,128 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) query parameters.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+from __future__ import annotations # for Python 3.7–3.9
+
+from typing import Any
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictInt,
+ StrictStr,
+)
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+
+def _execute_by_name_query_alias_for(field_name: str) -> str:
+ if field_name == "resource":
+ return "resource"
+ if field_name == "metric":
+ return "metric"
+ if field_name == "aggregation":
+ return "aggregation"
+ if field_name == "interpolation":
+ return "interpolation"
+ if field_name == "freq":
+ return "freq"
+ if field_name == "var_from":
+ return "from"
+ if field_name == "until":
+ return "until"
+ if field_name == "window":
+ return "window"
+ if field_name == "periods":
+ return "periods"
+ if field_name == "render":
+ return "render"
+ return field_name
+
+
+class ExecuteByNameQuery(WaylayBaseModel):
+ """Model for `execute_by_name` query parameters."""
+
+ resource: Annotated[
+ StrictStr | None, Field(description="Default Resource Override.")
+ ] = None
+ metric: Annotated[
+ StrictStr | None, Field(description="Default Metric Override.")
+ ] = None
+ aggregation: StrictStr | None = None
+ interpolation: Any | None = None
+ freq: Annotated[
+ StrictStr | None, Field(description="Override for the `freq` query attribute.")
+ ] = None
+ var_from: StrictStr | None = None
+ until: StrictStr | None = None
+ window: StrictStr | None = None
+ periods: StrictInt | None = None
+ render: Any | None = None
+
+ model_config = ConfigDict(
+ protected_namespaces=(),
+ extra="allow",
+ alias_generator=_execute_by_name_query_alias_for,
+ populate_by_name=True,
+ )
+
+
+def _execute_query_alias_for(field_name: str) -> str:
+ if field_name == "resource":
+ return "resource"
+ if field_name == "metric":
+ return "metric"
+ if field_name == "aggregation":
+ return "aggregation"
+ if field_name == "interpolation":
+ return "interpolation"
+ if field_name == "freq":
+ return "freq"
+ if field_name == "var_from":
+ return "from"
+ if field_name == "until":
+ return "until"
+ if field_name == "window":
+ return "window"
+ if field_name == "periods":
+ return "periods"
+ if field_name == "render":
+ return "render"
+ return field_name
+
+
+class ExecuteQuery(WaylayBaseModel):
+ """Model for `execute` query parameters."""
+
+ resource: Annotated[
+ StrictStr | None, Field(description="Default Resource Override.")
+ ] = None
+ metric: Annotated[
+ StrictStr | None, Field(description="Default Metric Override.")
+ ] = None
+ aggregation: StrictStr | None = None
+ interpolation: Any | None = None
+ freq: Annotated[
+ StrictStr | None, Field(description="Override for the `freq` query attribute.")
+ ] = None
+ var_from: StrictStr | None = None
+ until: StrictStr | None = None
+ window: StrictStr | None = None
+ periods: StrictInt | None = None
+ render: Any | None = None
+
+ model_config = ConfigDict(
+ protected_namespaces=(),
+ extra="allow",
+ alias_generator=_execute_query_alias_for,
+ populate_by_name=True,
+ )
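A short sketch, outside the generated code, of how the alias generator above plays out when the query-parameter model is serialized (assuming standard pydantic-v2 dumping on the Waylay base model; the parameter values are placeholders):

```python
from waylay.services.queries.queries.execute_api import ExecuteByNameQuery

# `var_from` is the Python-safe name for the HTTP query parameter `from`.
params = ExecuteByNameQuery(var_from="P1D", window="PT6H", periods=6)
print(params.model_dump(by_alias=True, exclude_none=True))
# -> {'from': 'P1D', 'window': 'PT6H', 'periods': 6}
```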
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/queries/manage_api.py b/waylay-sdk-queries-types/src/waylay/services/queries/queries/manage_api.py
new file mode 100644
index 0000000..4d093c2
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/queries/manage_api.py
@@ -0,0 +1,120 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) query parameters.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+from __future__ import annotations # for Python 3.7–3.9
+
+from pydantic import (
+ ConfigDict,
+ Field,
+ StrictInt,
+ StrictStr,
+)
+from typing_extensions import (
+ Annotated, # >=3.9
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+
+def _create_query_alias_for(field_name: str) -> str:
+ return field_name
+
+
+class CreateQuery(WaylayBaseModel):
+ """Model for `create` query parameters."""
+
+ model_config = ConfigDict(
+ protected_namespaces=(),
+ extra="allow",
+ alias_generator=_create_query_alias_for,
+ populate_by_name=True,
+ )
+
+
+def _get_query_alias_for(field_name: str) -> str:
+ return field_name
+
+
+class GetQuery(WaylayBaseModel):
+ """Model for `get` query parameters."""
+
+ model_config = ConfigDict(
+ protected_namespaces=(),
+ extra="allow",
+ alias_generator=_get_query_alias_for,
+ populate_by_name=True,
+ )
+
+
+def _list_query_alias_for(field_name: str) -> str:
+ if field_name == "q":
+ return "q"
+ if field_name == "limit":
+ return "limit"
+ if field_name == "offset":
+ return "offset"
+ return field_name
+
+
+class ListQuery(WaylayBaseModel):
+ """Model for `list` query parameters."""
+
+ q: Annotated[
+ StrictStr | None,
+ Field(
+ description="The QDSL filter condition for the stored queries. Note that this value needs to be escaped when passed as an url paramater."
+ ),
+ ] = None
+ limit: Annotated[
+ Annotated[int, Field(le=100, strict=True)] | None,
+ Field(description="Maximal number of items return in one response."),
+ ] = None
+ offset: Annotated[
+ StrictInt | None,
+ Field(
+ description="Numbers of items to skip before listing results in the response page."
+ ),
+ ] = None
+
+ model_config = ConfigDict(
+ protected_namespaces=(),
+ extra="allow",
+ alias_generator=_list_query_alias_for,
+ populate_by_name=True,
+ )
+
+
+def _remove_query_alias_for(field_name: str) -> str:
+ return field_name
+
+
+class RemoveQuery(WaylayBaseModel):
+ """Model for `remove` query parameters."""
+
+ model_config = ConfigDict(
+ protected_namespaces=(),
+ extra="allow",
+ alias_generator=_remove_query_alias_for,
+ populate_by_name=True,
+ )
+
+
+def _update_query_alias_for(field_name: str) -> str:
+ return field_name
+
+
+class UpdateQuery(WaylayBaseModel):
+ """Model for `update` query parameters."""
+
+ model_config = ConfigDict(
+ protected_namespaces=(),
+ extra="allow",
+ alias_generator=_update_query_alias_for,
+ populate_by_name=True,
+ )
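For illustration only, the `ListQuery` parameters defined above could be populated as follows; the QDSL filter string is a made-up placeholder, not an example from the API documentation:

```python
from waylay.services.queries.queries.manage_api import ListQuery

page = ListQuery(q='meta.region:"eu"', limit=50, offset=100)
print(page.model_dump(by_alias=True, exclude_none=True))
# -> {'q': 'meta.region:"eu"', 'limit': 50, 'offset': 100}
```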
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/queries/py.typed b/waylay-sdk-queries-types/src/waylay/services/queries/queries/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/waylay-sdk-queries-types/src/waylay/services/queries/queries/status_api.py b/waylay-sdk-queries-types/src/waylay/services/queries/queries/status_api.py
new file mode 100644
index 0000000..6ae43c3
--- /dev/null
+++ b/waylay-sdk-queries-types/src/waylay/services/queries/queries/status_api.py
@@ -0,0 +1,31 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) query parameters.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+from __future__ import annotations # for Python 3.7–3.9
+
+from pydantic import (
+ ConfigDict,
+)
+from waylay.sdk.api._models import BaseModel as WaylayBaseModel
+
+
+def _get_query_alias_for(field_name: str) -> str:
+ return field_name
+
+
+class GetQuery(WaylayBaseModel):
+ """Model for `get` query parameters."""
+
+ model_config = ConfigDict(
+ protected_namespaces=(),
+ extra="allow",
+ alias_generator=_get_query_alias_for,
+ populate_by_name=True,
+ )
diff --git a/waylay-sdk-queries/.openapi-generator/FILES b/waylay-sdk-queries/.openapi-generator/FILES
new file mode 100644
index 0000000..7d04e8a
--- /dev/null
+++ b/waylay-sdk-queries/.openapi-generator/FILES
@@ -0,0 +1,10 @@
+LICENSE.txt
+pyproject.toml
+src/waylay/services/queries/api/__init__.py
+src/waylay/services/queries/api/execute_api.py
+src/waylay/services/queries/api/manage_api.py
+src/waylay/services/queries/api/py.typed
+src/waylay/services/queries/api/status_api.py
+src/waylay/services/queries/service/__init__.py
+src/waylay/services/queries/service/py.typed
+src/waylay/services/queries/service/service.py
diff --git a/waylay-sdk-queries/.openapi-generator/VERSION b/waylay-sdk-queries/.openapi-generator/VERSION
new file mode 100644
index 0000000..1985849
--- /dev/null
+++ b/waylay-sdk-queries/.openapi-generator/VERSION
@@ -0,0 +1 @@
+7.7.0
diff --git a/waylay-sdk-queries/LICENSE.txt b/waylay-sdk-queries/LICENSE.txt
new file mode 100644
index 0000000..2796771
--- /dev/null
+++ b/waylay-sdk-queries/LICENSE.txt
@@ -0,0 +1,13 @@
+ISC License (ISC)
+Copyright 2024, Waylay
+
+Permission to use, copy, modify, and/or distribute this software for any purpose
+with or without fee is hereby granted, provided that the above copyright notice
+and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
+OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
\ No newline at end of file
diff --git a/waylay-sdk-queries/README.md b/waylay-sdk-queries/README.md
new file mode 100644
index 0000000..b1d3f06
--- /dev/null
+++ b/waylay-sdk-queries/README.md
@@ -0,0 +1,66 @@
+# Waylay Queries Service
+
+Execute and store queries on the Waylay timeseries.
+
+Protocol version: v1.
+
+This Python package is automatically generated based on the
+Waylay Queries OpenAPI specification (API version: 0.5.0).
+For more information, please visit [the openapi specification](https://docs.waylay.io/openapi/public/redocly/queries.html).
+
+It consists of a plugin for the waylay-sdk-core package and contains the Queries API methods.
+Note that the typed model classes for all path params, query params, body params and responses of these API methods are contained in a separate package, waylay-sdk-queries-types.
+
+## Requirements
+This package requires Python 3.9+.
+
+## Installation
+
+Normally this package is installed together with support for other services using the [waylay-sdk](https://pypi.org/project/waylay-sdk/) umbrella package:
+* `pip install waylay-sdk` will install `waylay-sdk-queries` together with the SDK api packages for other services.
+* `pip install waylay-sdk[types-queries]` will additionally install the types package `waylay-sdk-queries-types`.
+* `pip install waylay-sdk[types]` will install the types packages for this and all other services.
+
+Alternatively, you can install support for this _queries_ service only, installing or extending an existing [waylay-sdk-core](https://pypi.org/project/waylay-sdk-core/):
+
+- `pip install waylay-sdk-queries` to only install api support for _queries_.
+- `pip install waylay-sdk-queries[types]` to additionally install type support for _queries_.
+
+## Usage
+
+```python
+from pprint import pprint
+
+# Import the waylay-client from the waylay-sdk-core package
+from waylay.sdk.client import WaylayClient
+from waylay.sdk.api.api_exceptions import ApiError
+
+# Initialize a waylay client instance
+waylay_client = WaylayClient.from_profile()
+
+# Note that the typed model classes for responses/parameters/... are only available when `waylay-sdk-queries-types` is installed
+from waylay.services.queries.models.query_input import QueryInput
+from waylay.services.queries.models.query_result import QueryResult
+try:
+ # Execute Query
+ # calls `POST /queries/v1/queries/v1/data`
+ api_response = await waylay_client.queries.execute.execute(
+ # query parameters:
+ query = {
+ 'resource': '13efb488-75ac-4dac-828a-d49c5c2ebbfc',
+ 'metric': 'temperature',
+ },
+ # json data: use a generated model or a json-serializable python data structure (dict, list)
+ json = QueryInput(), # the QueryInput model imported above
+ headers = {
+ 'accept': 'accept_example',
+ },
+ )
+ print("The response of queries.execute.execute:\n")
+ pprint(api_response)
+except ApiError as e:
+ print("Exception when calling queries.execute.execute: %s\n" % e)
+```
+
+
+For more information, please visit the [Waylay API documentation](https://docs.waylay.io/#/api/?id=software-development-kits).
\ No newline at end of file
diff --git a/waylay-sdk-queries/pyproject.toml b/waylay-sdk-queries/pyproject.toml
new file mode 100644
index 0000000..c5cb80a
--- /dev/null
+++ b/waylay-sdk-queries/pyproject.toml
@@ -0,0 +1,76 @@
+[build-system]
+requires = ["setuptools >= 61.0"]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "waylay-sdk-queries"
+version = "0.5.0.20240802"
+description = "Waylay Query: timeseries queries (v1 protocol)"
+authors = [
+ { name = "Waylay", email = "info@waylay.io"}
+]
+keywords = ["Waylay Query: timeseries queries (v1 protocol)"]
+requires-python = ">= 3.9"
+dependencies = [
+ "waylay-sdk-core ~= 0.2.3",
+ "pydantic ~= 2.6",
+ "typing-extensions ~= 4.10",
+ "eval-type-backport ~= 0.1.3; python_version < '3.10'",
+]
+readme = "README.md"
+license={file = "LICENSE.txt"}
+
+[project.urls]
+Homepage = "https://www.waylay.io/"
+Documentation = "https://docs.waylay.io/#/api/?id=software-development-kits"
+Repository = "https://github.com/waylayio/waylay-sdk-queries-py.git"
+"Openapi Specification" = "https://docs.waylay.io/openapi/public/redocly/queries.html"
+
+[project.optional-dependencies]
+dev = [
+ "mypy",
+ "ruff",
+ "types-python-jose",
+ "types-appdirs",
+ "types-python-dateutil",
+ "pytest",
+ "pytest-mock",
+ "pytest-httpx",
+ "pytest-asyncio",
+ "starlette",
+ "python-multipart",
+ "typeguard",
+ "pyyaml",
+ "jsf >= 0.11.1",
+]
+
+types = ["waylay-sdk-queries-types"]
+
+[project.entry-points.dynamic]
+"waylay_sdk_plugins"= "waylay.services.queries.service:PLUGINS"
+
+[tool.setuptools.packages.find]
+where = ["src/"]
+namespaces = true
+
+[tool.ruff]
+include = ["pyproject.toml", "src/**/*.py"]
+
+[tool.ruff.lint]
+# allow duplicate imports
+ignore=["F811"]
+# https://docs.astral.sh/ruff/rules
+select= [
+ "UP007", "FA102", # convert Union to | (pep-604)
+ "I001", "F401", # sort and remove unused imports
+ "PIE790", # remove unnecessary pass statements
+ "E303", # too many blank lines
+]
+
+[tool.ruff.lint.per-file-ignores]
+# do not touch imports here
+"__init__.py" = ["F401"]
+"conftest.py" = ["F401"]
+
+[tool.pytest.ini_options]
+asyncio_mode = "auto"
\ No newline at end of file
diff --git a/waylay-sdk-queries/src/waylay/services/queries/api/__init__.py b/waylay-sdk-queries/src/waylay/services/queries/api/__init__.py
new file mode 100644
index 0000000..c843532
--- /dev/null
+++ b/waylay-sdk-queries/src/waylay/services/queries/api/__init__.py
@@ -0,0 +1,12 @@
+"""Waylay Query: timeseries queries (v1 protocol): apis."""
+
+# import apis into api package
+from .execute_api import ExecuteApi
+from .manage_api import ManageApi
+from .status_api import StatusApi
+
+__all__ = [
+ "ExecuteApi",
+ "ManageApi",
+ "StatusApi",
+]
diff --git a/waylay-sdk-queries/src/waylay/services/queries/api/__pycache__/__init__.cpython-311.pyc b/waylay-sdk-queries/src/waylay/services/queries/api/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..b49069d
Binary files /dev/null and b/waylay-sdk-queries/src/waylay/services/queries/api/__pycache__/__init__.cpython-311.pyc differ
diff --git a/waylay-sdk-queries/src/waylay/services/queries/api/__pycache__/execute_api.cpython-311.pyc b/waylay-sdk-queries/src/waylay/services/queries/api/__pycache__/execute_api.cpython-311.pyc
new file mode 100644
index 0000000..0f6fd02
Binary files /dev/null and b/waylay-sdk-queries/src/waylay/services/queries/api/__pycache__/execute_api.cpython-311.pyc differ
diff --git a/waylay-sdk-queries/src/waylay/services/queries/api/__pycache__/manage_api.cpython-311.pyc b/waylay-sdk-queries/src/waylay/services/queries/api/__pycache__/manage_api.cpython-311.pyc
new file mode 100644
index 0000000..b31d53e
Binary files /dev/null and b/waylay-sdk-queries/src/waylay/services/queries/api/__pycache__/manage_api.cpython-311.pyc differ
diff --git a/waylay-sdk-queries/src/waylay/services/queries/api/__pycache__/status_api.cpython-311.pyc b/waylay-sdk-queries/src/waylay/services/queries/api/__pycache__/status_api.cpython-311.pyc
new file mode 100644
index 0000000..34ff651
Binary files /dev/null and b/waylay-sdk-queries/src/waylay/services/queries/api/__pycache__/status_api.cpython-311.pyc differ
diff --git a/waylay-sdk-queries/src/waylay/services/queries/api/execute_api.py b/waylay-sdk-queries/src/waylay/services/queries/api/execute_api.py
new file mode 100644
index 0000000..11480bf
--- /dev/null
+++ b/waylay-sdk-queries/src/waylay/services/queries/api/execute_api.py
@@ -0,0 +1,428 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) api.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+from __future__ import annotations # for Python 3.7–3.9
+
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Literal,
+ TypeVar,
+ overload,
+)
+
+from pydantic import (
+ StrictBool,
+ StrictStr,
+ TypeAdapter,
+)
+from waylay.sdk.api import (
+ HeaderTypes,
+ QueryParamTypes,
+ Response,
+)
+from waylay.sdk.api._models import Model
+from waylay.sdk.plugin import WithApiClient
+
+if TYPE_CHECKING:
+ from waylay.services.queries.models import (
+ HTTPValidationError,
+ QueryInput,
+ QueryResult,
+ )
+ from waylay.services.queries.queries.execute_api import (
+ ExecuteByNameQuery,
+ ExecuteQuery,
+ )
+
+
+try:
+ from waylay.services.queries.models import (
+ HTTPValidationError,
+ QueryInput,
+ QueryResult,
+ )
+ from waylay.services.queries.queries.execute_api import (
+ ExecuteByNameQuery,
+ ExecuteQuery,
+ )
+
+ MODELS_AVAILABLE = True
+except ImportError:
+ MODELS_AVAILABLE = False
+
+ if not TYPE_CHECKING:
+ ExecuteByNameQuery = dict
+ QueryResult = Model
+
+ HTTPValidationError = Model
+
+ QueryInput = Model
+
+ ExecuteQuery = dict
+ QueryResult = Model
+
+ HTTPValidationError = Model
+
+
+T = TypeVar("T")
+
+
+class ExecuteApi(WithApiClient):
+ """ExecuteApi service methods.
+
+ NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ @overload
+ async def execute_by_name(
+ self,
+ query_name: StrictStr,
+ *,
+ query: ExecuteByNameQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> QueryResult: ...
+
+ @overload
+ async def execute_by_name(
+ self,
+ query_name: StrictStr,
+ *,
+ query: ExecuteByNameQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ @overload
+ async def execute_by_name(
+ self,
+ query_name: StrictStr,
+ *,
+ query: ExecuteByNameQuery | QueryParamTypes | None = None,
+ raw_response: Literal[True],
+ select_path: Literal["_not_used_"] = "_not_used_",
+ response_type: Literal[None] = None, # not used
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Response: ...
+
+ @overload
+ async def execute_by_name(
+ self,
+ query_name: StrictStr,
+ *,
+ query: ExecuteByNameQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Model: ...
+
+ @overload
+ async def execute_by_name(
+ self,
+ query_name: StrictStr,
+ *,
+ query: ExecuteByNameQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ async def execute_by_name(
+ self,
+ query_name: StrictStr,
+ *,
+ query: ExecuteByNameQuery | QueryParamTypes | None = None,
+ raw_response: StrictBool = False,
+ select_path: str = "",
+ response_type: T | None = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> QueryResult | T | Response | Model:
+ """Execute Named Query.
+
+ Execute a named timeseries query. Retrieves a stored query definition by name, applies overrides from the url parameters, and executes it.
+ :param query_name: (required)
+ :type query_name: str
+ :param query: URL Query parameters.
+ :type query: ExecuteByNameQuery | QueryParamTypes, optional
+ :param query['resource'] (dict)
+     query.resource (Query) : Default Resource Override.
+ :type query['resource']: str
+ :param query['metric'] (dict)
+     query.metric (Query) : Default Metric Override.
+ :type query['metric']: str
+ :param query['aggregation'] (dict)
+     query.aggregation (Query) :
+ :type query['aggregation']: AggregationMethod
+ :param query['interpolation'] (dict)
+     query.interpolation (Query) :
+ :type query['interpolation']: Interpolation
+ :param query['freq'] (dict)
+     query.freq (Query) : Override for the `freq` query attribute.
+ :type query['freq']: GroupingIntervalOverride
+ :param query['from'] (dict)
+     query.var_from (Query) :
+ :type query['from']: FromOverride
+ :param query['until'] (dict)
+     query.until (Query) :
+ :type query['until']: FromOverride
+ :param query['window'] (dict)
+     query.window (Query) :
+ :type query['window']: WindowOverride
+ :param query['periods'] (dict)
+     query.periods (Query) :
+ :type query['periods']: int
+ :param query['render'] (dict)
+     query.render (Query) :
+ :type query['render']: Render1
+ :param raw_response: If true, return the http Response object instead of returning an api model object, or throwing an ApiError.
+ :param select_path: Denotes the json path applied to the response object before returning it.
+ Set it to the empty string `""` to receive the full response object.
+ :param response_type: If specified, the response is parsed into an instance of the specified type.
+ :param validate_request: If set to false, the request body and query parameters are NOT validated against the models in the service types package, even when available.
+ :param headers: Header parameters for this request
+ :type headers: dict, optional
+ :param `**kwargs`: Additional parameters passed on to the http client.
+ See below.
+ :Keyword Arguments:
+ * timeout: a single numeric timeout in seconds,
+ or a tuple of _connect_, _read_, _write_ and _pool_ timeouts.
+ * stream: if true, the response will be in streaming mode
+ * cookies
+ * extensions
+ * auth
+ * follow_redirects: bool
+
+ :return: Returns the result object if the http request succeeded with status code '2XX'.
+ :raises APIError: If the http request has a status code different from `2XX`. This
+ object wraps both the http Response and any parsed data.
+ """
+
+ # path parameters
+ path_params: Dict[str, str] = {
+ "query_name": str(query_name),
+ }
+
+ ## named body parameters
+ body_args: Dict[str, Any] = {}
+
+ # query parameters
+ if query is not None and MODELS_AVAILABLE and validate_request:
+ query = TypeAdapter(ExecuteByNameQuery).validate_python(query)
+
+ response_types_map: Dict[str, Any] = (
+ {"2XX": response_type}
+ if response_type is not None
+ else {
+ "200": QueryResult if not select_path else Model,
+ }
+ )
+ non_200_response_types_map: Dict[str, Any] = {
+ "422": HTTPValidationError,
+ }
+ response_types_map.update(non_200_response_types_map)
+
+ ## perform request
+ return await self.api_client.request(
+ method="GET",
+ resource_path="/queries/v1/queries/v1/data/{query_name}",
+ path_params=path_params,
+ params=query,
+ **body_args,
+ headers=headers,
+ **kwargs,
+ response_type=response_types_map,
+ select_path=select_path,
+ raw_response=raw_response,
+ )
+
+ @overload
+ async def execute(
+ self,
+ *,
+ json: QueryInput,
+ query: ExecuteQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> QueryResult: ...
+
+ @overload
+ async def execute(
+ self,
+ *,
+ json: QueryInput,
+ query: ExecuteQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ @overload
+ async def execute(
+ self,
+ *,
+ json: QueryInput,
+ query: ExecuteQuery | QueryParamTypes | None = None,
+ raw_response: Literal[True],
+ select_path: Literal["_not_used_"] = "_not_used_",
+ response_type: Literal[None] = None, # not used
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Response: ...
+
+ @overload
+ async def execute(
+ self,
+ *,
+ json: QueryInput,
+ query: ExecuteQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Model: ...
+
+ @overload
+ async def execute(
+ self,
+ *,
+ json: QueryInput,
+ query: ExecuteQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ async def execute(
+ self,
+ *,
+ json: QueryInput,
+ query: ExecuteQuery | QueryParamTypes | None = None,
+ raw_response: StrictBool = False,
+ select_path: str = "",
+ response_type: T | None = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> QueryResult | T | Response | Model:
+ """Execute Query.
+
+ Execute a timeseries query. Executes the timeseries query specified in the request body, after applying any overrides from the url parameters.
+ :param json: The json request body.
+ :type json: QueryInput, optional
+ :param query: URL Query parameters.
+ :type query: ExecuteQuery | QueryParamTypes, optional
+ :param query['resource'] (dict)
+     query.resource (Query) : Default Resource Override.
+ :type query['resource']: str
+ :param query['metric'] (dict)
+     query.metric (Query) : Default Metric Override.
+ :type query['metric']: str
+ :param query['aggregation'] (dict)
+     query.aggregation (Query) :
+ :type query['aggregation']: AggregationMethod
+ :param query['interpolation'] (dict)
+     query.interpolation (Query) :
+ :type query['interpolation']: Interpolation
+ :param query['freq'] (dict)
+     query.freq (Query) : Override for the `freq` query attribute.
+ :type query['freq']: GroupingIntervalOverride
+ :param query['from'] (dict)
+     query.var_from (Query) :
+ :type query['from']: FromOverride
+ :param query['until'] (dict)
+     query.until (Query) :
+ :type query['until']: FromOverride
+ :param query['window'] (dict)
+     query.window (Query) :
+ :type query['window']: WindowOverride
+ :param query['periods'] (dict)
+     query.periods (Query) :
+ :type query['periods']: int
+ :param query['render'] (dict)
+     query.render (Query) :
+ :type query['render']: Render1
+ :param raw_response: If true, return the http Response object instead of returning an api model object, or throwing an ApiError.
+ :param select_path: Denotes the json path applied to the response object before returning it.
+ Set it to the empty string `""` to receive the full response object.
+ :param response_type: If specified, the response is parsed into an instance of the specified type.
+ :param validate_request: If set to false, the request body and query parameters are NOT validated against the models in the service types package, even when available.
+ :param headers: Header parameters for this request
+ :type headers: dict, optional
+ :param `**kwargs`: Additional parameters passed on to the http client.
+ See below.
+ :Keyword Arguments:
+ * timeout: a single numeric timeout in seconds,
+ or a tuple of _connect_, _read_, _write_ and _pool_ timeouts.
+ * stream: if true, the response will be in streaming mode
+ * cookies
+ * extensions
+ * auth
+ * follow_redirects: bool
+
+ :return: Returns the result object if the http request succeeded with status code '2XX'.
+ :raises APIError: If the http request has a status code different from `2XX`. This
+ object wraps both the http Response and any parsed data.
+ """
+
+ # path parameters
+ path_params: Dict[str, str] = {}
+
+ ## named body parameters
+ body_args: Dict[str, Any] = {}
+ if json is not None and validate_request:
+ body_adapter: Any = TypeAdapter(QueryInput)
+ json = body_adapter.validate_python(json) # type: ignore # https://github.com/pydantic/pydantic/discussions/7094
+ body_args["json"] = json
+
+ # query parameters
+ if query is not None and MODELS_AVAILABLE and validate_request:
+ query = TypeAdapter(ExecuteQuery).validate_python(query)
+
+ response_types_map: Dict[str, Any] = (
+ {"2XX": response_type}
+ if response_type is not None
+ else {
+ "200": QueryResult if not select_path else Model,
+ }
+ )
+ non_200_response_types_map: Dict[str, Any] = {
+ "422": HTTPValidationError,
+ }
+ response_types_map.update(non_200_response_types_map)
+
+ ## perform request
+ return await self.api_client.request(
+ method="POST",
+ resource_path="/queries/v1/queries/v1/data",
+ path_params=path_params,
+ params=query,
+ **body_args,
+ headers=headers,
+ **kwargs,
+ response_type=response_types_map,
+ select_path=select_path,
+ raw_response=raw_response,
+ )
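+
+
+# Illustrative usage of the generic request options documented above (not part of the
+# generated code; `query_input` is a placeholder QueryInput or dict, and
+# `select_path="data"` assumes the query result carries a `data` member):
+#
+#   # return only the selected member of the response, parsed as a generic Model:
+#   data = await waylay_client.queries.execute.execute(json=query_input, select_path="data")
+#   # return the raw http Response instead of a parsed model:
+#   resp = await waylay_client.queries.execute.execute(json=query_input, raw_response=True)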
diff --git a/waylay-sdk-queries/src/waylay/services/queries/api/manage_api.py b/waylay-sdk-queries/src/waylay/services/queries/api/manage_api.py
new file mode 100644
index 0000000..150991e
--- /dev/null
+++ b/waylay-sdk-queries/src/waylay/services/queries/api/manage_api.py
@@ -0,0 +1,915 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) api.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+from __future__ import annotations # for Python 3.7–3.9
+
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Literal,
+ TypeVar,
+ overload,
+)
+
+from pydantic import (
+ Field,
+ StrictBool,
+ StrictStr,
+ TypeAdapter,
+)
+from typing_extensions import (
+ Annotated, # >=3.9,
+)
+from waylay.sdk.api import (
+ HeaderTypes,
+ QueryParamTypes,
+ Response,
+)
+from waylay.sdk.api._models import Model
+from waylay.sdk.plugin import WithApiClient
+
+if TYPE_CHECKING:
+ from waylay.services.queries.models import (
+ DeleteResponse,
+ HTTPValidationError,
+ QueriesListResponse,
+ QueryDefinition,
+ QueryEntityInput,
+ QueryResponse,
+ )
+ from waylay.services.queries.queries.manage_api import (
+ CreateQuery,
+ GetQuery,
+ ListQuery,
+ RemoveQuery,
+ UpdateQuery,
+ )
+
+
+try:
+ from waylay.services.queries.models import (
+ DeleteResponse,
+ HTTPValidationError,
+ QueriesListResponse,
+ QueryDefinition,
+ QueryEntityInput,
+ QueryResponse,
+ )
+ from waylay.services.queries.queries.manage_api import (
+ CreateQuery,
+ GetQuery,
+ ListQuery,
+ RemoveQuery,
+ UpdateQuery,
+ )
+
+ MODELS_AVAILABLE = True
+except ImportError:
+ MODELS_AVAILABLE = False
+
+ if not TYPE_CHECKING:
+ QueryEntityInput = Model
+
+ CreateQuery = dict
+ QueryResponse = Model
+
+ HTTPValidationError = Model
+
+ GetQuery = dict
+ QueryResponse = Model
+
+ HTTPValidationError = Model
+
+ ListQuery = dict
+ QueriesListResponse = Model
+
+ HTTPValidationError = Model
+
+ RemoveQuery = dict
+ DeleteResponse = Model
+
+ HTTPValidationError = Model
+
+ QueryDefinition = Model
+
+ UpdateQuery = dict
+ QueryResponse = Model
+
+ HTTPValidationError = Model
+
+
+T = TypeVar("T")
+
+
+class ManageApi(WithApiClient):
+ """ManageApi service methods.
+
+ NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ @overload
+ async def create(
+ self,
+ *,
+ json: QueryEntityInput,
+ query: CreateQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> QueryResponse: ...
+
+ @overload
+ async def create(
+ self,
+ *,
+ json: QueryEntityInput,
+ query: CreateQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ @overload
+ async def create(
+ self,
+ *,
+ json: QueryEntityInput,
+ query: CreateQuery | QueryParamTypes | None = None,
+ raw_response: Literal[True],
+ select_path: Literal["_not_used_"] = "_not_used_",
+ response_type: Literal[None] = None, # not used
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Response: ...
+
+ @overload
+ async def create(
+ self,
+ *,
+ json: QueryEntityInput,
+ query: CreateQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Model: ...
+
+ @overload
+ async def create(
+ self,
+ *,
+ json: QueryEntityInput,
+ query: CreateQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ async def create(
+ self,
+ *,
+ json: QueryEntityInput,
+ query: CreateQuery | QueryParamTypes | None = None,
+ raw_response: StrictBool = False,
+ select_path: str = "",
+ response_type: T | None = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> QueryResponse | T | Response | Model:
+ """Post Query.
+
+ Create a new named query.
+ :param json: The json request body.
+ :type json: QueryEntityInput, optional
+ :param query: URL Query parameters.
+ :type query: CreateQuery | QueryParamTypes, optional
+ :param raw_response: If true, return the http Response object instead of returning an api model object, or throwing an ApiError.
+ :param select_path: Denotes the json path applied to the response object before returning it.
+ Set it to the empty string `""` to receive the full response object.
+ :param response_type: If specified, the response is parsed into an instance of the specified type.
+ :param validate_request: If set to false, the request body and query parameters are NOT validated against the models in the service types package, even when available.
+ :param headers: Header parameters for this request
+ :type headers: dict, optional
+ :param `**kwargs`: Additional parameters passed on to the http client.
+ See below.
+ :Keyword Arguments:
+ * timeout: a single numeric timeout in seconds,
+ or a tuple of _connect_, _read_, _write_ and _pool_ timeouts.
+ * stream: if true, the response will be in streaming mode
+ * cookies
+ * extensions
+ * auth
+ * follow_redirects: bool
+
+ :return: Returns the result object if the http request succeeded with status code '2XX'.
+ :raises APIError: If the http request has a status code different from `2XX`. This
+ object wraps both the http Response and any parsed data.
+ """
+
+ # path parameters
+ path_params: Dict[str, str] = {}
+
+ ## named body parameters
+ body_args: Dict[str, Any] = {}
+ if json is not None and validate_request:
+ body_adapter: Any = TypeAdapter(QueryEntityInput)
+ json = body_adapter.validate_python(json) # type: ignore # https://github.com/pydantic/pydantic/discussions/7094
+ body_args["json"] = json
+
+ # query parameters
+ if query is not None and MODELS_AVAILABLE and validate_request:
+ query = TypeAdapter(CreateQuery).validate_python(query)
+
+ response_types_map: Dict[str, Any] = (
+ {"2XX": response_type}
+ if response_type is not None
+ else {
+ "200": QueryResponse if not select_path else Model,
+ }
+ )
+ non_200_response_types_map: Dict[str, Any] = {
+ "422": HTTPValidationError,
+ }
+ response_types_map.update(non_200_response_types_map)
+
+ ## perform request
+ return await self.api_client.request(
+ method="POST",
+ resource_path="/queries/v1/queries/v1/query",
+ path_params=path_params,
+ params=query,
+ **body_args,
+ headers=headers,
+ **kwargs,
+ response_type=response_types_map,
+ select_path=select_path,
+ raw_response=raw_response,
+ )
+
+ @overload
+ async def get(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ query: GetQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> QueryResponse: ...
+
+ @overload
+ async def get(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ query: GetQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ @overload
+ async def get(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ query: GetQuery | QueryParamTypes | None = None,
+ raw_response: Literal[True],
+ select_path: Literal["_not_used_"] = "_not_used_",
+ response_type: Literal[None] = None, # not used
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Response: ...
+
+ @overload
+ async def get(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ query: GetQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Model: ...
+
+ @overload
+ async def get(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ query: GetQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ async def get(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ query: GetQuery | QueryParamTypes | None = None,
+ raw_response: StrictBool = False,
+ select_path: str = "",
+ response_type: T | None = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> QueryResponse | T | Response | Model:
+ """Get Query.
+
+ Get the definition of a named query.
+ :param query_name: Name of the stored query. (required)
+ :type query_name: str
+ :param query: URL Query parameters.
+ :type query: GetQuery | QueryParamTypes, optional
+ :param raw_response: If true, return the http Response object instead of returning an api model object, or throwing an ApiError.
+ :param select_path: Denotes the json path applied to the response object before returning it.
+ Set it to the empty string `""` to receive the full response object.
+ :param response_type: If specified, the response is parsed into an instance of the specified type.
+ :param validate_request: If set to false, the request body and query parameters are NOT validated against the models in the service types package, even when available.
+ :param headers: Header parameters for this request
+ :type headers: dict, optional
+ :param `**kwargs`: Additional parameters passed on to the http client.
+ See below.
+ :Keyword Arguments:
+ * timeout: a single numeric timeout in seconds,
+ or a tuple of _connect_, _read_, _write_ and _pool_ timeouts.
+ * stream: if true, the response will be in streaming mode
+ * cookies
+ * extensions
+ * auth
+ * follow_redirects: bool
+
+ :return: Returns the result object if the http request succeeded with status code '2XX'.
+ :raises APIError: If the http request has a status code different from `2XX`. This
+ object wraps both the http Response and any parsed data.
+ """
+
+ # path parameters
+ path_params: Dict[str, str] = {
+ "query_name": str(query_name),
+ }
+
+ ## named body parameters
+ body_args: Dict[str, Any] = {}
+
+ # query parameters
+ if query is not None and MODELS_AVAILABLE and validate_request:
+ query = TypeAdapter(GetQuery).validate_python(query)
+
+ response_types_map: Dict[str, Any] = (
+ {"2XX": response_type}
+ if response_type is not None
+ else {
+ "200": QueryResponse if not select_path else Model,
+ }
+ )
+ non_200_response_types_map: Dict[str, Any] = {
+ "422": HTTPValidationError,
+ }
+ response_types_map.update(non_200_response_types_map)
+
+ ## perform request
+ return await self.api_client.request(
+ method="GET",
+ resource_path="/queries/v1/queries/v1/query/{query_name}",
+ path_params=path_params,
+ params=query,
+ **body_args,
+ headers=headers,
+ **kwargs,
+ response_type=response_types_map,
+ select_path=select_path,
+ raw_response=raw_response,
+ )
+
+ @overload
+ async def list(
+ self,
+ *,
+ query: ListQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> QueriesListResponse: ...
+
+ @overload
+ async def list(
+ self,
+ *,
+ query: ListQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ @overload
+ async def list(
+ self,
+ *,
+ query: ListQuery | QueryParamTypes | None = None,
+ raw_response: Literal[True],
+ select_path: Literal["_not_used_"] = "_not_used_",
+ response_type: Literal[None] = None, # not used
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Response: ...
+
+ @overload
+ async def list(
+ self,
+ *,
+ query: ListQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Model: ...
+
+ @overload
+ async def list(
+ self,
+ *,
+ query: ListQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ async def list(
+ self,
+ *,
+ query: ListQuery | QueryParamTypes | None = None,
+ raw_response: StrictBool = False,
+ select_path: str = "",
+ response_type: T | None = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> QueriesListResponse | T | Response | Model:
+ """List Queries.
+
+ List named queries.
+ :param query: URL Query parameters.
+ :type query: ListQuery | QueryParamTypes, optional
+ :param query['q'] (dict)
+     query.q (Query) : The QDSL filter condition for the stored queries. Note that this value needs to be escaped when passed as a URL parameter.
+ :type query['q']: str
+ :param query['limit'] (dict)
+     query.limit (Query) : Maximal number of items returned in one response.
+ :type query['limit']: int
+ :param query['offset'] (dict)
+     query.offset (Query) : Number of items to skip before listing results in the response page.
+ :type query['offset']: int
+ :param raw_response: If true, return the http Response object instead of returning an api model object, or throwing an ApiError.
+ :param select_path: Denotes the json path applied to the response object before returning it.
+ Set it to the empty string `""` to receive the full response object.
+ :param response_type: If specified, the response is parsed into an instance of the specified type.
+ :param validate_request: If set to false, the request body and query parameters are NOT validated against the models in the service types package, even when available.
+ :param headers: Header parameters for this request
+ :type headers: dict, optional
+ :param `**kwargs`: Additional parameters passed on to the http client.
+ See below.
+ :Keyword Arguments:
+ * timeout: a single numeric timeout in seconds,
+ or a tuple of _connect_, _read_, _write_ and _pool_ timeouts.
+ * stream: if true, the response will be in streaming mode
+ * cookies
+ * extensions
+ * auth
+ * follow_redirects: bool
+
+ :return: Returns the result object if the http request succeeded with status code '2XX'.
+ :raises APIError: If the http request has a status code different from `2XX`. This
+ object wraps both the http Response and any parsed data.
+ """
+
+ # path parameters
+ path_params: Dict[str, str] = {}
+
+ ## named body parameters
+ body_args: Dict[str, Any] = {}
+
+ # query parameters
+ if query is not None and MODELS_AVAILABLE and validate_request:
+ query = TypeAdapter(ListQuery).validate_python(query)
+
+ response_types_map: Dict[str, Any] = (
+ {"2XX": response_type}
+ if response_type is not None
+ else {
+ "200": QueriesListResponse if not select_path else Model,
+ }
+ )
+ non_200_response_types_map: Dict[str, Any] = {
+ "422": HTTPValidationError,
+ }
+ response_types_map.update(non_200_response_types_map)
+
+ ## perform request
+ return await self.api_client.request(
+ method="GET",
+ resource_path="/queries/v1/queries/v1/query",
+ path_params=path_params,
+ params=query,
+ **body_args,
+ headers=headers,
+ **kwargs,
+ response_type=response_types_map,
+ select_path=select_path,
+ raw_response=raw_response,
+ )
+
+ @overload
+ async def remove(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ query: RemoveQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> DeleteResponse: ...
+
+ @overload
+ async def remove(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ query: RemoveQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ @overload
+ async def remove(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ query: RemoveQuery | QueryParamTypes | None = None,
+ raw_response: Literal[True],
+ select_path: Literal["_not_used_"] = "_not_used_",
+ response_type: Literal[None] = None, # not used
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Response: ...
+
+ @overload
+ async def remove(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ query: RemoveQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Model: ...
+
+ @overload
+ async def remove(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ query: RemoveQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ async def remove(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ query: RemoveQuery | QueryParamTypes | None = None,
+ raw_response: StrictBool = False,
+ select_path: str = "",
+ response_type: T | None = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> DeleteResponse | T | Response | Model:
+ """Remove Query.
+
+ Remove definition of a named query.
+ :param query_name: Name of the stored query. (required)
+ :type query_name: str
+ :param query: URL Query parameters.
+ :type query: RemoveQuery | QueryParamTypes, optional
+ :param raw_response: If true, return the http Response object instead of returning an api model object, or throwing an ApiError.
+ :param select_path: Denotes the json path applied to the response object before returning it.
+ Set it to the empty string `""` to receive the full response object.
+ :param response_type: If specified, the response is parsed into an instance of the specified type.
+ :param validate_request: If set to false, the request body and query parameters are NOT validated against the models in the service types package, even when available.
+ :param headers: Header parameters for this request
+ :type headers: dict, optional
+ :param `**kwargs`: Additional parameters passed on to the http client.
+ See below.
+ :Keyword Arguments:
+ * timeout: a single numeric timeout in seconds,
+ or a tuple of _connect_, _read_, _write_ and _pool_ timeouts.
+ * stream: if true, the response will be in streaming mode
+ * cookies
+ * extensions
+ * auth
+ * follow_redirects: bool
+
+ :return: Returns the result object if the http request succeeded with status code '2XX'.
+ :raises APIError: If the http request has a status code different from `2XX`. This
+ object wraps both the http Response and any parsed data.
+ """
+
+ # path parameters
+ path_params: Dict[str, str] = {
+ "query_name": str(query_name),
+ }
+
+ ## named body parameters
+ body_args: Dict[str, Any] = {}
+
+ # query parameters
+ if query is not None and MODELS_AVAILABLE and validate_request:
+ query = TypeAdapter(RemoveQuery).validate_python(query)
+
+ response_types_map: Dict[str, Any] = (
+ {"2XX": response_type}
+ if response_type is not None
+ else {
+ "200": DeleteResponse if not select_path else Model,
+ }
+ )
+ non_200_response_types_map: Dict[str, Any] = {
+ "422": HTTPValidationError,
+ }
+ response_types_map.update(non_200_response_types_map)
+
+ ## perform request
+ return await self.api_client.request(
+ method="DELETE",
+ resource_path="/queries/v1/queries/v1/query/{query_name}",
+ path_params=path_params,
+ params=query,
+ **body_args,
+ headers=headers,
+ **kwargs,
+ response_type=response_types_map,
+ select_path=select_path,
+ raw_response=raw_response,
+ )
+
+ @overload
+ async def update(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ json: QueryDefinition,
+ query: UpdateQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> QueryResponse: ...
+
+ @overload
+ async def update(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ json: QueryDefinition,
+ query: UpdateQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ @overload
+ async def update(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ json: QueryDefinition,
+ query: UpdateQuery | QueryParamTypes | None = None,
+ raw_response: Literal[True],
+ select_path: Literal["_not_used_"] = "_not_used_",
+ response_type: Literal[None] = None, # not used
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Response: ...
+
+ @overload
+ async def update(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ json: QueryDefinition,
+ query: UpdateQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Model: ...
+
+ @overload
+ async def update(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ json: QueryDefinition,
+ query: UpdateQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ async def update(
+ self,
+ query_name: Annotated[
+ StrictStr, Field(description="Name of the stored query.")
+ ],
+ *,
+ json: QueryDefinition,
+ query: UpdateQuery | QueryParamTypes | None = None,
+ raw_response: StrictBool = False,
+ select_path: str = "",
+ response_type: T | None = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> QueryResponse | T | Response | Model:
+ """Update Query.
+
+ Create or update a named query definition.
+ :param query_name: Name of the stored query. (required)
+ :type query_name: str
+ :param json: The json request body.
+ :type json: QueryDefinition, optional
+ :param query: URL Query parameters.
+ :type query: UpdateQuery | QueryParamTypes, optional
+ :param raw_response: If true, return the http Response object instead of returning an api model object, or throwing an ApiError.
+ :param select_path: Denotes the json path applied to the response object before returning it.
+ Set it to the empty string `""` to receive the full response object.
+ :param response_type: If specified, the response is parsed into an instance of the specified type.
+ :param validate_request: If set to false, the request body and query parameters are NOT validated against the models in the service types package, even when available.
+ :param headers: Header parameters for this request
+ :type headers: dict, optional
+ :param `**kwargs`: Additional parameters passed on to the http client.
+ See below.
+ :Keyword Arguments:
+ * timeout: a single numeric timeout in seconds,
+ or a tuple of _connect_, _read_, _write_ and _pool_ timeouts.
+ * stream: if true, the response will be in streaming mode
+ * cookies
+ * extensions
+ * auth
+ * follow_redirects: bool
+
+ :return: Returns the result object if the http request succeeded with status code '2XX'.
+ :raises APIError: If the http request has a status code different from `2XX`. This
+ object wraps both the http Response and any parsed data.
+ """
+
+ # path parameters
+ path_params: Dict[str, str] = {
+ "query_name": str(query_name),
+ }
+
+ ## named body parameters
+ body_args: Dict[str, Any] = {}
+ if json is not None and validate_request:
+ body_adapter: Any = TypeAdapter(QueryDefinition)
+ json = body_adapter.validate_python(json) # type: ignore # https://github.com/pydantic/pydantic/discussions/7094
+ body_args["json"] = json
+
+ # query parameters
+ if query is not None and MODELS_AVAILABLE and validate_request:
+ query = TypeAdapter(UpdateQuery).validate_python(query)
+
+ response_types_map: Dict[str, Any] = (
+ {"2XX": response_type}
+ if response_type is not None
+ else {
+ "200": QueryResponse if not select_path else Model,
+ }
+ )
+ non_200_response_types_map: Dict[str, Any] = {
+ "422": HTTPValidationError,
+ }
+ response_types_map.update(non_200_response_types_map)
+
+ ## perform request
+ return await self.api_client.request(
+ method="PUT",
+ resource_path="/queries/v1/queries/v1/query/{query_name}",
+ path_params=path_params,
+ params=query,
+ **body_args,
+ headers=headers,
+ **kwargs,
+ response_type=response_types_map,
+ select_path=select_path,
+ raw_response=raw_response,
+ )
diff --git a/waylay-sdk-queries/src/waylay/services/queries/api/py.typed b/waylay-sdk-queries/src/waylay/services/queries/api/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/waylay-sdk-queries/src/waylay/services/queries/api/status_api.py b/waylay-sdk-queries/src/waylay/services/queries/api/status_api.py
new file mode 100644
index 0000000..290cb07
--- /dev/null
+++ b/waylay-sdk-queries/src/waylay/services/queries/api/status_api.py
@@ -0,0 +1,197 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol) api.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+from __future__ import annotations # for Python 3.7–3.9
+
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Dict,
+ Literal,
+ TypeVar,
+ overload,
+)
+
+from pydantic import (
+ StrictBool,
+ TypeAdapter,
+)
+from waylay.sdk.api import (
+ HeaderTypes,
+ QueryParamTypes,
+ Response,
+)
+from waylay.sdk.api._models import Model
+from waylay.sdk.plugin import WithApiClient
+
+if TYPE_CHECKING:
+ from waylay.services.queries.queries.status_api import GetQuery
+
+
+try:
+ from waylay.services.queries.queries.status_api import GetQuery
+
+ MODELS_AVAILABLE = True
+except ImportError:
+ MODELS_AVAILABLE = False
+
+ if not TYPE_CHECKING:
+ GetQuery = dict
+
+
+T = TypeVar("T")
+
+
+class StatusApi(WithApiClient):
+ """StatusApi service methods.
+
+ NOTE: This class is auto generated by OpenAPI Generator
+ Ref: https://openapi-generator.tech
+
+ Do not edit the class manually.
+ """
+
+ @overload
+ async def get(
+ self,
+ *,
+ query: GetQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Dict[str, str]: ...
+
+ @overload
+ async def get(
+ self,
+ *,
+ query: GetQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: Literal[""] = "",
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ @overload
+ async def get(
+ self,
+ *,
+ query: GetQuery | QueryParamTypes | None = None,
+ raw_response: Literal[True],
+ select_path: Literal["_not_used_"] = "_not_used_",
+ response_type: Literal[None] = None, # not used
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Response: ...
+
+ @overload
+ async def get(
+ self,
+ *,
+ query: GetQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: Literal[None] = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Model: ...
+
+ @overload
+ async def get(
+ self,
+ *,
+ query: GetQuery | QueryParamTypes | None = None,
+ raw_response: Literal[False] = False,
+ select_path: str,
+ response_type: T,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> T: ...
+
+ async def get(
+ self,
+ *,
+ query: GetQuery | QueryParamTypes | None = None,
+ raw_response: StrictBool = False,
+ select_path: str = "",
+ response_type: T | None = None,
+ validate_request: StrictBool = True,
+ headers: HeaderTypes | None = None,
+ **kwargs,
+ ) -> Dict[str, str] | T | Response | Model:
+ """Get Version And Health.
+
+ Get the version and health status for waylay-query.
+ :param query: URL Query parameters.
+ :type query: GetQuery | QueryParamTypes, optional
+ :param raw_response: If true, return the http Response object instead of returning an api model object, or throwing an ApiError.
+ :param select_path: Denotes the json path applied to the response object before returning it.
+ Set it to the empty string `""` to receive the full response object.
+ :param response_type: If specified, the response is parsed into an instance of the specified type.
+ :param validate_request: If set to false, the request body and query parameters are NOT validated against the models in the service types package, even when available.
+ :param headers: Header parameters for this request
+ :type headers: dict, optional
+ :param `**kwargs`: Additional parameters passed on to the http client.
+ See below.
+ :Keyword Arguments:
+ * timeout: a single numeric timeout in seconds,
+ or a tuple of _connect_, _read_, _write_ and _pool_ timeouts.
+ * stream: if true, the response will be in streaming mode
+ * cookies
+ * extensions
+ * auth
+ * follow_redirects: bool
+
+ :return: Returns the result object if the http request succeeded with status code '2XX'.
+ :raises APIError: If the http request has a status code different from `2XX`. This
+ object wraps both the http Response and any parsed data.
+ """
+
+ # path parameters
+ path_params: Dict[str, str] = {}
+
+ ## named body parameters
+ body_args: Dict[str, Any] = {}
+
+ # query parameters
+ if query is not None and MODELS_AVAILABLE and validate_request:
+ query = TypeAdapter(GetQuery).validate_python(query)
+
+ response_types_map: Dict[str, Any] = (
+ {"2XX": response_type}
+ if response_type is not None
+ else {
+ "200": Dict[str, str] if not select_path else Model,
+ }
+ )
+ non_200_response_types_map: Dict[str, Any] = {}
+ response_types_map.update(non_200_response_types_map)
+
+ ## perform request
+ return await self.api_client.request(
+ method="GET",
+ resource_path="/queries/v1/queries/v1",
+ path_params=path_params,
+ params=query,
+ **body_args,
+ headers=headers,
+ **kwargs,
+ response_type=response_types_map,
+ select_path=select_path,
+ raw_response=raw_response,
+ )
diff --git a/waylay-sdk-queries/src/waylay/services/queries/service/__init__.py b/waylay-sdk-queries/src/waylay/services/queries/service/__init__.py
new file mode 100644
index 0000000..54cbf3c
--- /dev/null
+++ b/waylay-sdk-queries/src/waylay/services/queries/service/__init__.py
@@ -0,0 +1,24 @@
+# coding: utf-8
+"""Waylay Query: timeseries queries (v1 protocol): Service.
+
+This code was generated from the OpenAPI documentation of 'Waylay Query: timeseries queries (v1 protocol)'
+
+version: 0.5.0
+
+ Execute and store queries on the Waylay timeseries. Protocol version: v1.
+
+Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+Do not edit the class manually.
+"""
+
+__version__ = "0.5.0.20240802"
+
+from .service import QueriesService
+
+PLUGINS = [QueriesService]
+
+__all__ = [
+ "__version__",
+ "QueriesService",
+]
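+
+# Illustrative note (not part of the generated code): the `waylay_sdk_plugins` entry point
+# declared in pyproject.toml ("waylay.services.queries.service:PLUGINS") resolves to the
+# PLUGINS list above, e.g. on Python 3.10+:
+#
+#   from importlib.metadata import entry_points
+#   for ep in entry_points(group="dynamic", name="waylay_sdk_plugins"):
+#       print(ep.value, ep.load())  # 'waylay.services.queries.service:PLUGINS', [QueriesService]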
diff --git a/waylay-sdk-queries/src/waylay/services/queries/service/__pycache__/__init__.cpython-311.pyc b/waylay-sdk-queries/src/waylay/services/queries/service/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..f36b985
Binary files /dev/null and b/waylay-sdk-queries/src/waylay/services/queries/service/__pycache__/__init__.cpython-311.pyc differ
diff --git a/waylay-sdk-queries/src/waylay/services/queries/service/__pycache__/service.cpython-311.pyc b/waylay-sdk-queries/src/waylay/services/queries/service/__pycache__/service.cpython-311.pyc
new file mode 100644
index 0000000..e28f60e
Binary files /dev/null and b/waylay-sdk-queries/src/waylay/services/queries/service/__pycache__/service.cpython-311.pyc differ
diff --git a/waylay-sdk-queries/src/waylay/services/queries/service/py.typed b/waylay-sdk-queries/src/waylay/services/queries/service/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/waylay-sdk-queries/src/waylay/services/queries/service/service.py b/waylay-sdk-queries/src/waylay/services/queries/service/service.py
new file mode 100644
index 0000000..d084370
--- /dev/null
+++ b/waylay-sdk-queries/src/waylay/services/queries/service/service.py
@@ -0,0 +1,26 @@
+"""Queries Service."""
+
+from waylay.sdk import ApiClient, WaylayService
+
+from ..api.execute_api import ExecuteApi
+from ..api.manage_api import ManageApi
+from ..api.status_api import StatusApi
+
+
+class QueriesService(WaylayService):
+ """Queries Service Class."""
+
+ name = "queries"
+ title = "Queries Service"
+
+ execute: ExecuteApi
+ manage: ManageApi
+ status: StatusApi
+
+ def __init__(self, api_client: ApiClient):
+ """Create the queries service."""
+
+ super().__init__(api_client)
+ self.execute = ExecuteApi(api_client)
+ self.manage = ManageApi(api_client)
+ self.status = StatusApi(api_client)
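+
+# Illustrative usage (not part of the generated code): once registered as a plugin, this
+# service is available on a waylay client instance as `client.queries`, e.g.
+#
+#   about = await client.queries.status.get()
+#   listing = await client.queries.manage.list(query={"limit": 10})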