From ccce41ebfce46cca2ffe4645a4b6361522c59628 Mon Sep 17 00:00:00 2001 From: Filippo Ledda Date: Thu, 3 Mar 2022 17:44:11 +0100 Subject: [PATCH] #451 Add generated model library --- libraries/models/.gitignore | 66 + libraries/models/.gitlab-ci.yml | 24 + libraries/models/.openapi-generator-ignore | 35 + libraries/models/.travis.yml | 14 + libraries/models/README.md | 49 + .../models/cloudharness_model/__init__.py | 1 + .../models/cloudharness_model/api_client.py | 860 +++++++ .../cloudharness_model/configuration.py | 446 ++++ .../models/cloudharness_model/encoder.py | 20 + .../models/cloudharness_model/exceptions.py | 159 ++ .../cloudharness_model/models/__init__.py | 32 + .../models/application_config.py | 68 + .../models/application_dependencies_config.py | 122 + .../models/application_harness_config.py | 540 +++++ .../models/application_probe.py | 144 ++ .../models/auto_artifact_spec.py | 94 + .../models/backup_config.py | 290 +++ .../cloudharness_model/models/base_model_.py | 90 + .../cloudharness_model/models/cdc_event.py | 188 ++ .../models/cdc_event_meta.py | 180 ++ .../models/cpu_memory_config.py | 90 + .../models/database_deployment_config.py | 322 +++ .../database_deployment_config_all_of.py | 262 +++ .../models/deployment_auto_artifact_config.py | 212 ++ .../models/deployment_resources_conf.py | 92 + .../models/file_resources_config.py | 128 ++ .../models/harness_main_config.py | 412 ++++ .../models/ingress_config.py | 152 ++ .../models/ingress_config_all_of.py | 92 + .../ingress_config_all_of_letsencrypt.py | 64 + .../models/jupyter_hub_config.py | 146 ++ .../cloudharness_model/models/name_value.py | 92 + .../models/orchestration_event.py | 188 ++ .../models/orchestration_event_meta.py | 180 ++ .../models/registry_config.py | 94 + .../models/service_auto_artifact_config.py | 126 + .../service_auto_artifact_config_all_of.py | 66 + .../models/uri_role_mapping_config.py | 100 + .../models/cloudharness_model/models/user.py | 534 +++++ 
.../models/user_credential.py | 272 +++ .../cloudharness_model/models/user_group.py | 246 ++ libraries/models/cloudharness_model/rest.py | 346 +++ .../models/cloudharness_model/schemas.py | 2045 +++++++++++++++++ .../models/cloudharness_model/typing_utils.py | 32 + libraries/models/cloudharness_model/util.py | 161 ++ libraries/models/docs/ApplicationConfig.md | 12 + .../docs/ApplicationDependenciesConfig.md | 14 + .../models/docs/ApplicationHarnessConfig.md | 29 + libraries/models/docs/ApplicationProbe.md | 15 + .../models/docs/ApplicationsConfigsMap.md | 11 + libraries/models/docs/AutoArtifactSpec.md | 13 + libraries/models/docs/BackupConfig.md | 20 + libraries/models/docs/CDCEvent.md | 17 + libraries/models/docs/CDCEventMeta.md | 16 + libraries/models/docs/CpuMemoryConfig.md | 13 + .../models/docs/DatabaseDeploymentConfig.md | 21 + .../docs/DatabaseDeploymentConfigAllOf.md | 19 + .../docs/DeploymentAutoArtifactConfig.md | 17 + .../models/docs/DeploymentResourcesConf.md | 13 + libraries/models/docs/FileResourcesConfig.md | 14 + libraries/models/docs/Filename.md | 11 + libraries/models/docs/FreeObject.md | 11 + libraries/models/docs/HarnessMainConfig.md | 24 + libraries/models/docs/IngressConfig.md | 15 + libraries/models/docs/IngressConfigAllOf.md | 13 + .../docs/IngressConfigAllOfLetsencrypt.md | 12 + libraries/models/docs/JupyterHubConfig.md | 15 + libraries/models/docs/NameValue.md | 13 + libraries/models/docs/PathSpecifier.md | 11 + libraries/models/docs/RegistryConfig.md | 13 + .../models/docs/ServiceAutoArtifactConfig.md | 14 + .../docs/ServiceAutoArtifactConfigAllOf.md | 12 + libraries/models/docs/SimpleMap.md | 11 + libraries/models/docs/UriRoleMappingConfig.md | 14 + libraries/models/docs/User.md | 30 + libraries/models/docs/UserCredential.md | 20 + libraries/models/docs/UserGroup.md | 19 + libraries/models/git_push.sh | 58 + libraries/models/requirements.txt | 11 + libraries/models/setup.cfg | 2 + libraries/models/setup.py | 10 + 
libraries/models/test-requirements.txt | 4 + libraries/models/test/__init__.py | 0 libraries/models/test/resources/values.yaml | 652 ++++++ libraries/models/test/test_helm_values.py | 15 + libraries/models/tox.ini | 11 + 86 files changed, 11141 insertions(+) create mode 100644 libraries/models/.gitignore create mode 100644 libraries/models/.gitlab-ci.yml create mode 100644 libraries/models/.openapi-generator-ignore create mode 100644 libraries/models/.travis.yml create mode 100644 libraries/models/README.md create mode 100644 libraries/models/cloudharness_model/__init__.py create mode 100644 libraries/models/cloudharness_model/api_client.py create mode 100644 libraries/models/cloudharness_model/configuration.py create mode 100644 libraries/models/cloudharness_model/encoder.py create mode 100644 libraries/models/cloudharness_model/exceptions.py create mode 100644 libraries/models/cloudharness_model/models/__init__.py create mode 100644 libraries/models/cloudharness_model/models/application_config.py create mode 100644 libraries/models/cloudharness_model/models/application_dependencies_config.py create mode 100644 libraries/models/cloudharness_model/models/application_harness_config.py create mode 100644 libraries/models/cloudharness_model/models/application_probe.py create mode 100644 libraries/models/cloudharness_model/models/auto_artifact_spec.py create mode 100644 libraries/models/cloudharness_model/models/backup_config.py create mode 100644 libraries/models/cloudharness_model/models/base_model_.py create mode 100644 libraries/models/cloudharness_model/models/cdc_event.py create mode 100644 libraries/models/cloudharness_model/models/cdc_event_meta.py create mode 100644 libraries/models/cloudharness_model/models/cpu_memory_config.py create mode 100644 libraries/models/cloudharness_model/models/database_deployment_config.py create mode 100644 libraries/models/cloudharness_model/models/database_deployment_config_all_of.py create mode 100644 
libraries/models/cloudharness_model/models/deployment_auto_artifact_config.py create mode 100644 libraries/models/cloudharness_model/models/deployment_resources_conf.py create mode 100644 libraries/models/cloudharness_model/models/file_resources_config.py create mode 100644 libraries/models/cloudharness_model/models/harness_main_config.py create mode 100644 libraries/models/cloudharness_model/models/ingress_config.py create mode 100644 libraries/models/cloudharness_model/models/ingress_config_all_of.py create mode 100644 libraries/models/cloudharness_model/models/ingress_config_all_of_letsencrypt.py create mode 100644 libraries/models/cloudharness_model/models/jupyter_hub_config.py create mode 100644 libraries/models/cloudharness_model/models/name_value.py create mode 100644 libraries/models/cloudharness_model/models/orchestration_event.py create mode 100644 libraries/models/cloudharness_model/models/orchestration_event_meta.py create mode 100644 libraries/models/cloudharness_model/models/registry_config.py create mode 100644 libraries/models/cloudharness_model/models/service_auto_artifact_config.py create mode 100644 libraries/models/cloudharness_model/models/service_auto_artifact_config_all_of.py create mode 100644 libraries/models/cloudharness_model/models/uri_role_mapping_config.py create mode 100644 libraries/models/cloudharness_model/models/user.py create mode 100644 libraries/models/cloudharness_model/models/user_credential.py create mode 100644 libraries/models/cloudharness_model/models/user_group.py create mode 100644 libraries/models/cloudharness_model/rest.py create mode 100644 libraries/models/cloudharness_model/schemas.py create mode 100644 libraries/models/cloudharness_model/typing_utils.py create mode 100644 libraries/models/cloudharness_model/util.py create mode 100644 libraries/models/docs/ApplicationConfig.md create mode 100644 libraries/models/docs/ApplicationDependenciesConfig.md create mode 100644 
libraries/models/docs/ApplicationHarnessConfig.md create mode 100644 libraries/models/docs/ApplicationProbe.md create mode 100644 libraries/models/docs/ApplicationsConfigsMap.md create mode 100644 libraries/models/docs/AutoArtifactSpec.md create mode 100644 libraries/models/docs/BackupConfig.md create mode 100644 libraries/models/docs/CDCEvent.md create mode 100644 libraries/models/docs/CDCEventMeta.md create mode 100644 libraries/models/docs/CpuMemoryConfig.md create mode 100644 libraries/models/docs/DatabaseDeploymentConfig.md create mode 100644 libraries/models/docs/DatabaseDeploymentConfigAllOf.md create mode 100644 libraries/models/docs/DeploymentAutoArtifactConfig.md create mode 100644 libraries/models/docs/DeploymentResourcesConf.md create mode 100644 libraries/models/docs/FileResourcesConfig.md create mode 100644 libraries/models/docs/Filename.md create mode 100644 libraries/models/docs/FreeObject.md create mode 100644 libraries/models/docs/HarnessMainConfig.md create mode 100644 libraries/models/docs/IngressConfig.md create mode 100644 libraries/models/docs/IngressConfigAllOf.md create mode 100644 libraries/models/docs/IngressConfigAllOfLetsencrypt.md create mode 100644 libraries/models/docs/JupyterHubConfig.md create mode 100644 libraries/models/docs/NameValue.md create mode 100644 libraries/models/docs/PathSpecifier.md create mode 100644 libraries/models/docs/RegistryConfig.md create mode 100644 libraries/models/docs/ServiceAutoArtifactConfig.md create mode 100644 libraries/models/docs/ServiceAutoArtifactConfigAllOf.md create mode 100644 libraries/models/docs/SimpleMap.md create mode 100644 libraries/models/docs/UriRoleMappingConfig.md create mode 100644 libraries/models/docs/User.md create mode 100644 libraries/models/docs/UserCredential.md create mode 100644 libraries/models/docs/UserGroup.md create mode 100644 libraries/models/git_push.sh create mode 100644 libraries/models/requirements.txt create mode 100644 libraries/models/setup.cfg create mode 
100644 libraries/models/setup.py create mode 100644 libraries/models/test-requirements.txt create mode 100644 libraries/models/test/__init__.py create mode 100644 libraries/models/test/resources/values.yaml create mode 100644 libraries/models/test/test_helm_values.py create mode 100644 libraries/models/tox.ini diff --git a/libraries/models/.gitignore b/libraries/models/.gitignore new file mode 100644 index 00000000..43995bd4 --- /dev/null +++ b/libraries/models/.gitignore @@ -0,0 +1,66 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*,cover +.hypothesis/ +venv/ +.venv/ +.python-version +.pytest_cache + +# Translations +*.mo +*.pot + +# Django stuff: +*.log + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +#Ipython Notebook +.ipynb_checkpoints diff --git a/libraries/models/.gitlab-ci.yml b/libraries/models/.gitlab-ci.yml new file mode 100644 index 00000000..1bdad293 --- /dev/null +++ b/libraries/models/.gitlab-ci.yml @@ -0,0 +1,24 @@ +# ref: https://docs.gitlab.com/ee/ci/README.html + +stages: + - test + +.tests: + stage: test + script: + - pip install -r requirements.txt + - pip install -r test-requirements.txt + - pytest --cov=cloudharness_model + +test-3.6: + extends: .tests + image: python:3.6-alpine +test-3.7: + extends: .tests + image: python:3.7-alpine +test-3.8: + extends: .tests + image: python:3.8-alpine +test-3.9: + extends: .tests + image: 
python:3.9-alpine diff --git a/libraries/models/.openapi-generator-ignore b/libraries/models/.openapi-generator-ignore new file mode 100644 index 00000000..996bece9 --- /dev/null +++ b/libraries/models/.openapi-generator-ignore @@ -0,0 +1,35 @@ +# OpenAPI Generator Ignore +# Generated by openapi-generator https://github.com/openapitools/openapi-generator + +# Use this file to prevent files from being overwritten by the generator. +# The patterns follow closely to .gitignore or .dockerignore. + +# As an example, the C# client generator defines ApiClient.cs. +# You can make changes and tell OpenAPI Generator to ignore just this file by uncommenting the following line: +#ApiClient.cs + +# You can match any string of characters against a directory, file or extension with a single asterisk (*): +#foo/*/qux +# The above matches foo/bar/qux and foo/baz/qux, but not foo/bar/baz/qux + +# You can recursively match patterns against a directory, file or extension with a double asterisk (**): +#foo/**/qux +# This matches foo/bar/qux, foo/baz/qux, and foo/bar/baz/qux + +# You can also negate patterns with an exclamation (!). 
+# For example, you can ignore all files in a docs folder with the file extension .md: +#docs/*.md +# Then explicitly reverse the ignore rule for a single file: +#!docs/README.md +test/* +**/controllers/* +Dockerfile +**/__main__.py +**/openapi/* +setup.py +**/test/* +.dockerignore +*/__init__.py +encoder.py +*.sh +util.py \ No newline at end of file diff --git a/libraries/models/.travis.yml b/libraries/models/.travis.yml new file mode 100644 index 00000000..ad71ee5c --- /dev/null +++ b/libraries/models/.travis.yml @@ -0,0 +1,14 @@ +# ref: https://docs.travis-ci.com/user/languages/python +language: python +python: + - "3.2" + - "3.3" + - "3.4" + - "3.5" + - "3.6" + - "3.7" + - "3.8" +# command to install dependencies +install: "pip install -r requirements.txt" +# command to run tests +script: nosetests diff --git a/libraries/models/README.md b/libraries/models/README.md new file mode 100644 index 00000000..4d60f9b9 --- /dev/null +++ b/libraries/models/README.md @@ -0,0 +1,49 @@ +# OpenAPI generated server + +## Overview +This server was generated by the [OpenAPI Generator](https://openapi-generator.tech) project. By using the +[OpenAPI-Spec](https://openapis.org) from a remote server, you can easily generate a server stub. This +is an example of building a OpenAPI-enabled Flask server. + +This example uses the [Connexion](https://github.com/zalando/connexion) library on top of Flask. 
+ +## Requirements +Python 3.5.2+ + +## Usage +To run the server, please execute the following from the root directory: + +``` +pip3 install -r requirements.txt +python3 -m cloudharness_model +``` + +and open your browser to here: + +``` +http://localhost:8080/ui/ +``` + +Your OpenAPI definition lives here: + +``` +http://localhost:8080/openapi.json +``` + +To launch the integration tests, use tox: +``` +sudo pip install tox +tox +``` + +## Running with Docker + +To run the server on a Docker container, please execute the following from the root directory: + +```bash +# building the image +docker build -t cloudharness_model . + +# starting up a container +docker run -p 8080:8080 cloudharness_model +``` \ No newline at end of file diff --git a/libraries/models/cloudharness_model/__init__.py b/libraries/models/cloudharness_model/__init__.py new file mode 100644 index 00000000..cf4f59d6 --- /dev/null +++ b/libraries/models/cloudharness_model/__init__.py @@ -0,0 +1 @@ +from .models import * \ No newline at end of file diff --git a/libraries/models/cloudharness_model/api_client.py b/libraries/models/cloudharness_model/api_client.py new file mode 100644 index 00000000..0093392e --- /dev/null +++ b/libraries/models/cloudharness_model/api_client.py @@ -0,0 +1,860 @@ +""" + cloudharness + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 + + The version of the OpenAPI document: 1.0.0 + Generated by: https://openapi-generator.tech +""" + + +import json +import atexit +import mimetypes +from multiprocessing.pool import ThreadPool +import io +import os +import re +import typing +from urllib.parse import quote +from urllib3.fields import RequestField + + +from cloudharness_model import rest +from cloudharness_model.configuration import Configuration +from cloudharness_model.exceptions import ApiTypeError, ApiValueError, ApiException +from cloudharness_model.model_utils import ( + ModelNormal, + 
ModelSimple, + ModelComposed, + check_allowed_values, + check_validations, + date, + datetime, + deserialize_file, + file_type, + model_to_dict, + none_type, + validate_and_convert_types +) + + +class ApiClient(object): + """Generic API client for OpenAPI client library builds. + + OpenAPI generic API client. This client handles the client- + server communication, and is invariant across implementations. Specifics of + the methods and models for each application are generated from the OpenAPI + templates. + + NOTE: This class is auto generated by OpenAPI Generator. + Ref: https://openapi-generator.tech + Do not edit the class manually. + + :param configuration: .Configuration object for this client + :param header_name: a header to pass when making calls to the API. + :param header_value: a header value to pass when making calls to + the API. + :param cookie: a cookie to include in the header when making calls + to the API + :param pool_threads: The number of threads to use for async requests + to the API. More threads means more concurrent API requests. + """ + + _pool = None + + def __init__(self, configuration=None, header_name=None, header_value=None, + cookie=None, pool_threads=1): + if configuration is None: + configuration = Configuration.get_default_copy() + self.configuration = configuration + self.pool_threads = pool_threads + + self.rest_client = rest.RESTClientObject(configuration) + self.default_headers = {} + if header_name is not None: + self.default_headers[header_name] = header_value + self.cookie = cookie + # Set default User-Agent. 
+ self.user_agent = 'OpenAPI-Generator/1.0.0/python' + + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def close(self): + if self._pool: + self._pool.close() + self._pool.join() + self._pool = None + if hasattr(atexit, 'unregister'): + atexit.unregister(self.close) + + @property + def pool(self): + """Create thread pool on first request + avoids instantiating unused threadpool for blocking clients. + """ + if self._pool is None: + atexit.register(self.close) + self._pool = ThreadPool(self.pool_threads) + return self._pool + + @property + def user_agent(self): + """User agent for this API client""" + return self.default_headers['User-Agent'] + + @user_agent.setter + def user_agent(self, value): + self.default_headers['User-Agent'] = value + + def set_default_header(self, header_name, header_value): + self.default_headers[header_name] = header_value + + def __call_api( + self, + resource_path: str, + method: str, + path_params: typing.Optional[typing.Dict[str, typing.Any]] = None, + query_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, + header_params: typing.Optional[typing.Dict[str, typing.Any]] = None, + body: typing.Optional[typing.Any] = None, + post_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, + files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None, + response_type: typing.Optional[typing.Tuple[typing.Any]] = None, + auth_settings: typing.Optional[typing.List[str]] = None, + _return_http_data_only: typing.Optional[bool] = None, + collection_formats: typing.Optional[typing.Dict[str, str]] = None, + _preload_content: bool = True, + _request_timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None, + _host: typing.Optional[str] = None, + _check_type: typing.Optional[bool] = None, + _content_type: typing.Optional[str] = None + ): + + config = self.configuration + + # header parameters + header_params = 
header_params or {} + header_params.update(self.default_headers) + if self.cookie: + header_params['Cookie'] = self.cookie + if header_params: + header_params = self.sanitize_for_serialization(header_params) + header_params = dict(self.parameters_to_tuples(header_params, + collection_formats)) + + # path parameters + if path_params: + path_params = self.sanitize_for_serialization(path_params) + path_params = self.parameters_to_tuples(path_params, + collection_formats) + for k, v in path_params: + # specified safe chars, encode everything + resource_path = resource_path.replace( + '{%s}' % k, + quote(str(v), safe=config.safe_chars_for_path_param) + ) + + # query parameters + if query_params: + query_params = self.sanitize_for_serialization(query_params) + query_params = self.parameters_to_tuples(query_params, + collection_formats) + + # post parameters + if post_params or files: + post_params = post_params if post_params else [] + post_params = self.sanitize_for_serialization(post_params) + post_params = self.parameters_to_tuples(post_params, + collection_formats) + post_params.extend(self.files_parameters(files)) + if header_params['Content-Type'].startswith("multipart"): + post_params = self.parameters_to_multipart(post_params, + (dict) ) + + # body + if body: + body = self.sanitize_for_serialization(body) + + # auth setting + self.update_params_for_auth(header_params, query_params, + auth_settings, resource_path, method, body) + + # request url + if _host is None: + url = self.configuration.host + resource_path + else: + # use server/host defined in path or operation instead + url = _host + resource_path + + try: + # perform request and return response + response_data = self.request( + method, url, query_params=query_params, headers=header_params, + post_params=post_params, body=body, + _preload_content=_preload_content, + _request_timeout=_request_timeout) + except ApiException as e: + e.body = e.body.decode('utf-8') + raise e + + self.last_response = 
response_data + + return_data = response_data + + if not _preload_content: + return (return_data) + return return_data + + # deserialize response data + if response_type: + if response_type != (file_type,): + encoding = "utf-8" + content_type = response_data.getheader('content-type') + if content_type is not None: + match = re.search(r"charset=([a-zA-Z\-\d]+)[\s\;]?", content_type) + if match: + encoding = match.group(1) + response_data.data = response_data.data.decode(encoding) + + return_data = self.deserialize( + response_data, + response_type, + _check_type + ) + else: + return_data = None + + if _return_http_data_only: + return (return_data) + else: + return (return_data, response_data.status, + response_data.getheaders()) + + def parameters_to_multipart(self, params, collection_types): + """Get parameters as list of tuples, formatting as json if value is collection_types + + :param params: Parameters as list of two-tuples + :param dict collection_types: Parameter collection types + :return: Parameters as list of tuple or urllib3.fields.RequestField + """ + new_params = [] + if collection_types is None: + collection_types = (dict) + for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 + if isinstance(v, collection_types): # v is instance of collection_type, formatting as application/json + v = json.dumps(v, ensure_ascii=False).encode("utf-8") + field = RequestField(k, v) + field.make_multipart(content_type="application/json; charset=utf-8") + new_params.append(field) + else: + new_params.append((k, v)) + return new_params + + @classmethod + def sanitize_for_serialization(cls, obj): + """Prepares data for transmission before it is sent with the rest client + If obj is None, return None. + If obj is str, int, long, float, bool, return directly. + If obj is datetime.datetime, datetime.date + convert to string in iso8601 format. + If obj is list, sanitize each element in the list. + If obj is dict, return the dict. 
+ If obj is OpenAPI model, return the properties dict. + If obj is io.IOBase, return the bytes + :param obj: The data to serialize. + :return: The serialized form of data. + """ + if isinstance(obj, (ModelNormal, ModelComposed)): + return { + key: cls.sanitize_for_serialization(val) for key, val in model_to_dict(obj, serialize=True).items() + } + elif isinstance(obj, io.IOBase): + return cls.get_file_data_and_close_file(obj) + elif isinstance(obj, (str, int, float, none_type, bool)): + return obj + elif isinstance(obj, (datetime, date)): + return obj.isoformat() + elif isinstance(obj, ModelSimple): + return cls.sanitize_for_serialization(obj.value) + elif isinstance(obj, (list, tuple)): + return [cls.sanitize_for_serialization(item) for item in obj] + if isinstance(obj, dict): + return {key: cls.sanitize_for_serialization(val) for key, val in obj.items()} + raise ApiValueError('Unable to prepare type {} for serialization'.format(obj.__class__.__name__)) + + def deserialize(self, response, response_type, _check_type): + """Deserializes response into an object. + + :param response: RESTResponse object to be deserialized. + :param response_type: For the response, a tuple containing: + valid classes + a list containing valid classes (for list schemas) + a dict containing a tuple of valid classes as the value + Example values: + (str,) + (Pet,) + (float, none_type) + ([int, none_type],) + ({str: (bool, str, int, float, date, datetime, str, none_type)},) + :param _check_type: boolean, whether to check the types of the data + received from the server + :type _check_type: bool + + :return: deserialized object. 
+ """ + # handle file downloading + # save response body into a tmp file and return the instance + if response_type == (file_type,): + content_disposition = response.getheader("Content-Disposition") + return deserialize_file(response.data, self.configuration, + content_disposition=content_disposition) + + # fetch data from response object + try: + received_data = json.loads(response.data) + except ValueError: + received_data = response.data + + # store our data under the key of 'received_data' so users have some + # context if they are deserializing a string and the data type is wrong + deserialized_data = validate_and_convert_types( + received_data, + response_type, + ['received_data'], + True, + _check_type, + configuration=self.configuration + ) + return deserialized_data + + def call_api( + self, + resource_path: str, + method: str, + path_params: typing.Optional[typing.Dict[str, typing.Any]] = None, + query_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, + header_params: typing.Optional[typing.Dict[str, typing.Any]] = None, + body: typing.Optional[typing.Any] = None, + post_params: typing.Optional[typing.List[typing.Tuple[str, typing.Any]]] = None, + files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None, + response_type: typing.Optional[typing.Tuple[typing.Any]] = None, + auth_settings: typing.Optional[typing.List[str]] = None, + async_req: typing.Optional[bool] = None, + _return_http_data_only: typing.Optional[bool] = None, + collection_formats: typing.Optional[typing.Dict[str, str]] = None, + _preload_content: bool = True, + _request_timeout: typing.Optional[typing.Union[int, float, typing.Tuple]] = None, + _host: typing.Optional[str] = None, + _check_type: typing.Optional[bool] = None + ): + """Makes the HTTP request (synchronous) and returns deserialized data. + + To make an async_req request, set the async_req parameter. + + :param resource_path: Path to method endpoint. + :param method: Method to call. 
+ :param path_params: Path parameters in the url. + :param query_params: Query parameters in the url. + :param header_params: Header parameters to be + placed in the request header. + :param body: Request body. + :param post_params dict: Request post form parameters, + for `application/x-www-form-urlencoded`, `multipart/form-data`. + :param auth_settings list: Auth Settings names for the request. + :param response_type: For the response, a tuple containing: + valid classes + a list containing valid classes (for list schemas) + a dict containing a tuple of valid classes as the value + Example values: + (str,) + (Pet,) + (float, none_type) + ([int, none_type],) + ({str: (bool, str, int, float, date, datetime, str, none_type)},) + :param files: key -> field name, value -> a list of open file + objects for `multipart/form-data`. + :type files: dict + :param async_req bool: execute request asynchronously + :type async_req: bool, optional + :param _return_http_data_only: response data without head status code + and headers + :type _return_http_data_only: bool, optional + :param collection_formats: dict of collection formats for path, query, + header, and post parameters. + :type collection_formats: dict, optional + :param _preload_content: if False, the urllib3.HTTPResponse object will + be returned without reading/decoding response + data. Default is True. + :type _preload_content: bool, optional + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :param _check_type: boolean describing if the data back from the server + should have its type checked. + :type _check_type: bool, optional + :return: + If async_req parameter is True, + the request will be called asynchronously. + The method will return the request thread. + If parameter async_req is False or missing, + then the method will return the response directly. 
+ """ + if not async_req: + return self.__call_api(resource_path, method, + path_params, query_params, header_params, + body, post_params, files, + response_type, auth_settings, + _return_http_data_only, collection_formats, + _preload_content, _request_timeout, _host, + _check_type) + + return self.pool.apply_async(self.__call_api, (resource_path, + method, path_params, + query_params, + header_params, body, + post_params, files, + response_type, + auth_settings, + _return_http_data_only, + collection_formats, + _preload_content, + _request_timeout, + _host, _check_type)) + + def request(self, method, url, query_params=None, headers=None, + post_params=None, body=None, _preload_content=True, + _request_timeout=None): + """Makes the HTTP request using RESTClient.""" + if method == "GET": + return self.rest_client.GET(url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers) + elif method == "HEAD": + return self.rest_client.HEAD(url, + query_params=query_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + headers=headers) + elif method == "OPTIONS": + return self.rest_client.OPTIONS(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + elif method == "POST": + return self.rest_client.POST(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + elif method == "PUT": + return self.rest_client.PUT(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + elif method == "PATCH": + return self.rest_client.PATCH(url, + query_params=query_params, + headers=headers, + post_params=post_params, + _preload_content=_preload_content, + 
_request_timeout=_request_timeout, + body=body) + elif method == "DELETE": + return self.rest_client.DELETE(url, + query_params=query_params, + headers=headers, + _preload_content=_preload_content, + _request_timeout=_request_timeout, + body=body) + else: + raise ApiValueError( + "http method must be `GET`, `HEAD`, `OPTIONS`," + " `POST`, `PATCH`, `PUT` or `DELETE`." + ) + + def parameters_to_tuples(self, params, collection_formats): + """Get parameters as list of tuples, formatting collections. + + :param params: Parameters as dict or list of two-tuples + :param dict collection_formats: Parameter collection formats + :return: Parameters as list of tuples, collections formatted + """ + new_params = [] + if collection_formats is None: + collection_formats = {} + for k, v in params.items() if isinstance(params, dict) else params: # noqa: E501 + if k in collection_formats: + collection_format = collection_formats[k] + if collection_format == 'multi': + new_params.extend((k, value) for value in v) + else: + if collection_format == 'ssv': + delimiter = ' ' + elif collection_format == 'tsv': + delimiter = '\t' + elif collection_format == 'pipes': + delimiter = '|' + else: # csv is the default + delimiter = ',' + new_params.append( + (k, delimiter.join(str(value) for value in v))) + else: + new_params.append((k, v)) + return new_params + + @staticmethod + def get_file_data_and_close_file(file_instance: io.IOBase) -> bytes: + file_data = file_instance.read() + file_instance.close() + return file_data + + def files_parameters(self, files: typing.Optional[typing.Dict[str, typing.List[io.IOBase]]] = None): + """Builds form parameters. 
+ + :param files: None or a dict with key=param_name and + value is a list of open file objects + :return: List of tuples of form parameters with file data + """ + if files is None: + return [] + + params = [] + for param_name, file_instances in files.items(): + if file_instances is None: + # if the file field is nullable, skip None values + continue + for file_instance in file_instances: + if file_instance is None: + # if the file field is nullable, skip None values + continue + if file_instance.closed is True: + raise ApiValueError( + "Cannot read a closed file. The passed in file_type " + "for %s must be open." % param_name + ) + filename = os.path.basename(file_instance.name) + filedata = self.get_file_data_and_close_file(file_instance) + mimetype = (mimetypes.guess_type(filename)[0] or + 'application/octet-stream') + params.append( + tuple([param_name, tuple([filename, filedata, mimetype])])) + + return params + + def select_header_accept(self, accepts): + """Returns `Accept` based on an array of accepts provided. + + :param accepts: List of headers. + :return: Accept (e.g. application/json). + """ + if not accepts: + return + + accepts = [x.lower() for x in accepts] + + if 'application/json' in accepts: + return 'application/json' + else: + return ', '.join(accepts) + + def select_header_content_type(self, content_types, method=None, body=None): + """Returns `Content-Type` based on an array of content_types provided. + + :param content_types: List of content-types. + :param method: http method (e.g. POST, PATCH). + :param body: http body to send. + :return: Content-Type (e.g. application/json). 
+ """ + if not content_types: + return 'application/json' + + content_types = [x.lower() for x in content_types] + + if (method == 'PATCH' and + 'application/json-patch+json' in content_types and + isinstance(body, list)): + return 'application/json-patch+json' + + if 'application/json' in content_types or '*/*' in content_types: + return 'application/json' + else: + return content_types[0] + + def update_params_for_auth(self, headers, queries, auth_settings, + resource_path, method, body): + """Updates header and query params based on authentication setting. + + :param headers: Header parameters dict to be updated. + :param queries: Query parameters tuple list to be updated. + :param auth_settings: Authentication setting identifiers list. + :param resource_path: A string representation of the HTTP request resource path. + :param method: A string representation of the HTTP request method. + :param body: A object representing the body of the HTTP request. + The object type is the return value of _encoder.default(). 
+ """ + if not auth_settings: + return + + for auth in auth_settings: + auth_setting = self.configuration.auth_settings().get(auth) + if auth_setting: + if auth_setting['in'] == 'cookie': + headers['Cookie'] = auth_setting['value'] + elif auth_setting['in'] == 'header': + if auth_setting['type'] != 'http-signature': + headers[auth_setting['key']] = auth_setting['value'] + elif auth_setting['in'] == 'query': + queries.append((auth_setting['key'], auth_setting['value'])) + else: + raise ApiValueError( + 'Authentication token must be in `query` or `header`' + ) + + +class Endpoint(object): + def __init__(self, settings=None, params_map=None, root_map=None, + headers_map=None, api_client=None, callable=None): + """Creates an endpoint + + Args: + settings (dict): see below key value pairs + 'response_type' (tuple/None): response type + 'auth' (list): a list of auth type keys + 'endpoint_path' (str): the endpoint path + 'operation_id' (str): endpoint string identifier + 'http_method' (str): POST/PUT/PATCH/GET etc + 'servers' (list): list of str servers that this endpoint is at + params_map (dict): see below key value pairs + 'all' (list): list of str endpoint parameter names + 'required' (list): list of required parameter names + 'nullable' (list): list of nullable parameter names + 'enum' (list): list of parameters with enum values + 'validation' (list): list of parameters with validations + root_map + 'validations' (dict): the dict mapping endpoint parameter tuple + paths to their validation dictionaries + 'allowed_values' (dict): the dict mapping endpoint parameter + tuple paths to their allowed_values (enum) dictionaries + 'openapi_types' (dict): param_name to openapi type + 'attribute_map' (dict): param_name to camelCase name + 'location_map' (dict): param_name to 'body', 'file', 'form', + 'header', 'path', 'query' + collection_format_map (dict): param_name to `csv` etc. 
+ headers_map (dict): see below key value pairs + 'accept' (list): list of Accept header strings + 'content_type' (list): list of Content-Type header strings + api_client (ApiClient) api client instance + callable (function): the function which is invoked when the + Endpoint is called + """ + self.settings = settings + self.params_map = params_map + self.params_map['all'].extend([ + 'async_req', + '_host_index', + '_preload_content', + '_request_timeout', + '_return_http_data_only', + '_check_input_type', + '_check_return_type', + '_content_type', + '_spec_property_naming' + ]) + self.params_map['nullable'].extend(['_request_timeout']) + self.validations = root_map['validations'] + self.allowed_values = root_map['allowed_values'] + self.openapi_types = root_map['openapi_types'] + extra_types = { + 'async_req': (bool,), + '_host_index': (none_type, int), + '_preload_content': (bool,), + '_request_timeout': (none_type, float, (float,), [float], int, (int,), [int]), + '_return_http_data_only': (bool,), + '_check_input_type': (bool,), + '_check_return_type': (bool,), + '_spec_property_naming': (bool,), + '_content_type': (none_type, str) + } + self.openapi_types.update(extra_types) + self.attribute_map = root_map['attribute_map'] + self.location_map = root_map['location_map'] + self.collection_format_map = root_map['collection_format_map'] + self.headers_map = headers_map + self.api_client = api_client + self.callable = callable + + def __validate_inputs(self, kwargs): + for param in self.params_map['enum']: + if param in kwargs: + check_allowed_values( + self.allowed_values, + (param,), + kwargs[param] + ) + + for param in self.params_map['validation']: + if param in kwargs: + check_validations( + self.validations, + (param,), + kwargs[param], + configuration=self.api_client.configuration + ) + + if kwargs['_check_input_type'] is False: + return + + for key, value in kwargs.items(): + fixed_val = validate_and_convert_types( + value, + self.openapi_types[key], + [key], 
+ kwargs['_spec_property_naming'], + kwargs['_check_input_type'], + configuration=self.api_client.configuration + ) + kwargs[key] = fixed_val + + def __gather_params(self, kwargs): + params = { + 'body': None, + 'collection_format': {}, + 'file': {}, + 'form': [], + 'header': {}, + 'path': {}, + 'query': [] + } + + for param_name, param_value in kwargs.items(): + param_location = self.location_map.get(param_name) + if param_location is None: + continue + if param_location: + if param_location == 'body': + params['body'] = param_value + continue + base_name = self.attribute_map[param_name] + if (param_location == 'form' and + self.openapi_types[param_name] == (file_type,)): + params['file'][base_name] = [param_value] + elif (param_location == 'form' and + self.openapi_types[param_name] == ([file_type],)): + # param_value is already a list + params['file'][base_name] = param_value + elif param_location in {'form', 'query'}: + param_value_full = (base_name, param_value) + params[param_location].append(param_value_full) + if param_location not in {'form', 'query'}: + params[param_location][base_name] = param_value + collection_format = self.collection_format_map.get(param_name) + if collection_format: + params['collection_format'][base_name] = collection_format + + return params + + def __call__(self, *args, **kwargs): + """ This method is invoked when endpoints are called + Example: + + """ + return self.callable(self, *args, **kwargs) + + def call_with_http_info(self, **kwargs): + + try: + index = self.api_client.configuration.server_operation_index.get( + self.settings['operation_id'], self.api_client.configuration.server_index + ) if kwargs['_host_index'] is None else kwargs['_host_index'] + server_variables = self.api_client.configuration.server_operation_variables.get( + self.settings['operation_id'], self.api_client.configuration.server_variables + ) + _host = self.api_client.configuration.get_host_from_settings( + index, variables=server_variables, 
servers=self.settings['servers'] + ) + except IndexError: + if self.settings['servers']: + raise ApiValueError( + "Invalid host index. Must be 0 <= index < %s" % + len(self.settings['servers']) + ) + _host = None + + for key, value in kwargs.items(): + if key not in self.params_map['all']: + raise ApiTypeError( + "Got an unexpected parameter '%s'" + " to method `%s`" % + (key, self.settings['operation_id']) + ) + # only throw this nullable ApiValueError if _check_input_type + # is False, if _check_input_type==True we catch this case + # in self.__validate_inputs + if (key not in self.params_map['nullable'] and value is None + and kwargs['_check_input_type'] is False): + raise ApiValueError( + "Value may not be None for non-nullable parameter `%s`" + " when calling `%s`" % + (key, self.settings['operation_id']) + ) + + for key in self.params_map['required']: + if key not in kwargs.keys(): + raise ApiValueError( + "Missing the required parameter `%s` when calling " + "`%s`" % (key, self.settings['operation_id']) + ) + + self.__validate_inputs(kwargs) + + params = self.__gather_params(kwargs) + + accept_headers_list = self.headers_map['accept'] + if accept_headers_list: + params['header']['Accept'] = self.api_client.select_header_accept( + accept_headers_list) + + if kwargs.get('_content_type'): + params['header']['Content-Type'] = kwargs['_content_type'] + else: + content_type_headers_list = self.headers_map['content_type'] + if content_type_headers_list: + if params['body'] != "": + header_list = self.api_client.select_header_content_type( + content_type_headers_list, self.settings['http_method'], + params['body']) + params['header']['Content-Type'] = header_list + + return self.api_client.call_api( + self.settings['endpoint_path'], self.settings['http_method'], + params['path'], + params['query'], + params['header'], + body=params['body'], + post_params=params['form'], + files=params['file'], + response_type=self.settings['response_type'], + 
auth_settings=self.settings['auth'], + async_req=kwargs['async_req'], + _check_type=kwargs['_check_return_type'], + _return_http_data_only=kwargs['_return_http_data_only'], + _preload_content=kwargs['_preload_content'], + _request_timeout=kwargs['_request_timeout'], + _host=_host, + collection_formats=params['collection_format']) diff --git a/libraries/models/cloudharness_model/configuration.py b/libraries/models/cloudharness_model/configuration.py new file mode 100644 index 00000000..48470064 --- /dev/null +++ b/libraries/models/cloudharness_model/configuration.py @@ -0,0 +1,446 @@ +""" + cloudharness + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 + + The version of the OpenAPI document: 1.0.0 + Generated by: https://openapi-generator.tech +""" + + +import copy +import logging +import multiprocessing +import sys +import urllib3 + +from http import client as http_client +from cloudharness_model.exceptions import ApiValueError + + +JSON_SCHEMA_VALIDATION_KEYWORDS = { + 'multipleOf', 'maximum', 'exclusiveMaximum', + 'minimum', 'exclusiveMinimum', 'maxLength', + 'minLength', 'pattern', 'maxItems', 'minItems' +} + +class Configuration(object): + """NOTE: This class is auto generated by OpenAPI Generator + + Ref: https://openapi-generator.tech + Do not edit the class manually. + + :param host: Base url + :param api_key: Dict to store API key(s). + Each entry in the dict specifies an API key. + The dict key is the name of the security scheme in the OAS specification. + The dict value is the API key secret. + :param api_key_prefix: Dict to store API prefix (e.g. Bearer) + The dict key is the name of the security scheme in the OAS specification. + The dict value is an API key prefix when generating the auth data. 
+ :param username: Username for HTTP basic authentication + :param password: Password for HTTP basic authentication + :param discard_unknown_keys: Boolean value indicating whether to discard + unknown properties. A server may send a response that includes additional + properties that are not known by the client in the following scenarios: + 1. The OpenAPI document is incomplete, i.e. it does not match the server + implementation. + 2. The client was generated using an older version of the OpenAPI document + and the server has been upgraded since then. + If a schema in the OpenAPI document defines the additionalProperties attribute, + then all undeclared properties received by the server are injected into the + additional properties map. In that case, there are undeclared properties, and + nothing to discard. + :param disabled_client_side_validations (string): Comma-separated list of + JSON schema validation keywords to disable JSON schema structural validation + rules. The following keywords may be specified: multipleOf, maximum, + exclusiveMaximum, minimum, exclusiveMinimum, maxLength, minLength, pattern, + maxItems, minItems. + By default, the validation is performed for data generated locally by the client + and data received from the server, independent of any validation performed by + the server side. If the input data does not satisfy the JSON schema validation + rules specified in the OpenAPI document, an exception is raised. + If disabled_client_side_validations is set, structural validation is + disabled. This can be useful to troubleshoot data validation problem, such as + when the OpenAPI document validation rules do not match the actual API data + received by the server. + :param server_index: Index to servers configuration. + :param server_variables: Mapping with string values to replace variables in + templated server configuration. The validation of enums is performed for + variables with defined enum values before. 
+ :param server_operation_index: Mapping from operation ID to an index to server + configuration. + :param server_operation_variables: Mapping from operation ID to a mapping with + string values to replace variables in templated server configuration. + The validation of enums is performed for variables with defined enum values before. + :param ssl_ca_cert: str - the path to a file of concatenated CA certificates + in PEM format + + """ + + _default = None + + def __init__(self, host=None, + api_key=None, api_key_prefix=None, + access_token=None, + username=None, password=None, + discard_unknown_keys=False, + disabled_client_side_validations="", + server_index=None, server_variables=None, + server_operation_index=None, server_operation_variables=None, + ssl_ca_cert=None, + ): + """Constructor + """ + self._base_path = "http://localhost" if host is None else host + """Default Base url + """ + self.server_index = 0 if server_index is None and host is None else server_index + self.server_operation_index = server_operation_index or {} + """Default server index + """ + self.server_variables = server_variables or {} + self.server_operation_variables = server_operation_variables or {} + """Default server variables + """ + self.temp_folder_path = None + """Temp file folder for downloading files + """ + # Authentication Settings + self.access_token = access_token + self.api_key = {} + if api_key: + self.api_key = api_key + """dict to store API key(s) + """ + self.api_key_prefix = {} + if api_key_prefix: + self.api_key_prefix = api_key_prefix + """dict to store API prefix (e.g. 
Bearer) + """ + self.refresh_api_key_hook = None + """function hook to refresh API key if expired + """ + self.username = username + """Username for HTTP basic authentication + """ + self.password = password + """Password for HTTP basic authentication + """ + self.discard_unknown_keys = discard_unknown_keys + self.disabled_client_side_validations = disabled_client_side_validations + self.logger = {} + """Logging Settings + """ + self.logger["package_logger"] = logging.getLogger("cloudharness_model") + self.logger["urllib3_logger"] = logging.getLogger("urllib3") + self.logger_format = '%(asctime)s %(levelname)s %(message)s' + """Log format + """ + self.logger_stream_handler = None + """Log stream handler + """ + self.logger_file_handler = None + """Log file handler + """ + self.logger_file = None + """Debug file location + """ + self.debug = False + """Debug switch + """ + + self.verify_ssl = True + """SSL/TLS verification + Set this to false to skip verifying SSL certificate when calling API + from https server. + """ + self.ssl_ca_cert = ssl_ca_cert + """Set this to customize the certificate file to verify the peer. + """ + self.cert_file = None + """client certificate file + """ + self.key_file = None + """client key file + """ + self.assert_hostname = None + """Set this to True/False to enable/disable SSL hostname verification. + """ + + self.connection_pool_maxsize = multiprocessing.cpu_count() * 5 + """urllib3 connection pool's maximum number of connections saved + per pool. urllib3 uses 1 connection as default value, but this is + not the best value when you are making a lot of possibly parallel + requests to the same host, which is often the case here. + cpu_count * 5 is used as default value to increase performance. + """ + + self.proxy = None + """Proxy URL + """ + self.no_proxy = None + """bypass proxy for host in the no_proxy list. 
+ """ + self.proxy_headers = None + """Proxy headers + """ + self.safe_chars_for_path_param = '' + """Safe chars for path_param + """ + self.retries = None + """Adding retries to override urllib3 default value 3 + """ + # Enable client side validation + self.client_side_validation = True + + # Options to pass down to the underlying urllib3 socket + self.socket_options = None + + def __deepcopy__(self, memo): + cls = self.__class__ + result = cls.__new__(cls) + memo[id(self)] = result + for k, v in self.__dict__.items(): + if k not in ('logger', 'logger_file_handler'): + setattr(result, k, copy.deepcopy(v, memo)) + # shallow copy of loggers + result.logger = copy.copy(self.logger) + # use setters to configure loggers + result.logger_file = self.logger_file + result.debug = self.debug + return result + + def __setattr__(self, name, value): + object.__setattr__(self, name, value) + if name == 'disabled_client_side_validations': + s = set(filter(None, value.split(','))) + for v in s: + if v not in JSON_SCHEMA_VALIDATION_KEYWORDS: + raise ApiValueError( + "Invalid keyword: '{0}''".format(v)) + self._disabled_client_side_validations = s + + @classmethod + def set_default(cls, default): + """Set default instance of configuration. + + It stores default configuration, which can be + returned by get_default_copy method. + + :param default: object of Configuration + """ + cls._default = copy.deepcopy(default) + + @classmethod + def get_default_copy(cls): + """Return new instance of configuration. + + This method returns newly created, based on default constructor, + object of Configuration class or returns a copy of default + configuration passed by the set_default method. + + :return: The configuration object. + """ + if cls._default is not None: + return copy.deepcopy(cls._default) + return Configuration() + + @property + def logger_file(self): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. 
Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + return self.__logger_file + + @logger_file.setter + def logger_file(self, value): + """The logger file. + + If the logger_file is None, then add stream handler and remove file + handler. Otherwise, add file handler and remove stream handler. + + :param value: The logger_file path. + :type: str + """ + self.__logger_file = value + if self.__logger_file: + # If set logging file, + # then add file handler and remove stream handler. + self.logger_file_handler = logging.FileHandler(self.__logger_file) + self.logger_file_handler.setFormatter(self.logger_formatter) + for _, logger in self.logger.items(): + logger.addHandler(self.logger_file_handler) + + @property + def debug(self): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + return self.__debug + + @debug.setter + def debug(self, value): + """Debug status + + :param value: The debug status, True or False. + :type: bool + """ + self.__debug = value + if self.__debug: + # if debug status is True, turn on debug logging + for _, logger in self.logger.items(): + logger.setLevel(logging.DEBUG) + # turn on http_client debug + http_client.HTTPConnection.debuglevel = 1 + else: + # if debug status is False, turn off debug logging, + # setting log level to default `logging.WARNING` + for _, logger in self.logger.items(): + logger.setLevel(logging.WARNING) + # turn off http_client debug + http_client.HTTPConnection.debuglevel = 0 + + @property + def logger_format(self): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. + :type: str + """ + return self.__logger_format + + @logger_format.setter + def logger_format(self, value): + """The logger format. + + The logger_formatter will be updated when sets logger_format. + + :param value: The format string. 
+ :type: str + """ + self.__logger_format = value + self.logger_formatter = logging.Formatter(self.__logger_format) + + def get_api_key_with_prefix(self, identifier, alias=None): + """Gets API key (with prefix if set). + + :param identifier: The identifier of apiKey. + :param alias: The alternative identifier of apiKey. + :return: The token for api key authentication. + """ + if self.refresh_api_key_hook is not None: + self.refresh_api_key_hook(self) + key = self.api_key.get(identifier, self.api_key.get(alias) if alias is not None else None) + if key: + prefix = self.api_key_prefix.get(identifier) + if prefix: + return "%s %s" % (prefix, key) + else: + return key + + def get_basic_auth_token(self): + """Gets HTTP basic authentication header (string). + + :return: The token for basic HTTP authentication. + """ + username = "" + if self.username is not None: + username = self.username + password = "" + if self.password is not None: + password = self.password + return urllib3.util.make_headers( + basic_auth=username + ':' + password + ).get('authorization') + + def auth_settings(self): + """Gets Auth Settings dict for api client. + + :return: The Auth Settings information dict. + """ + auth = {} + return auth + + def to_debug_report(self): + """Gets the essential information for debugging. + + :return: The report for debugging. 
+ """ + return "Python SDK Debug Report:\n"\ + "OS: {env}\n"\ + "Python Version: {pyversion}\n"\ + "Version of the API: 1.0.0\n"\ + "SDK Package Version: 1.0.0".\ + format(env=sys.platform, pyversion=sys.version) + + def get_host_settings(self): + """Gets an array of host settings + + :return: An array of host settings + """ + return [ + { + 'url': "", + 'description': "No description provided", + } + ] + + def get_host_from_settings(self, index, variables=None, servers=None): + """Gets host URL based on the index and variables + :param index: array index of the host settings + :param variables: hash of variable and the corresponding value + :param servers: an array of host settings or None + :return: URL based on host settings + """ + if index is None: + return self._base_path + + variables = {} if variables is None else variables + servers = self.get_host_settings() if servers is None else servers + + try: + server = servers[index] + except IndexError: + raise ValueError( + "Invalid index {0} when selecting the host settings. " + "Must be less than {1}".format(index, len(servers))) + + url = server['url'] + + # go through variables and replace placeholders + for variable_name, variable in server.get('variables', {}).items(): + used_value = variables.get( + variable_name, variable['default_value']) + + if 'enum_values' in variable \ + and used_value not in variable['enum_values']: + raise ValueError( + "The variable `{0}` in the host URL has invalid value " + "{1}. 
Must be {2}.".format( + variable_name, variables[variable_name], + variable['enum_values'])) + + url = url.replace("{" + variable_name + "}", used_value) + + return url + + @property + def host(self): + """Return generated host.""" + return self.get_host_from_settings(self.server_index, variables=self.server_variables) + + @host.setter + def host(self, value): + """Fix base path.""" + self._base_path = value + self.server_index = None diff --git a/libraries/models/cloudharness_model/encoder.py b/libraries/models/cloudharness_model/encoder.py new file mode 100644 index 00000000..1688b894 --- /dev/null +++ b/libraries/models/cloudharness_model/encoder.py @@ -0,0 +1,20 @@ +from connexion.apps.flask_app import FlaskJSONEncoder +import six + +from cloudharness_model.models.base_model_ import Model + + +class JSONEncoder(FlaskJSONEncoder): + include_nulls = False + + def default(self, o): + if isinstance(o, Model): + dikt = {} + for attr, _ in six.iteritems(o.openapi_types): + value = getattr(o, attr) + if value is None and not self.include_nulls: + continue + attr = o.attribute_map[attr] + dikt[attr] = value + return dikt + return FlaskJSONEncoder.default(self, o) diff --git a/libraries/models/cloudharness_model/exceptions.py b/libraries/models/cloudharness_model/exceptions.py new file mode 100644 index 00000000..281e3d1a --- /dev/null +++ b/libraries/models/cloudharness_model/exceptions.py @@ -0,0 +1,159 @@ +""" + cloudharness + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 + + The version of the OpenAPI document: 1.0.0 + Generated by: https://openapi-generator.tech +""" + + + +class OpenApiException(Exception): + """The base exception class for all OpenAPIExceptions""" + + +class ApiTypeError(OpenApiException, TypeError): + def __init__(self, msg, path_to_item=None, valid_classes=None, + key_type=None): + """ Raises an exception for TypeErrors + + Args: + msg (str): the exception message + 
+ Keyword Args: + path_to_item (list): a list of keys an indices to get to the + current_item + None if unset + valid_classes (tuple): the primitive classes that current item + should be an instance of + None if unset + key_type (bool): False if our value is a value in a dict + True if it is a key in a dict + False if our item is an item in a list + None if unset + """ + self.path_to_item = path_to_item + self.valid_classes = valid_classes + self.key_type = key_type + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiTypeError, self).__init__(full_msg) + + +class ApiValueError(OpenApiException, ValueError): + def __init__(self, msg, path_to_item=None): + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (list) the path to the exception in the + received_data dict. None if unset + """ + + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiValueError, self).__init__(full_msg) + + +class ApiAttributeError(OpenApiException, AttributeError): + def __init__(self, msg, path_to_item=None): + """ + Raised when an attribute reference or assignment fails. 
+ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiAttributeError, self).__init__(full_msg) + + +class ApiKeyError(OpenApiException, KeyError): + def __init__(self, msg, path_to_item=None): + """ + Args: + msg (str): the exception message + + Keyword Args: + path_to_item (None/list) the path to the exception in the + received_data dict + """ + self.path_to_item = path_to_item + full_msg = msg + if path_to_item: + full_msg = "{0} at {1}".format(msg, render_path(path_to_item)) + super(ApiKeyError, self).__init__(full_msg) + + +class ApiException(OpenApiException): + + def __init__(self, status=None, reason=None, http_resp=None): + if http_resp: + self.status = http_resp.status + self.reason = http_resp.reason + self.body = http_resp.data + self.headers = http_resp.getheaders() + else: + self.status = status + self.reason = reason + self.body = None + self.headers = None + + def __str__(self): + """Custom error messages for exception""" + error_message = "({0})\n"\ + "Reason: {1}\n".format(self.status, self.reason) + if self.headers: + error_message += "HTTP response headers: {0}\n".format( + self.headers) + + if self.body: + error_message += "HTTP response body: {0}\n".format(self.body) + + return error_message + + +class NotFoundException(ApiException): + + def __init__(self, status=None, reason=None, http_resp=None): + super(NotFoundException, self).__init__(status, reason, http_resp) + + +class UnauthorizedException(ApiException): + + def __init__(self, status=None, reason=None, http_resp=None): + super(UnauthorizedException, self).__init__(status, reason, http_resp) + + +class ForbiddenException(ApiException): + + def __init__(self, status=None, reason=None, http_resp=None): + super(ForbiddenException, 
self).__init__(status, reason, http_resp) + + +class ServiceException(ApiException): + + def __init__(self, status=None, reason=None, http_resp=None): + super(ServiceException, self).__init__(status, reason, http_resp) + + +def render_path(path_to_item): + """Returns a string representation of a path""" + result = "" + for pth in path_to_item: + if isinstance(pth, int): + result += "[{0}]".format(pth) + else: + result += "['{0}']".format(pth) + return result diff --git a/libraries/models/cloudharness_model/models/__init__.py b/libraries/models/cloudharness_model/models/__init__.py new file mode 100644 index 00000000..4fde01a7 --- /dev/null +++ b/libraries/models/cloudharness_model/models/__init__.py @@ -0,0 +1,32 @@ +# coding: utf-8 + +# flake8: noqa +from __future__ import absolute_import +# import models into model package +from cloudharness_model.models.application_config import ApplicationConfig +from cloudharness_model.models.application_dependencies_config import ApplicationDependenciesConfig +from cloudharness_model.models.application_harness_config import ApplicationHarnessConfig +from cloudharness_model.models.application_probe import ApplicationProbe +from cloudharness_model.models.auto_artifact_spec import AutoArtifactSpec +from cloudharness_model.models.backup_config import BackupConfig +from cloudharness_model.models.cdc_event import CDCEvent +from cloudharness_model.models.cdc_event_meta import CDCEventMeta +from cloudharness_model.models.cpu_memory_config import CpuMemoryConfig +from cloudharness_model.models.database_deployment_config import DatabaseDeploymentConfig +from cloudharness_model.models.database_deployment_config_all_of import DatabaseDeploymentConfigAllOf +from cloudharness_model.models.deployment_auto_artifact_config import DeploymentAutoArtifactConfig +from cloudharness_model.models.deployment_resources_conf import DeploymentResourcesConf +from cloudharness_model.models.file_resources_config import FileResourcesConfig +from 
class ApplicationConfig(Model):
    """Top-level configuration of a CloudHarness application.

    NOTE: This class is auto generated by OpenAPI Generator
    (https://openapi-generator.tech). Do not edit the class manually.
    """

    def __init__(self, harness=None):  # noqa: E501
        """ApplicationConfig - a model defined in OpenAPI

        :param harness: The harness of this ApplicationConfig.  # noqa: E501
        :type harness: ApplicationHarnessConfig
        """
        self.openapi_types = {'harness': ApplicationHarnessConfig}
        self.attribute_map = {'harness': 'harness'}
        self._harness = harness

    @classmethod
    def from_dict(cls, dikt) -> 'ApplicationConfig':
        """Deserialize a plain dict into an ApplicationConfig.

        :param dikt: A dict.
        :type: dict
        :rtype: ApplicationConfig
        """
        return util.deserialize_model(dikt, cls)

    @property
    def harness(self):
        """The ApplicationHarnessConfig of this application (required).

        :rtype: ApplicationHarnessConfig
        """
        return self._harness

    @harness.setter
    def harness(self, harness):
        # `harness` is a required field: explicit None assignments are rejected.
        if harness is None:
            raise ValueError("Invalid value for `harness`, must not be `None`")  # noqa: E501
        self._harness = harness
+ """ + + def __init__(self, hard=None, soft=None, build=None): # noqa: E501 + """ApplicationDependenciesConfig - a model defined in OpenAPI + + :param hard: The hard of this ApplicationDependenciesConfig. # noqa: E501 + :type hard: List[str] + :param soft: The soft of this ApplicationDependenciesConfig. # noqa: E501 + :type soft: List[str] + :param build: The build of this ApplicationDependenciesConfig. # noqa: E501 + :type build: List[str] + """ + self.openapi_types = { + 'hard': List[str], + 'soft': List[str], + 'build': List[str] + } + + self.attribute_map = { + 'hard': 'hard', + 'soft': 'soft', + 'build': 'build' + } + + self._hard = hard + self._soft = soft + self._build = build + + @classmethod + def from_dict(cls, dikt) -> 'ApplicationDependenciesConfig': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The ApplicationDependenciesConfig of this ApplicationDependenciesConfig. # noqa: E501 + :rtype: ApplicationDependenciesConfig + """ + return util.deserialize_model(dikt, cls) + + @property + def hard(self): + """Gets the hard of this ApplicationDependenciesConfig. + + Hard dependencies indicate that the application may not start without these other applications. # noqa: E501 + + :return: The hard of this ApplicationDependenciesConfig. + :rtype: List[str] + """ + return self._hard + + @hard.setter + def hard(self, hard): + """Sets the hard of this ApplicationDependenciesConfig. + + Hard dependencies indicate that the application may not start without these other applications. # noqa: E501 + + :param hard: The hard of this ApplicationDependenciesConfig. + :type hard: List[str] + """ + + self._hard = hard + + @property + def soft(self): + """Gets the soft of this ApplicationDependenciesConfig. + + Soft dependencies indicate that the application will work partially without these other applications. # noqa: E501 + + :return: The soft of this ApplicationDependenciesConfig. 
+ :rtype: List[str] + """ + return self._soft + + @soft.setter + def soft(self, soft): + """Sets the soft of this ApplicationDependenciesConfig. + + Soft dependencies indicate that the application will work partially without these other applications. # noqa: E501 + + :param soft: The soft of this ApplicationDependenciesConfig. + :type soft: List[str] + """ + + self._soft = soft + + @property + def build(self): + """Gets the build of this ApplicationDependenciesConfig. + + Hard dependencies indicate that the application Docker image build requires these base/common images # noqa: E501 + + :return: The build of this ApplicationDependenciesConfig. + :rtype: List[str] + """ + return self._build + + @build.setter + def build(self, build): + """Sets the build of this ApplicationDependenciesConfig. + + Hard dependencies indicate that the application Docker image build requires these base/common images # noqa: E501 + + :param build: The build of this ApplicationDependenciesConfig. + :type build: List[str] + """ + + self._build = build diff --git a/libraries/models/cloudharness_model/models/application_harness_config.py b/libraries/models/cloudharness_model/models/application_harness_config.py new file mode 100644 index 00000000..d5a00dfe --- /dev/null +++ b/libraries/models/cloudharness_model/models/application_harness_config.py @@ -0,0 +1,540 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.application_dependencies_config import ApplicationDependenciesConfig +from cloudharness_model.models.application_probe import ApplicationProbe +from cloudharness_model.models.database_deployment_config import DatabaseDeploymentConfig +from cloudharness_model.models.deployment_auto_artifact_config import DeploymentAutoArtifactConfig +from cloudharness_model.models.file_resources_config 
def _plain_property(attr, doc=None):
    """Build a pass-through property backed by ``self._<attr>`` (no validation)."""
    private = '_' + attr

    def fget(self):
        return getattr(self, private)

    def fset(self, value):
        setattr(self, private, value)

    return property(fget, fset, doc=doc)


class ApplicationHarnessConfig(Model):
    """Harness-level configuration of a CloudHarness application.

    NOTE: This class is auto generated by OpenAPI Generator
    (https://openapi-generator.tech). Do not edit the class manually.
    """

    def __init__(self, deployment=None, service=None, subdomain=None, aliases=None, domain=None, dependencies=None, secured=None, uri_role_mapping=None, secrets=None, use_services=None, database=None, resources=None, readiness_probe=None, startup_probe=None, liveness_probe=None, source_root=None, name=None, jupyterhub=None):  # noqa: E501
        """ApplicationHarnessConfig - a model defined in OpenAPI

        :param deployment: main deployment spec (DeploymentAutoArtifactConfig)
        :param service: main service spec (ServiceAutoArtifactConfig)
        :param subdomain: ingress subdomain under the main domain (str)
        :param aliases: extra ingress subdomains (List[str])
        :param domain: dedicated ingress domain (str)
        :param dependencies: ApplicationDependenciesConfig
        :param secured: shield the application behind the gatekeeper (bool)
        :param uri_role_mapping: List[UriRoleMappingConfig]
        :param secrets: Dict[str, str]
        :param use_services: services proxied for the frontend (List[str])
        :param database: DatabaseDeploymentConfig
        :param resources: List[FileResourcesConfig]
        :param readiness_probe: ApplicationProbe
        :param startup_probe: ApplicationProbe
        :param liveness_probe: ApplicationProbe
        :param source_root: source root path (str)
        :param name: application name (str)
        :param jupyterhub: JupyterHubConfig
        """
        self.openapi_types = {
            'deployment': DeploymentAutoArtifactConfig,
            'service': ServiceAutoArtifactConfig,
            'subdomain': str,
            'aliases': List[str],
            'domain': str,
            'dependencies': ApplicationDependenciesConfig,
            'secured': bool,
            'uri_role_mapping': List[UriRoleMappingConfig],
            'secrets': Dict[str, str],
            'use_services': List[str],
            'database': DatabaseDeploymentConfig,
            'resources': List[FileResourcesConfig],
            'readiness_probe': ApplicationProbe,
            'startup_probe': ApplicationProbe,
            'liveness_probe': ApplicationProbe,
            'source_root': str,
            'name': str,
            'jupyterhub': JupyterHubConfig,
        }
        # Most JSON names match the attribute names; the probes and
        # source_root use camelCase on the wire.
        self.attribute_map = {key: key for key in self.openapi_types}
        self.attribute_map.update(
            readiness_probe='readinessProbe',
            startup_probe='startupProbe',
            liveness_probe='livenessProbe',
            source_root='sourceRoot',
        )
        # Constructor assigns the private slots directly (setter validation is
        # not applied here, matching the generated-model convention).
        self._deployment = deployment
        self._service = service
        self._subdomain = subdomain
        self._aliases = aliases
        self._domain = domain
        self._dependencies = dependencies
        self._secured = secured
        self._uri_role_mapping = uri_role_mapping
        self._secrets = secrets
        self._use_services = use_services
        self._database = database
        self._resources = resources
        self._readiness_probe = readiness_probe
        self._startup_probe = startup_probe
        self._liveness_probe = liveness_probe
        self._source_root = source_root
        self._name = name
        self._jupyterhub = jupyterhub

    @classmethod
    def from_dict(cls, dikt) -> 'ApplicationHarnessConfig':
        """Deserialize a plain dict into an ApplicationHarnessConfig.

        :param dikt: A dict.
        :type: dict
        :rtype: ApplicationHarnessConfig
        """
        return util.deserialize_model(dikt, cls)

    deployment = _plain_property('deployment', 'DeploymentAutoArtifactConfig for the application deployment.')
    service = _plain_property('service', 'ServiceAutoArtifactConfig for the application service.')
    subdomain = _plain_property('subdomain', 'If specified, an ingress will be created at [subdomain].[.Values.domain].')
    aliases = _plain_property('aliases', 'If specified, an ingress will be created at [alias].[.Values.domain] for each alias.')
    domain = _plain_property('domain', 'If specified, an ingress will be created at [domain].')
    dependencies = _plain_property('dependencies', 'ApplicationDependenciesConfig declaring hard/soft/build dependencies.')
    secured = _plain_property('secured', 'When true, the application is shielded with a gatekeeper.')
    uri_role_mapping = _plain_property('uri_role_mapping', 'Map uri/roles to secure with the Gatekeeper (if `secured: true`).')
    secrets = _plain_property('secrets', 'Dict[str, str] of secrets for the application.')
    use_services = _plain_property('use_services', 'Services this application uses in the frontend, to create proxy ingresses.')
    database = _plain_property('database', 'DatabaseDeploymentConfig for the application database.')
    resources = _plain_property('resources', 'Application file resources; maps from deploy/resources folder and mounts as configmaps.')
    readiness_probe = _plain_property('readiness_probe', 'ApplicationProbe used as the readiness probe.')
    startup_probe = _plain_property('startup_probe', 'ApplicationProbe used as the startup probe.')
    liveness_probe = _plain_property('liveness_probe', 'ApplicationProbe used as the liveness probe.')
    name = _plain_property('name', 'Application name.')
    jupyterhub = _plain_property('jupyterhub', 'JupyterHubConfig customizations.')

    @property
    def source_root(self):
        """Source root path of the application.

        :rtype: str
        """
        return self._source_root

    @source_root.setter
    def source_root(self, source_root):
        # Reject characters that are not legal in a path; None stays allowed.
        if source_root is not None and not re.search(r'^[^<>:;,?*|]+$', source_root):  # noqa: E501
            raise ValueError("Invalid value for `source_root`, must be a follow pattern or equal to `/^[^<>:;,?*|]+$/`")  # noqa: E501
        self._source_root = source_root
+ """ + + def __init__(self, path=None, period_seconds=None, failure_threshold=None, initial_delay_seconds=None): # noqa: E501 + """ApplicationProbe - a model defined in OpenAPI + + :param path: The path of this ApplicationProbe. # noqa: E501 + :type path: str + :param period_seconds: The period_seconds of this ApplicationProbe. # noqa: E501 + :type period_seconds: float + :param failure_threshold: The failure_threshold of this ApplicationProbe. # noqa: E501 + :type failure_threshold: float + :param initial_delay_seconds: The initial_delay_seconds of this ApplicationProbe. # noqa: E501 + :type initial_delay_seconds: float + """ + self.openapi_types = { + 'path': str, + 'period_seconds': float, + 'failure_threshold': float, + 'initial_delay_seconds': float + } + + self.attribute_map = { + 'path': 'path', + 'period_seconds': 'periodSeconds', + 'failure_threshold': 'failureThreshold', + 'initial_delay_seconds': 'initialDelaySeconds' + } + + self._path = path + self._period_seconds = period_seconds + self._failure_threshold = failure_threshold + self._initial_delay_seconds = initial_delay_seconds + + @classmethod + def from_dict(cls, dikt) -> 'ApplicationProbe': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The ApplicationProbe of this ApplicationProbe. # noqa: E501 + :rtype: ApplicationProbe + """ + return util.deserialize_model(dikt, cls) + + @property + def path(self): + """Gets the path of this ApplicationProbe. + + + :return: The path of this ApplicationProbe. + :rtype: str + """ + return self._path + + @path.setter + def path(self, path): + """Sets the path of this ApplicationProbe. + + + :param path: The path of this ApplicationProbe. + :type path: str + """ + if path is None: + raise ValueError("Invalid value for `path`, must not be `None`") # noqa: E501 + + self._path = path + + @property + def period_seconds(self): + """Gets the period_seconds of this ApplicationProbe. 
+ + + :return: The period_seconds of this ApplicationProbe. + :rtype: float + """ + return self._period_seconds + + @period_seconds.setter + def period_seconds(self, period_seconds): + """Sets the period_seconds of this ApplicationProbe. + + + :param period_seconds: The period_seconds of this ApplicationProbe. + :type period_seconds: float + """ + + self._period_seconds = period_seconds + + @property + def failure_threshold(self): + """Gets the failure_threshold of this ApplicationProbe. + + + :return: The failure_threshold of this ApplicationProbe. + :rtype: float + """ + return self._failure_threshold + + @failure_threshold.setter + def failure_threshold(self, failure_threshold): + """Sets the failure_threshold of this ApplicationProbe. + + + :param failure_threshold: The failure_threshold of this ApplicationProbe. + :type failure_threshold: float + """ + + self._failure_threshold = failure_threshold + + @property + def initial_delay_seconds(self): + """Gets the initial_delay_seconds of this ApplicationProbe. + + + :return: The initial_delay_seconds of this ApplicationProbe. + :rtype: float + """ + return self._initial_delay_seconds + + @initial_delay_seconds.setter + def initial_delay_seconds(self, initial_delay_seconds): + """Sets the initial_delay_seconds of this ApplicationProbe. + + + :param initial_delay_seconds: The initial_delay_seconds of this ApplicationProbe. 
class AutoArtifactSpec(Model):
    """Specification of an automatically generated artifact.

    NOTE: This class is auto generated by OpenAPI Generator
    (https://openapi-generator.tech). Do not edit the class manually.
    """

    def __init__(self, auto=None, name=None):  # noqa: E501
        """AutoArtifactSpec - a model defined in OpenAPI

        :param auto: enables the automatic template (required)  # noqa: E501
        :type auto: bool
        :param name: artifact name  # noqa: E501
        :type name: str
        """
        self.openapi_types = {'auto': bool, 'name': str}
        self.attribute_map = {'auto': 'auto', 'name': 'name'}
        self._auto = auto
        self._name = name

    @classmethod
    def from_dict(cls, dikt) -> 'AutoArtifactSpec':
        """Deserialize a plain dict into an AutoArtifactSpec.

        :param dikt: A dict.
        :type: dict
        :rtype: AutoArtifactSpec
        """
        return util.deserialize_model(dikt, cls)

    @property
    def auto(self):
        """When true, enables automatic template (required).

        :rtype: bool
        """
        return self._auto

    @auto.setter
    def auto(self, auto):
        # `auto` is a required field: explicit None assignments are rejected.
        if auto is None:
            raise ValueError("Invalid value for `auto`, must not be `None`")  # noqa: E501
        self._auto = auto

    @property
    def name(self):
        """Name of the artifact.

        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        self._name = name
# noqa: E501 + :type keep_months: int + :param schedule: The schedule of this BackupConfig. # noqa: E501 + :type schedule: str + :param suffix: The suffix of this BackupConfig. # noqa: E501 + :type suffix: object + :param volumesize: The volumesize of this BackupConfig. # noqa: E501 + :type volumesize: str + :param dir: The dir of this BackupConfig. # noqa: E501 + :type dir: str + :param resources: The resources of this BackupConfig. # noqa: E501 + :type resources: DeploymentResourcesConf + """ + self.openapi_types = { + 'active': bool, + 'keep_days': int, + 'keep_weeks': int, + 'keep_months': int, + 'schedule': str, + 'suffix': object, + 'volumesize': str, + 'dir': str, + 'resources': DeploymentResourcesConf + } + + self.attribute_map = { + 'active': 'active', + 'keep_days': 'keep_days', + 'keep_weeks': 'keep_weeks', + 'keep_months': 'keep_months', + 'schedule': 'schedule', + 'suffix': 'suffix', + 'volumesize': 'volumesize', + 'dir': 'dir', + 'resources': 'resources' + } + + self._active = active + self._keep_days = keep_days + self._keep_weeks = keep_weeks + self._keep_months = keep_months + self._schedule = schedule + self._suffix = suffix + self._volumesize = volumesize + self._dir = dir + self._resources = resources + + @classmethod + def from_dict(cls, dikt) -> 'BackupConfig': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The BackupConfig of this BackupConfig. # noqa: E501 + :rtype: BackupConfig + """ + return util.deserialize_model(dikt, cls) + + @property + def active(self): + """Gets the active of this BackupConfig. + + + :return: The active of this BackupConfig. + :rtype: bool + """ + return self._active + + @active.setter + def active(self, active): + """Sets the active of this BackupConfig. + + + :param active: The active of this BackupConfig. + :type active: bool + """ + + self._active = active + + @property + def keep_days(self): + """Gets the keep_days of this BackupConfig. 
+ + + :return: The keep_days of this BackupConfig. + :rtype: int + """ + return self._keep_days + + @keep_days.setter + def keep_days(self, keep_days): + """Sets the keep_days of this BackupConfig. + + + :param keep_days: The keep_days of this BackupConfig. + :type keep_days: int + """ + + self._keep_days = keep_days + + @property + def keep_weeks(self): + """Gets the keep_weeks of this BackupConfig. + + + :return: The keep_weeks of this BackupConfig. + :rtype: int + """ + return self._keep_weeks + + @keep_weeks.setter + def keep_weeks(self, keep_weeks): + """Sets the keep_weeks of this BackupConfig. + + + :param keep_weeks: The keep_weeks of this BackupConfig. + :type keep_weeks: int + """ + + self._keep_weeks = keep_weeks + + @property + def keep_months(self): + """Gets the keep_months of this BackupConfig. + + + :return: The keep_months of this BackupConfig. + :rtype: int + """ + return self._keep_months + + @keep_months.setter + def keep_months(self, keep_months): + """Sets the keep_months of this BackupConfig. + + + :param keep_months: The keep_months of this BackupConfig. + :type keep_months: int + """ + + self._keep_months = keep_months + + @property + def schedule(self): + """Gets the schedule of this BackupConfig. + + Cron expression # noqa: E501 + + :return: The schedule of this BackupConfig. + :rtype: str + """ + return self._schedule + + @schedule.setter + def schedule(self, schedule): + """Sets the schedule of this BackupConfig. + + Cron expression # noqa: E501 + + :param schedule: The schedule of this BackupConfig. 
+ :type schedule: str + """ + if schedule is not None and not re.search(r'(@(annually|yearly|monthly|weekly|daily|hourly|reboot))|(@every (\d+(ns|us|µs|ms|s|m|h))+)|((((\d+,)+\d+|(\d+(\/|-)\d+)|\d+|\*) ?){5,7})', schedule): # noqa: E501 + raise ValueError("Invalid value for `schedule`, must be a follow pattern or equal to `/(@(annually|yearly|monthly|weekly|daily|hourly|reboot))|(@every (\d+(ns|us|µs|ms|s|m|h))+)|((((\d+,)+\d+|(\d+(\/|-)\d+)|\d+|\*) ?){5,7})/`") # noqa: E501 + + self._schedule = schedule + + @property + def suffix(self): + """Gets the suffix of this BackupConfig. + + The file suffix added to backup files # noqa: E501 + + :return: The suffix of this BackupConfig. + :rtype: object + """ + return self._suffix + + @suffix.setter + def suffix(self, suffix): + """Sets the suffix of this BackupConfig. + + The file suffix added to backup files # noqa: E501 + + :param suffix: The suffix of this BackupConfig. + :type suffix: object + """ + + self._suffix = suffix + + @property + def volumesize(self): + """Gets the volumesize of this BackupConfig. + + The volume size for backups (all backups share the same volume) # noqa: E501 + + :return: The volumesize of this BackupConfig. + :rtype: str + """ + return self._volumesize + + @volumesize.setter + def volumesize(self, volumesize): + """Sets the volumesize of this BackupConfig. + + The volume size for backups (all backups share the same volume) # noqa: E501 + + :param volumesize: The volumesize of this BackupConfig. + :type volumesize: str + """ + + self._volumesize = volumesize + + @property + def dir(self): + """Gets the dir of this BackupConfig. + + + :return: The dir of this BackupConfig. + :rtype: str + """ + return self._dir + + @dir.setter + def dir(self, dir): + """Sets the dir of this BackupConfig. + + + :param dir: The dir of this BackupConfig. 
import pprint
import typing

T = typing.TypeVar('T')


class Model(object):
    """Base class for all generated OpenAPI model classes.

    Provides dict/str conversion, equality, and dict-like read access.
    Unknown payload keys can be kept in ``_raw_dict`` and are merged back
    into the output of :meth:`to_dict`.
    """

    # openapi_types: The key is attribute name and the
    # value is attribute type. Overridden per subclass.
    openapi_types = {}

    # attribute_map: The key is attribute name and the
    # value is json key in definition. Overridden per subclass.
    attribute_map = {}

    def __init__(self):
        # Storage for payload keys that are not declared model attributes.
        # NOTE: generated subclasses define their own __init__ and do not
        # call super().__init__(), so this may be absent on instances;
        # all accessors below therefore use getattr with a default.
        self._raw_dict = {}

    @classmethod
    def from_dict(cls: typing.Type[T], dikt) -> T:
        """Deserialize *dikt* into an instance of *cls*.

        :param dikt: A dict.
        :return: The deserialized model instance.
        """
        # Imported lazily to avoid a circular import at module load time
        # (util itself imports model classes).
        from cloudharness_model import util
        return util.deserialize_model(dikt, cls)

    def __getitem__(self, key):
        """Dict-style access: declared attributes first, then raw payload.

        :raises KeyError: if *key* is neither an attribute nor a raw key.
        """
        if hasattr(self, key):
            return getattr(self, key)
        return getattr(self, "_raw_dict", {})[key]

    def __contains__(self, key):
        """True if *key* is a declared attribute or a raw payload key."""
        if key in self.attribute_map:
            return True
        return key in getattr(self, "_raw_dict", {})

    def to_dict(self):
        """Returns the model properties as a dict.

        Declared attributes are converted recursively (lists, dicts and
        nested models); raw payload keys are merged in, with declared
        attributes taking precedence on conflicts.

        :rtype: dict
        """
        result = {}

        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value

        # Merge raw (undeclared) payload keys back in. A plain `raw_dict`
        # attribute, if a subclass provides one, keeps precedence for
        # backward compatibility; otherwise fall back to the `_raw_dict`
        # attribute this class actually maintains (the original check on
        # `raw_dict` alone could never see it).
        raw = getattr(self, "raw_dict", None)
        if raw is None:
            raw = getattr(self, "_raw_dict", None)
        if raw:
            merged = dict(raw)
            merged.update(result)
            return merged
        return result

    def to_str(self):
        """Returns the string representation of the model.

        :rtype: str
        """
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """True if both objects are models with equal attributes.

        Returns NotImplemented for non-Model operands instead of raising
        AttributeError on a missing ``__dict__``.
        """
        if not isinstance(other, Model):
            return NotImplemented
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """True if both objects are not equal."""
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result
class CDCEvent(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.

    Change-data-capture event: an operation performed on a resource,
    together with its unique id, message type and metadata.
    """

    def __init__(self, operation=None, uid=None, message_type=None, resource=None, meta=None):  # noqa: E501
        """CDCEvent - a model defined in OpenAPI

        :param operation: The operation of this CDCEvent.  # noqa: E501
        :type operation: str
        :param uid: The uid of this CDCEvent.  # noqa: E501
        :type uid: str
        :param message_type: The message_type of this CDCEvent.  # noqa: E501
        :type message_type: str
        :param resource: The resource of this CDCEvent.  # noqa: E501
        :type resource: Dict[str, object]
        :param meta: The meta of this CDCEvent.  # noqa: E501
        :type meta: CDCEventMeta
        """
        # Declared attribute types and JSON field names, consumed by the
        # (de)serialization machinery in the Model base class.
        self.openapi_types = {
            'operation': str,
            'uid': str,
            'message_type': str,
            'resource': Dict[str, object],
            'meta': CDCEventMeta
        }

        self.attribute_map = {
            'operation': 'operation',
            'uid': 'uid',
            'message_type': 'message_type',
            'resource': 'resource',
            'meta': 'meta'
        }

        # NOTE(review): direct assignment bypasses the validating setters,
        # so no validation happens at construction time.
        self._operation = operation
        self._uid = uid
        self._message_type = message_type
        self._resource = resource
        self._meta = meta

    @classmethod
    def from_dict(cls, dikt) -> 'CDCEvent':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The CDCEvent of this CDCEvent.  # noqa: E501
        :rtype: CDCEvent
        """
        return util.deserialize_model(dikt, cls)

    @property
    def operation(self):
        """Gets the operation of this CDCEvent.

        the operation on the object e.g. create / update / delete  # noqa: E501

        :return: The operation of this CDCEvent.
        :rtype: str
        """
        return self._operation

    @operation.setter
    def operation(self, operation):
        """Sets the operation of this CDCEvent.

        the operation on the object e.g. create / update / delete  # noqa: E501

        :param operation: The operation of this CDCEvent.
        :type operation: str
        """
        # NOTE(review): None is also rejected by this membership check, so
        # `operation` is effectively required when set through the property.
        allowed_values = ["create", "update", "delete", "other"]  # noqa: E501
        if operation not in allowed_values:
            raise ValueError(
                "Invalid value for `operation` ({0}), must be one of {1}"
                .format(operation, allowed_values)
            )

        self._operation = operation

    @property
    def uid(self):
        """Gets the uid of this CDCEvent.

        the unique identifier attribute of the object  # noqa: E501

        :return: The uid of this CDCEvent.
        :rtype: str
        """
        return self._uid

    @uid.setter
    def uid(self, uid):
        """Sets the uid of this CDCEvent.

        the unique identifier attribute of the object  # noqa: E501

        :param uid: The uid of this CDCEvent.
        :type uid: str
        """
        # Required field.
        if uid is None:
            raise ValueError("Invalid value for `uid`, must not be `None`")  # noqa: E501

        self._uid = uid

    @property
    def message_type(self):
        """Gets the message_type of this CDCEvent.

        the type of the message (relates to the object type) e.g. jobs  # noqa: E501

        :return: The message_type of this CDCEvent.
        :rtype: str
        """
        return self._message_type

    @message_type.setter
    def message_type(self, message_type):
        """Sets the message_type of this CDCEvent.

        the type of the message (relates to the object type) e.g. jobs  # noqa: E501

        :param message_type: The message_type of this CDCEvent.
        :type message_type: str
        """
        # Required field.
        if message_type is None:
            raise ValueError("Invalid value for `message_type`, must not be `None`")  # noqa: E501

        self._message_type = message_type

    @property
    def resource(self):
        """Gets the resource of this CDCEvent.


        :return: The resource of this CDCEvent.
        :rtype: Dict[str, object]
        """
        return self._resource

    @resource.setter
    def resource(self, resource):
        """Sets the resource of this CDCEvent.


        :param resource: The resource of this CDCEvent.
        :type resource: Dict[str, object]
        """

        self._resource = resource

    @property
    def meta(self):
        """Gets the meta of this CDCEvent.


        :return: The meta of this CDCEvent.
        :rtype: CDCEventMeta
        """
        return self._meta

    @meta.setter
    def meta(self, meta):
        """Sets the meta of this CDCEvent.


        :param meta: The meta of this CDCEvent.
        :type meta: CDCEventMeta
        """
        # Required field.
        if meta is None:
            raise ValueError("Invalid value for `meta`, must not be `None`")  # noqa: E501

        self._meta = meta
class CDCEventMeta(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.

    Metadata attached to a CDC event: the sending application, the acting
    user, the caller's args/kwargs and a human-readable description.
    """

    def __init__(self, app_name=None, user=None, args=None, kwargs=None, description=None):  # noqa: E501
        """CDCEventMeta - a model defined in OpenAPI

        :param app_name: The app_name of this CDCEventMeta.  # noqa: E501
        :type app_name: str
        :param user: The user of this CDCEventMeta.  # noqa: E501
        :type user: User
        :param args: The args of this CDCEventMeta.  # noqa: E501
        :type args: List[Dict]
        :param kwargs: The kwargs of this CDCEventMeta.  # noqa: E501
        :type kwargs: object
        :param description: The description of this CDCEventMeta.  # noqa: E501
        :type description: str
        """
        # Declared attribute types and JSON field names, consumed by the
        # (de)serialization machinery in the Model base class.
        self.openapi_types = {
            'app_name': str,
            'user': User,
            'args': List[Dict],
            'kwargs': object,
            'description': str
        }

        self.attribute_map = {
            'app_name': 'app_name',
            'user': 'user',
            'args': 'args',
            'kwargs': 'kwargs',
            'description': 'description'
        }

        # NOTE(review): direct assignment bypasses the validating setters,
        # so no validation happens at construction time.
        self._app_name = app_name
        self._user = user
        self._args = args
        self._kwargs = kwargs
        self._description = description

    @classmethod
    def from_dict(cls, dikt) -> 'CDCEventMeta':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The CDCEventMeta of this CDCEventMeta.  # noqa: E501
        :rtype: CDCEventMeta
        """
        return util.deserialize_model(dikt, cls)

    @property
    def app_name(self):
        """Gets the app_name of this CDCEventMeta.

        The name of the application/microservice sending the message  # noqa: E501

        :return: The app_name of this CDCEventMeta.
        :rtype: str
        """
        return self._app_name

    @app_name.setter
    def app_name(self, app_name):
        """Sets the app_name of this CDCEventMeta.

        The name of the application/microservice sending the message  # noqa: E501

        :param app_name: The app_name of this CDCEventMeta.
        :type app_name: str
        """
        # Required field.
        if app_name is None:
            raise ValueError("Invalid value for `app_name`, must not be `None`")  # noqa: E501

        self._app_name = app_name

    @property
    def user(self):
        """Gets the user of this CDCEventMeta.


        :return: The user of this CDCEventMeta.
        :rtype: User
        """
        return self._user

    @user.setter
    def user(self, user):
        """Sets the user of this CDCEventMeta.


        :param user: The user of this CDCEventMeta.
        :type user: User
        """

        self._user = user

    @property
    def args(self):
        """Gets the args of this CDCEventMeta.

        the caller function arguments  # noqa: E501

        :return: The args of this CDCEventMeta.
        :rtype: List[Dict]
        """
        return self._args

    @args.setter
    def args(self, args):
        """Sets the args of this CDCEventMeta.

        the caller function arguments  # noqa: E501

        :param args: The args of this CDCEventMeta.
        :type args: List[Dict]
        """

        self._args = args

    @property
    def kwargs(self):
        """Gets the kwargs of this CDCEventMeta.

        the caller function keyword arguments  # noqa: E501

        :return: The kwargs of this CDCEventMeta.
        :rtype: object
        """
        return self._kwargs

    @kwargs.setter
    def kwargs(self, kwargs):
        """Sets the kwargs of this CDCEventMeta.

        the caller function keyword arguments  # noqa: E501

        :param kwargs: The kwargs of this CDCEventMeta.
        :type kwargs: object
        """

        self._kwargs = kwargs

    @property
    def description(self):
        """Gets the description of this CDCEventMeta.

        General description -- for human consumption  # noqa: E501

        :return: The description of this CDCEventMeta.
        :rtype: str
        """
        return self._description

    @description.setter
    def description(self, description):
        """Sets the description of this CDCEventMeta.

        General description -- for human consumption  # noqa: E501

        :param description: The description of this CDCEventMeta.
        :type description: str
        """

        self._description = description
class CpuMemoryConfig(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.

    Simple pair of cpu/memory resource strings.
    """

    def __init__(self, cpu=None, memory=None):  # noqa: E501
        """CpuMemoryConfig - a model defined in OpenAPI

        :param cpu: The cpu of this CpuMemoryConfig.  # noqa: E501
        :type cpu: str
        :param memory: The memory of this CpuMemoryConfig.  # noqa: E501
        :type memory: str
        """
        # Attribute types and JSON field names, consumed by the
        # (de)serialization machinery in the Model base class.
        self.openapi_types = {'cpu': str, 'memory': str}
        self.attribute_map = {'cpu': 'cpu', 'memory': 'memory'}

        self._cpu = cpu
        self._memory = memory

    @classmethod
    def from_dict(cls, dikt) -> 'CpuMemoryConfig':
        """Build a CpuMemoryConfig from a plain dict.

        :param dikt: A dict.
        :type: dict
        :return: The CpuMemoryConfig of this CpuMemoryConfig.  # noqa: E501
        :rtype: CpuMemoryConfig
        """
        return util.deserialize_model(dikt, cls)

    @property
    def cpu(self):
        """The cpu value of this CpuMemoryConfig.

        :rtype: str
        """
        return self._cpu

    @cpu.setter
    def cpu(self, cpu):
        """Set the cpu value of this CpuMemoryConfig.

        :type cpu: str
        """
        self._cpu = cpu

    @property
    def memory(self):
        """The memory value of this CpuMemoryConfig.

        :rtype: str
        """
        return self._memory

    @memory.setter
    def memory(self, memory):
        """Set the memory value of this CpuMemoryConfig.

        :type memory: str
        """
        self._memory = memory
# coding: utf-8

from __future__ import absolute_import
from datetime import date, datetime  # noqa: F401

from typing import List, Dict  # noqa: F401

from cloudharness_model.models.base_model_ import Model
from cloudharness_model.models.auto_artifact_spec import AutoArtifactSpec
from cloudharness_model.models.database_deployment_config_all_of import DatabaseDeploymentConfigAllOf
from cloudharness_model.models.deployment_resources_conf import DeploymentResourcesConf
import re
from cloudharness_model import util

from cloudharness_model.models.auto_artifact_spec import AutoArtifactSpec  # noqa: E501
from cloudharness_model.models.database_deployment_config_all_of import DatabaseDeploymentConfigAllOf  # noqa: E501
from cloudharness_model.models.deployment_resources_conf import DeploymentResourcesConf  # noqa: E501
import re  # noqa: E501

class DatabaseDeploymentConfig(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.

    Database deployment settings: engine type (mongo/postgres/neo4j),
    disk size, credentials, engine-specific configuration, resources and
    the auto-artifact fields (auto, name).
    """

    def __init__(self, type=None, size=None, user=None, _pass=None, mongo=None, postgres=None, neo4j=None, resources=None, auto=None, name=None):  # noqa: E501
        """DatabaseDeploymentConfig - a model defined in OpenAPI

        :param type: The type of this DatabaseDeploymentConfig.  # noqa: E501
        :type type: str
        :param size: The size of this DatabaseDeploymentConfig.  # noqa: E501
        :type size: str
        :param user: The user of this DatabaseDeploymentConfig.  # noqa: E501
        :type user: str
        :param _pass: The _pass of this DatabaseDeploymentConfig.  # noqa: E501
        :type _pass: str
        :param mongo: The mongo of this DatabaseDeploymentConfig.  # noqa: E501
        :type mongo: Dict[str, object]
        :param postgres: The postgres of this DatabaseDeploymentConfig.  # noqa: E501
        :type postgres: Dict[str, object]
        :param neo4j: The neo4j of this DatabaseDeploymentConfig.  # noqa: E501
        :type neo4j: object
        :param resources: The resources of this DatabaseDeploymentConfig.  # noqa: E501
        :type resources: DeploymentResourcesConf
        :param auto: The auto of this DatabaseDeploymentConfig.  # noqa: E501
        :type auto: bool
        :param name: The name of this DatabaseDeploymentConfig.  # noqa: E501
        :type name: str
        """
        # 'pass' is a Python keyword, so the generator exposes the field as
        # `_pass` and maps it back to the JSON key 'pass' in attribute_map.
        self.openapi_types = {
            'type': str,
            'size': str,
            'user': str,
            '_pass': str,
            'mongo': Dict[str, object],
            'postgres': Dict[str, object],
            'neo4j': object,
            'resources': DeploymentResourcesConf,
            'auto': bool,
            'name': str
        }

        self.attribute_map = {
            'type': 'type',
            'size': 'size',
            'user': 'user',
            '_pass': 'pass',
            'mongo': 'mongo',
            'postgres': 'postgres',
            'neo4j': 'neo4j',
            'resources': 'resources',
            'auto': 'auto',
            'name': 'name'
        }

        # NOTE(review): direct assignment bypasses the validating setters.
        self._type = type
        self._size = size
        self._user = user
        # self.__pass is name-mangled to _DatabaseDeploymentConfig__pass,
        # matching the `_pass` property below which uses the same spelling.
        self.__pass = _pass
        self._mongo = mongo
        self._postgres = postgres
        self._neo4j = neo4j
        self._resources = resources
        self._auto = auto
        self._name = name

    @classmethod
    def from_dict(cls, dikt) -> 'DatabaseDeploymentConfig':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The DatabaseDeploymentConfig of this DatabaseDeploymentConfig.  # noqa: E501
        :rtype: DatabaseDeploymentConfig
        """
        return util.deserialize_model(dikt, cls)

    @property
    def type(self):
        """Gets the type of this DatabaseDeploymentConfig.

        Define the database type. One of (mongo, postgres, neo4j)  # noqa: E501

        :return: The type of this DatabaseDeploymentConfig.
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """Sets the type of this DatabaseDeploymentConfig.

        Define the database type. One of (mongo, postgres, neo4j)  # noqa: E501

        :param type: The type of this DatabaseDeploymentConfig.
        :type type: str
        """
        # None is allowed; otherwise restricted to the supported engines.
        if type is not None and not re.search(r'^(mongo|postgres|neo4j)$', type):  # noqa: E501
            raise ValueError("Invalid value for `type`, must be a follow pattern or equal to `/^(mongo|postgres|neo4j)$/`")  # noqa: E501

        self._type = type

    @property
    def size(self):
        """Gets the size of this DatabaseDeploymentConfig.

        Specify database disk size  # noqa: E501

        :return: The size of this DatabaseDeploymentConfig.
        :rtype: str
        """
        return self._size

    @size.setter
    def size(self, size):
        """Sets the size of this DatabaseDeploymentConfig.

        Specify database disk size  # noqa: E501

        :param size: The size of this DatabaseDeploymentConfig.
        :type size: str
        """

        self._size = size

    @property
    def user(self):
        """Gets the user of this DatabaseDeploymentConfig.

        database username  # noqa: E501

        :return: The user of this DatabaseDeploymentConfig.
        :rtype: str
        """
        return self._user

    @user.setter
    def user(self, user):
        """Sets the user of this DatabaseDeploymentConfig.

        database username  # noqa: E501

        :param user: The user of this DatabaseDeploymentConfig.
        :type user: str
        """

        self._user = user

    @property
    def _pass(self):
        """Gets the _pass of this DatabaseDeploymentConfig.

        Database password  # noqa: E501

        :return: The _pass of this DatabaseDeploymentConfig.
        :rtype: str
        """
        return self.__pass

    @_pass.setter
    def _pass(self, _pass):
        """Sets the _pass of this DatabaseDeploymentConfig.

        Database password  # noqa: E501

        :param _pass: The _pass of this DatabaseDeploymentConfig.
        :type _pass: str
        """

        self.__pass = _pass

    @property
    def mongo(self):
        """Gets the mongo of this DatabaseDeploymentConfig.


        :return: The mongo of this DatabaseDeploymentConfig.
        :rtype: Dict[str, object]
        """
        return self._mongo

    @mongo.setter
    def mongo(self, mongo):
        """Sets the mongo of this DatabaseDeploymentConfig.


        :param mongo: The mongo of this DatabaseDeploymentConfig.
        :type mongo: Dict[str, object]
        """

        self._mongo = mongo

    @property
    def postgres(self):
        """Gets the postgres of this DatabaseDeploymentConfig.


        :return: The postgres of this DatabaseDeploymentConfig.
        :rtype: Dict[str, object]
        """
        return self._postgres

    @postgres.setter
    def postgres(self, postgres):
        """Sets the postgres of this DatabaseDeploymentConfig.


        :param postgres: The postgres of this DatabaseDeploymentConfig.
        :type postgres: Dict[str, object]
        """

        self._postgres = postgres

    @property
    def neo4j(self):
        """Gets the neo4j of this DatabaseDeploymentConfig.

        Neo4j database specific configuration  # noqa: E501

        :return: The neo4j of this DatabaseDeploymentConfig.
        :rtype: object
        """
        return self._neo4j

    @neo4j.setter
    def neo4j(self, neo4j):
        """Sets the neo4j of this DatabaseDeploymentConfig.

        Neo4j database specific configuration  # noqa: E501

        :param neo4j: The neo4j of this DatabaseDeploymentConfig.
        :type neo4j: object
        """

        self._neo4j = neo4j

    @property
    def resources(self):
        """Gets the resources of this DatabaseDeploymentConfig.


        :return: The resources of this DatabaseDeploymentConfig.
        :rtype: DeploymentResourcesConf
        """
        return self._resources

    @resources.setter
    def resources(self, resources):
        """Sets the resources of this DatabaseDeploymentConfig.


        :param resources: The resources of this DatabaseDeploymentConfig.
        :type resources: DeploymentResourcesConf
        """

        self._resources = resources

    @property
    def auto(self):
        """Gets the auto of this DatabaseDeploymentConfig.

        When true, enables automatic template  # noqa: E501

        :return: The auto of this DatabaseDeploymentConfig.
        :rtype: bool
        """
        return self._auto

    @auto.setter
    def auto(self, auto):
        """Sets the auto of this DatabaseDeploymentConfig.

        When true, enables automatic template  # noqa: E501

        :param auto: The auto of this DatabaseDeploymentConfig.
        :type auto: bool
        """
        # Required field.
        if auto is None:
            raise ValueError("Invalid value for `auto`, must not be `None`")  # noqa: E501

        self._auto = auto

    @property
    def name(self):
        """Gets the name of this DatabaseDeploymentConfig.


        :return: The name of this DatabaseDeploymentConfig.
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this DatabaseDeploymentConfig.


        :param name: The name of this DatabaseDeploymentConfig.
        :type name: str
        """

        self._name = name
# coding: utf-8

from __future__ import absolute_import
from datetime import date, datetime  # noqa: F401

from typing import List, Dict  # noqa: F401

from cloudharness_model.models.base_model_ import Model
from cloudharness_model.models.deployment_resources_conf import DeploymentResourcesConf
import re
from cloudharness_model import util

from cloudharness_model.models.deployment_resources_conf import DeploymentResourcesConf  # noqa: E501
import re  # noqa: E501

class DatabaseDeploymentConfigAllOf(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.

    The database-specific half of DatabaseDeploymentConfig (the schema's
    allOf component without the AutoArtifactSpec fields).
    """

    def __init__(self, type=None, size=None, user=None, _pass=None, mongo=None, postgres=None, neo4j=None, resources=None):  # noqa: E501
        """DatabaseDeploymentConfigAllOf - a model defined in OpenAPI

        :param type: The type of this DatabaseDeploymentConfigAllOf.  # noqa: E501
        :type type: str
        :param size: The size of this DatabaseDeploymentConfigAllOf.  # noqa: E501
        :type size: str
        :param user: The user of this DatabaseDeploymentConfigAllOf.  # noqa: E501
        :type user: str
        :param _pass: The _pass of this DatabaseDeploymentConfigAllOf.  # noqa: E501
        :type _pass: str
        :param mongo: The mongo of this DatabaseDeploymentConfigAllOf.  # noqa: E501
        :type mongo: Dict[str, object]
        :param postgres: The postgres of this DatabaseDeploymentConfigAllOf.  # noqa: E501
        :type postgres: Dict[str, object]
        :param neo4j: The neo4j of this DatabaseDeploymentConfigAllOf.  # noqa: E501
        :type neo4j: object
        :param resources: The resources of this DatabaseDeploymentConfigAllOf.  # noqa: E501
        :type resources: DeploymentResourcesConf
        """
        # 'pass' is a Python keyword, so the generator exposes the field as
        # `_pass` and maps it back to the JSON key 'pass' in attribute_map.
        self.openapi_types = {
            'type': str,
            'size': str,
            'user': str,
            '_pass': str,
            'mongo': Dict[str, object],
            'postgres': Dict[str, object],
            'neo4j': object,
            'resources': DeploymentResourcesConf
        }

        self.attribute_map = {
            'type': 'type',
            'size': 'size',
            'user': 'user',
            '_pass': 'pass',
            'mongo': 'mongo',
            'postgres': 'postgres',
            'neo4j': 'neo4j',
            'resources': 'resources'
        }

        # NOTE(review): direct assignment bypasses the validating setters.
        self._type = type
        self._size = size
        self._user = user
        # Name-mangled to _DatabaseDeploymentConfigAllOf__pass; matches the
        # `_pass` property below.
        self.__pass = _pass
        self._mongo = mongo
        self._postgres = postgres
        self._neo4j = neo4j
        self._resources = resources

    @classmethod
    def from_dict(cls, dikt) -> 'DatabaseDeploymentConfigAllOf':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The DatabaseDeploymentConfig_allOf of this DatabaseDeploymentConfigAllOf.  # noqa: E501
        :rtype: DatabaseDeploymentConfigAllOf
        """
        return util.deserialize_model(dikt, cls)

    @property
    def type(self):
        """Gets the type of this DatabaseDeploymentConfigAllOf.

        Define the database type. One of (mongo, postgres, neo4j)  # noqa: E501

        :return: The type of this DatabaseDeploymentConfigAllOf.
        :rtype: str
        """
        return self._type

    @type.setter
    def type(self, type):
        """Sets the type of this DatabaseDeploymentConfigAllOf.

        Define the database type. One of (mongo, postgres, neo4j)  # noqa: E501

        :param type: The type of this DatabaseDeploymentConfigAllOf.
        :type type: str
        """
        # None is allowed; otherwise restricted to the supported engines.
        if type is not None and not re.search(r'^(mongo|postgres|neo4j)$', type):  # noqa: E501
            raise ValueError("Invalid value for `type`, must be a follow pattern or equal to `/^(mongo|postgres|neo4j)$/`")  # noqa: E501

        self._type = type

    @property
    def size(self):
        """Gets the size of this DatabaseDeploymentConfigAllOf.

        Specify database disk size  # noqa: E501

        :return: The size of this DatabaseDeploymentConfigAllOf.
        :rtype: str
        """
        return self._size

    @size.setter
    def size(self, size):
        """Sets the size of this DatabaseDeploymentConfigAllOf.

        Specify database disk size  # noqa: E501

        :param size: The size of this DatabaseDeploymentConfigAllOf.
        :type size: str
        """

        self._size = size

    @property
    def user(self):
        """Gets the user of this DatabaseDeploymentConfigAllOf.

        database username  # noqa: E501

        :return: The user of this DatabaseDeploymentConfigAllOf.
        :rtype: str
        """
        return self._user

    @user.setter
    def user(self, user):
        """Sets the user of this DatabaseDeploymentConfigAllOf.

        database username  # noqa: E501

        :param user: The user of this DatabaseDeploymentConfigAllOf.
        :type user: str
        """

        self._user = user

    @property
    def _pass(self):
        """Gets the _pass of this DatabaseDeploymentConfigAllOf.

        Database password  # noqa: E501

        :return: The _pass of this DatabaseDeploymentConfigAllOf.
        :rtype: str
        """
        return self.__pass

    @_pass.setter
    def _pass(self, _pass):
        """Sets the _pass of this DatabaseDeploymentConfigAllOf.

        Database password  # noqa: E501

        :param _pass: The _pass of this DatabaseDeploymentConfigAllOf.
        :type _pass: str
        """

        self.__pass = _pass

    @property
    def mongo(self):
        """Gets the mongo of this DatabaseDeploymentConfigAllOf.


        :return: The mongo of this DatabaseDeploymentConfigAllOf.
        :rtype: Dict[str, object]
        """
        return self._mongo

    @mongo.setter
    def mongo(self, mongo):
        """Sets the mongo of this DatabaseDeploymentConfigAllOf.


        :param mongo: The mongo of this DatabaseDeploymentConfigAllOf.
        :type mongo: Dict[str, object]
        """

        self._mongo = mongo

    @property
    def postgres(self):
        """Gets the postgres of this DatabaseDeploymentConfigAllOf.


        :return: The postgres of this DatabaseDeploymentConfigAllOf.
        :rtype: Dict[str, object]
        """
        return self._postgres

    @postgres.setter
    def postgres(self, postgres):
        """Sets the postgres of this DatabaseDeploymentConfigAllOf.


        :param postgres: The postgres of this DatabaseDeploymentConfigAllOf.
        :type postgres: Dict[str, object]
        """

        self._postgres = postgres

    @property
    def neo4j(self):
        """Gets the neo4j of this DatabaseDeploymentConfigAllOf.

        Neo4j database specific configuration  # noqa: E501

        :return: The neo4j of this DatabaseDeploymentConfigAllOf.
        :rtype: object
        """
        return self._neo4j

    @neo4j.setter
    def neo4j(self, neo4j):
        """Sets the neo4j of this DatabaseDeploymentConfigAllOf.

        Neo4j database specific configuration  # noqa: E501

        :param neo4j: The neo4j of this DatabaseDeploymentConfigAllOf.
        :type neo4j: object
        """

        self._neo4j = neo4j

    @property
    def resources(self):
        """Gets the resources of this DatabaseDeploymentConfigAllOf.


        :return: The resources of this DatabaseDeploymentConfigAllOf.
        :rtype: DeploymentResourcesConf
        """
        return self._resources

    @resources.setter
    def resources(self, resources):
        """Sets the resources of this DatabaseDeploymentConfigAllOf.


        :param resources: The resources of this DatabaseDeploymentConfigAllOf.
        :type resources: DeploymentResourcesConf
        """

        self._resources = resources
+ :type resources: DeploymentResourcesConf + """ + + self._resources = resources diff --git a/libraries/models/cloudharness_model/models/deployment_auto_artifact_config.py b/libraries/models/cloudharness_model/models/deployment_auto_artifact_config.py new file mode 100644 index 00000000..9f3a3748 --- /dev/null +++ b/libraries/models/cloudharness_model/models/deployment_auto_artifact_config.py @@ -0,0 +1,212 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.auto_artifact_spec import AutoArtifactSpec +import re +from cloudharness_model import util + +from cloudharness_model.models.auto_artifact_spec import AutoArtifactSpec # noqa: E501 +import re # noqa: E501 + +class DeploymentAutoArtifactConfig(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, port=None, replicas=None, image=None, resources=None, auto=None, name=None): # noqa: E501 + """DeploymentAutoArtifactConfig - a model defined in OpenAPI + + :param port: The port of this DeploymentAutoArtifactConfig. # noqa: E501 + :type port: object + :param replicas: The replicas of this DeploymentAutoArtifactConfig. # noqa: E501 + :type replicas: int + :param image: The image of this DeploymentAutoArtifactConfig. # noqa: E501 + :type image: str + :param resources: The resources of this DeploymentAutoArtifactConfig. # noqa: E501 + :type resources: object + :param auto: The auto of this DeploymentAutoArtifactConfig. # noqa: E501 + :type auto: bool + :param name: The name of this DeploymentAutoArtifactConfig. 
# noqa: E501 + :type name: str + """ + self.openapi_types = { + 'port': object, + 'replicas': int, + 'image': str, + 'resources': object, + 'auto': bool, + 'name': str + } + + self.attribute_map = { + 'port': 'port', + 'replicas': 'replicas', + 'image': 'image', + 'resources': 'resources', + 'auto': 'auto', + 'name': 'name' + } + + self._port = port + self._replicas = replicas + self._image = image + self._resources = resources + self._auto = auto + self._name = name + + @classmethod + def from_dict(cls, dikt) -> 'DeploymentAutoArtifactConfig': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The DeploymentAutoArtifactConfig of this DeploymentAutoArtifactConfig. # noqa: E501 + :rtype: DeploymentAutoArtifactConfig + """ + return util.deserialize_model(dikt, cls) + + @property + def port(self): + """Gets the port of this DeploymentAutoArtifactConfig. + + Deployment port # noqa: E501 + + :return: The port of this DeploymentAutoArtifactConfig. + :rtype: object + """ + return self._port + + @port.setter + def port(self, port): + """Sets the port of this DeploymentAutoArtifactConfig. + + Deployment port # noqa: E501 + + :param port: The port of this DeploymentAutoArtifactConfig. + :type port: object + """ + + self._port = port + + @property + def replicas(self): + """Gets the replicas of this DeploymentAutoArtifactConfig. + + Number of replicas # noqa: E501 + + :return: The replicas of this DeploymentAutoArtifactConfig. + :rtype: int + """ + return self._replicas + + @replicas.setter + def replicas(self, replicas): + """Sets the replicas of this DeploymentAutoArtifactConfig. + + Number of replicas # noqa: E501 + + :param replicas: The replicas of this DeploymentAutoArtifactConfig. + :type replicas: int + """ + + self._replicas = replicas + + @property + def image(self): + """Gets the image of this DeploymentAutoArtifactConfig. + + Image name to use in the deployment. 
Leave it blank to set from the application's Docker file # noqa: E501 + + :return: The image of this DeploymentAutoArtifactConfig. + :rtype: str + """ + return self._image + + @image.setter + def image(self, image): + """Sets the image of this DeploymentAutoArtifactConfig. + + Image name to use in the deployment. Leave it blank to set from the application's Docker file # noqa: E501 + + :param image: The image of this DeploymentAutoArtifactConfig. + :type image: str + """ + if image is not None and not re.search(r'(?:[a-z]+\/)?([a-z]+)(?::[0-9]+)?', image): # noqa: E501 + raise ValueError("Invalid value for `image`, must be a follow pattern or equal to `/(?:[a-z]+\/)?([a-z]+)(?::[0-9]+)?/`") # noqa: E501 + + self._image = image + + @property + def resources(self): + """Gets the resources of this DeploymentAutoArtifactConfig. + + Deployment resources # noqa: E501 + + :return: The resources of this DeploymentAutoArtifactConfig. + :rtype: object + """ + return self._resources + + @resources.setter + def resources(self, resources): + """Sets the resources of this DeploymentAutoArtifactConfig. + + Deployment resources # noqa: E501 + + :param resources: The resources of this DeploymentAutoArtifactConfig. + :type resources: object + """ + + self._resources = resources + + @property + def auto(self): + """Gets the auto of this DeploymentAutoArtifactConfig. + + When true, enables automatic template # noqa: E501 + + :return: The auto of this DeploymentAutoArtifactConfig. + :rtype: bool + """ + return self._auto + + @auto.setter + def auto(self, auto): + """Sets the auto of this DeploymentAutoArtifactConfig. + + When true, enables automatic template # noqa: E501 + + :param auto: The auto of this DeploymentAutoArtifactConfig. + :type auto: bool + """ + if auto is None: + raise ValueError("Invalid value for `auto`, must not be `None`") # noqa: E501 + + self._auto = auto + + @property + def name(self): + """Gets the name of this DeploymentAutoArtifactConfig. 
+ + + :return: The name of this DeploymentAutoArtifactConfig. + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this DeploymentAutoArtifactConfig. + + + :param name: The name of this DeploymentAutoArtifactConfig. + :type name: str + """ + + self._name = name diff --git a/libraries/models/cloudharness_model/models/deployment_resources_conf.py b/libraries/models/cloudharness_model/models/deployment_resources_conf.py new file mode 100644 index 00000000..520e2008 --- /dev/null +++ b/libraries/models/cloudharness_model/models/deployment_resources_conf.py @@ -0,0 +1,92 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.cpu_memory_config import CpuMemoryConfig +from cloudharness_model import util + +from cloudharness_model.models.cpu_memory_config import CpuMemoryConfig # noqa: E501 + +class DeploymentResourcesConf(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, requests=None, limits=None): # noqa: E501 + """DeploymentResourcesConf - a model defined in OpenAPI + + :param requests: The requests of this DeploymentResourcesConf. # noqa: E501 + :type requests: CpuMemoryConfig + :param limits: The limits of this DeploymentResourcesConf. # noqa: E501 + :type limits: CpuMemoryConfig + """ + self.openapi_types = { + 'requests': CpuMemoryConfig, + 'limits': CpuMemoryConfig + } + + self.attribute_map = { + 'requests': 'requests', + 'limits': 'limits' + } + + self._requests = requests + self._limits = limits + + @classmethod + def from_dict(cls, dikt) -> 'DeploymentResourcesConf': + """Returns the dict as a model + + :param dikt: A dict. 
+ :type: dict + :return: The DeploymentResourcesConf of this DeploymentResourcesConf. # noqa: E501 + :rtype: DeploymentResourcesConf + """ + return util.deserialize_model(dikt, cls) + + @property + def requests(self): + """Gets the requests of this DeploymentResourcesConf. + + + :return: The requests of this DeploymentResourcesConf. + :rtype: CpuMemoryConfig + """ + return self._requests + + @requests.setter + def requests(self, requests): + """Sets the requests of this DeploymentResourcesConf. + + + :param requests: The requests of this DeploymentResourcesConf. + :type requests: CpuMemoryConfig + """ + + self._requests = requests + + @property + def limits(self): + """Gets the limits of this DeploymentResourcesConf. + + + :return: The limits of this DeploymentResourcesConf. + :rtype: CpuMemoryConfig + """ + return self._limits + + @limits.setter + def limits(self, limits): + """Sets the limits of this DeploymentResourcesConf. + + + :param limits: The limits of this DeploymentResourcesConf. + :type limits: CpuMemoryConfig + """ + + self._limits = limits diff --git a/libraries/models/cloudharness_model/models/file_resources_config.py b/libraries/models/cloudharness_model/models/file_resources_config.py new file mode 100644 index 00000000..50709712 --- /dev/null +++ b/libraries/models/cloudharness_model/models/file_resources_config.py @@ -0,0 +1,128 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from cloudharness_model.models.base_model_ import Model +import re +from cloudharness_model import util + +import re # noqa: E501 + +class FileResourcesConfig(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. 
+ """ + + def __init__(self, name=None, src=None, dst=None): # noqa: E501 + """FileResourcesConfig - a model defined in OpenAPI + + :param name: The name of this FileResourcesConfig. # noqa: E501 + :type name: str + :param src: The src of this FileResourcesConfig. # noqa: E501 + :type src: str + :param dst: The dst of this FileResourcesConfig. # noqa: E501 + :type dst: str + """ + self.openapi_types = { + 'name': str, + 'src': str, + 'dst': str + } + + self.attribute_map = { + 'name': 'name', + 'src': 'src', + 'dst': 'dst' + } + + self._name = name + self._src = src + self._dst = dst + + @classmethod + def from_dict(cls, dikt) -> 'FileResourcesConfig': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The FileResourcesConfig of this FileResourcesConfig. # noqa: E501 + :rtype: FileResourcesConfig + """ + return util.deserialize_model(dikt, cls) + + @property + def name(self): + """Gets the name of this FileResourcesConfig. + + + :return: The name of this FileResourcesConfig. + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this FileResourcesConfig. + + + :param name: The name of this FileResourcesConfig. + :type name: str + """ + if name is None: + raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501 + if name is not None and not re.search(r'^[^<>:;,?*|]+$', name): # noqa: E501 + raise ValueError("Invalid value for `name`, must be a follow pattern or equal to `/^[^<>:;,?*|]+$/`") # noqa: E501 + + self._name = name + + @property + def src(self): + """Gets the src of this FileResourcesConfig. + + + :return: The src of this FileResourcesConfig. + :rtype: str + """ + return self._src + + @src.setter + def src(self, src): + """Sets the src of this FileResourcesConfig. + + + :param src: The src of this FileResourcesConfig. 
+ :type src: str + """ + if src is None: + raise ValueError("Invalid value for `src`, must not be `None`") # noqa: E501 + if src is not None and not re.search(r'^[^<>:;,?*|]+$', src): # noqa: E501 + raise ValueError("Invalid value for `src`, must be a follow pattern or equal to `/^[^<>:;,?*|]+$/`") # noqa: E501 + + self._src = src + + @property + def dst(self): + """Gets the dst of this FileResourcesConfig. + + + :return: The dst of this FileResourcesConfig. + :rtype: str + """ + return self._dst + + @dst.setter + def dst(self, dst): + """Sets the dst of this FileResourcesConfig. + + + :param dst: The dst of this FileResourcesConfig. + :type dst: str + """ + if dst is None: + raise ValueError("Invalid value for `dst`, must not be `None`") # noqa: E501 + + self._dst = dst diff --git a/libraries/models/cloudharness_model/models/harness_main_config.py b/libraries/models/cloudharness_model/models/harness_main_config.py new file mode 100644 index 00000000..d611ce20 --- /dev/null +++ b/libraries/models/cloudharness_model/models/harness_main_config.py @@ -0,0 +1,412 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.application_config import ApplicationConfig +from cloudharness_model.models.backup_config import BackupConfig +from cloudharness_model.models.name_value import NameValue +from cloudharness_model.models.registry_config import RegistryConfig +from cloudharness_model import util + +from cloudharness_model.models.application_config import ApplicationConfig # noqa: E501 +from cloudharness_model.models.backup_config import BackupConfig # noqa: E501 +from cloudharness_model.models.name_value import NameValue # noqa: E501 +from cloudharness_model.models.registry_config import RegistryConfig # noqa: E501 + +class HarnessMainConfig(Model): + """NOTE: This class is auto 
generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, local=None, secured_gatekeepers=None, domain=None, namespace=None, mainapp=None, registry=None, tag=None, apps=None, env=None, privenv=None, backup=None, name=None, task_images=None): # noqa: E501 + """HarnessMainConfig - a model defined in OpenAPI + + :param local: The local of this HarnessMainConfig. # noqa: E501 + :type local: bool + :param secured_gatekeepers: The secured_gatekeepers of this HarnessMainConfig. # noqa: E501 + :type secured_gatekeepers: bool + :param domain: The domain of this HarnessMainConfig. # noqa: E501 + :type domain: str + :param namespace: The namespace of this HarnessMainConfig. # noqa: E501 + :type namespace: str + :param mainapp: The mainapp of this HarnessMainConfig. # noqa: E501 + :type mainapp: str + :param registry: The registry of this HarnessMainConfig. # noqa: E501 + :type registry: RegistryConfig + :param tag: The tag of this HarnessMainConfig. # noqa: E501 + :type tag: str + :param apps: The apps of this HarnessMainConfig. # noqa: E501 + :type apps: Dict[str, ApplicationConfig] + :param env: The env of this HarnessMainConfig. # noqa: E501 + :type env: List[NameValue] + :param privenv: The privenv of this HarnessMainConfig. # noqa: E501 + :type privenv: NameValue + :param backup: The backup of this HarnessMainConfig. # noqa: E501 + :type backup: BackupConfig + :param name: The name of this HarnessMainConfig. # noqa: E501 + :type name: str + :param task_images: The task_images of this HarnessMainConfig. 
# noqa: E501 + :type task_images: Dict[str, str] + """ + self.openapi_types = { + 'local': bool, + 'secured_gatekeepers': bool, + 'domain': str, + 'namespace': str, + 'mainapp': str, + 'registry': RegistryConfig, + 'tag': str, + 'apps': Dict[str, ApplicationConfig], + 'env': List[NameValue], + 'privenv': NameValue, + 'backup': BackupConfig, + 'name': str, + 'task_images': Dict[str, str] + } + + self.attribute_map = { + 'local': 'local', + 'secured_gatekeepers': 'secured_gatekeepers', + 'domain': 'domain', + 'namespace': 'namespace', + 'mainapp': 'mainapp', + 'registry': 'registry', + 'tag': 'tag', + 'apps': 'apps', + 'env': 'env', + 'privenv': 'privenv', + 'backup': 'backup', + 'name': 'name', + 'task_images': 'task-images' + } + + self._local = local + self._secured_gatekeepers = secured_gatekeepers + self._domain = domain + self._namespace = namespace + self._mainapp = mainapp + self._registry = registry + self._tag = tag + self._apps = apps + self._env = env + self._privenv = privenv + self._backup = backup + self._name = name + self._task_images = task_images + + @classmethod + def from_dict(cls, dikt) -> 'HarnessMainConfig': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The HarnessMainConfig of this HarnessMainConfig. # noqa: E501 + :rtype: HarnessMainConfig + """ + return util.deserialize_model(dikt, cls) + + @property + def local(self): + """Gets the local of this HarnessMainConfig. + + If set to true, local DNS mapping is added to pods. # noqa: E501 + + :return: The local of this HarnessMainConfig. + :rtype: bool + """ + return self._local + + @local.setter + def local(self, local): + """Sets the local of this HarnessMainConfig. + + If set to true, local DNS mapping is added to pods. # noqa: E501 + + :param local: The local of this HarnessMainConfig. 
+ :type local: bool + """ + if local is None: + raise ValueError("Invalid value for `local`, must not be `None`") # noqa: E501 + + self._local = local + + @property + def secured_gatekeepers(self): + """Gets the secured_gatekeepers of this HarnessMainConfig. + + Enables/disables Gatekeepers on secured applications. Set to false for testing/development # noqa: E501 + + :return: The secured_gatekeepers of this HarnessMainConfig. + :rtype: bool + """ + return self._secured_gatekeepers + + @secured_gatekeepers.setter + def secured_gatekeepers(self, secured_gatekeepers): + """Sets the secured_gatekeepers of this HarnessMainConfig. + + Enables/disables Gatekeepers on secured applications. Set to false for testing/development # noqa: E501 + + :param secured_gatekeepers: The secured_gatekeepers of this HarnessMainConfig. + :type secured_gatekeepers: bool + """ + if secured_gatekeepers is None: + raise ValueError("Invalid value for `secured_gatekeepers`, must not be `None`") # noqa: E501 + + self._secured_gatekeepers = secured_gatekeepers + + @property + def domain(self): + """Gets the domain of this HarnessMainConfig. + + The root domain # noqa: E501 + + :return: The domain of this HarnessMainConfig. + :rtype: str + """ + return self._domain + + @domain.setter + def domain(self, domain): + """Sets the domain of this HarnessMainConfig. + + The root domain # noqa: E501 + + :param domain: The domain of this HarnessMainConfig. + :type domain: str + """ + if domain is None: + raise ValueError("Invalid value for `domain`, must not be `None`") # noqa: E501 + + self._domain = domain + + @property + def namespace(self): + """Gets the namespace of this HarnessMainConfig. + + The K8s namespace. # noqa: E501 + + :return: The namespace of this HarnessMainConfig. + :rtype: str + """ + return self._namespace + + @namespace.setter + def namespace(self, namespace): + """Sets the namespace of this HarnessMainConfig. + + The K8s namespace. 
# noqa: E501 + + :param namespace: The namespace of this HarnessMainConfig. + :type namespace: str + """ + if namespace is None: + raise ValueError("Invalid value for `namespace`, must not be `None`") # noqa: E501 + + self._namespace = namespace + + @property + def mainapp(self): + """Gets the mainapp of this HarnessMainConfig. + + Defines the app to map to the root domain # noqa: E501 + + :return: The mainapp of this HarnessMainConfig. + :rtype: str + """ + return self._mainapp + + @mainapp.setter + def mainapp(self, mainapp): + """Sets the mainapp of this HarnessMainConfig. + + Defines the app to map to the root domain # noqa: E501 + + :param mainapp: The mainapp of this HarnessMainConfig. + :type mainapp: str + """ + if mainapp is None: + raise ValueError("Invalid value for `mainapp`, must not be `None`") # noqa: E501 + + self._mainapp = mainapp + + @property + def registry(self): + """Gets the registry of this HarnessMainConfig. + + + :return: The registry of this HarnessMainConfig. + :rtype: RegistryConfig + """ + return self._registry + + @registry.setter + def registry(self, registry): + """Sets the registry of this HarnessMainConfig. + + + :param registry: The registry of this HarnessMainConfig. + :type registry: RegistryConfig + """ + + self._registry = registry + + @property + def tag(self): + """Gets the tag of this HarnessMainConfig. + + Docker tag used to push/pull the built images. # noqa: E501 + + :return: The tag of this HarnessMainConfig. + :rtype: str + """ + return self._tag + + @tag.setter + def tag(self, tag): + """Sets the tag of this HarnessMainConfig. + + Docker tag used to push/pull the built images. # noqa: E501 + + :param tag: The tag of this HarnessMainConfig. + :type tag: str + """ + + self._tag = tag + + @property + def apps(self): + """Gets the apps of this HarnessMainConfig. + + + :return: The apps of this HarnessMainConfig. 
+ :rtype: Dict[str, ApplicationConfig] + """ + return self._apps + + @apps.setter + def apps(self, apps): + """Sets the apps of this HarnessMainConfig. + + + :param apps: The apps of this HarnessMainConfig. + :type apps: Dict[str, ApplicationConfig] + """ + if apps is None: + raise ValueError("Invalid value for `apps`, must not be `None`") # noqa: E501 + + self._apps = apps + + @property + def env(self): + """Gets the env of this HarnessMainConfig. + + Environmental variables added to all pods # noqa: E501 + + :return: The env of this HarnessMainConfig. + :rtype: List[NameValue] + """ + return self._env + + @env.setter + def env(self, env): + """Sets the env of this HarnessMainConfig. + + Environmental variables added to all pods # noqa: E501 + + :param env: The env of this HarnessMainConfig. + :type env: List[NameValue] + """ + + self._env = env + + @property + def privenv(self): + """Gets the privenv of this HarnessMainConfig. + + + :return: The privenv of this HarnessMainConfig. + :rtype: NameValue + """ + return self._privenv + + @privenv.setter + def privenv(self, privenv): + """Sets the privenv of this HarnessMainConfig. + + + :param privenv: The privenv of this HarnessMainConfig. + :type privenv: NameValue + """ + + self._privenv = privenv + + @property + def backup(self): + """Gets the backup of this HarnessMainConfig. + + + :return: The backup of this HarnessMainConfig. + :rtype: BackupConfig + """ + return self._backup + + @backup.setter + def backup(self, backup): + """Sets the backup of this HarnessMainConfig. + + + :param backup: The backup of this HarnessMainConfig. + :type backup: BackupConfig + """ + + self._backup = backup + + @property + def name(self): + """Gets the name of this HarnessMainConfig. + + Base name # noqa: E501 + + :return: The name of this HarnessMainConfig. + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this HarnessMainConfig. 
+ + Base name # noqa: E501 + + :param name: The name of this HarnessMainConfig. + :type name: str + """ + + self._name = name + + @property + def task_images(self): + """Gets the task_images of this HarnessMainConfig. + + + :return: The task_images of this HarnessMainConfig. + :rtype: Dict[str, str] + """ + return self._task_images + + @task_images.setter + def task_images(self, task_images): + """Sets the task_images of this HarnessMainConfig. + + + :param task_images: The task_images of this HarnessMainConfig. + :type task_images: Dict[str, str] + """ + + self._task_images = task_images diff --git a/libraries/models/cloudharness_model/models/ingress_config.py b/libraries/models/cloudharness_model/models/ingress_config.py new file mode 100644 index 00000000..187a892e --- /dev/null +++ b/libraries/models/cloudharness_model/models/ingress_config.py @@ -0,0 +1,152 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.auto_artifact_spec import AutoArtifactSpec +from cloudharness_model.models.ingress_config_all_of import IngressConfigAllOf +from cloudharness_model.models.ingress_config_all_of_letsencrypt import IngressConfigAllOfLetsencrypt +from cloudharness_model import util + +from cloudharness_model.models.auto_artifact_spec import AutoArtifactSpec # noqa: E501 +from cloudharness_model.models.ingress_config_all_of import IngressConfigAllOf # noqa: E501 +from cloudharness_model.models.ingress_config_all_of_letsencrypt import IngressConfigAllOfLetsencrypt # noqa: E501 + +class IngressConfig(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. 
+ """ + + def __init__(self, ssl_redirect=None, letsencrypt=None, auto=None, name=None): # noqa: E501 + """IngressConfig - a model defined in OpenAPI + + :param ssl_redirect: The ssl_redirect of this IngressConfig. # noqa: E501 + :type ssl_redirect: bool + :param letsencrypt: The letsencrypt of this IngressConfig. # noqa: E501 + :type letsencrypt: IngressConfigAllOfLetsencrypt + :param auto: The auto of this IngressConfig. # noqa: E501 + :type auto: bool + :param name: The name of this IngressConfig. # noqa: E501 + :type name: str + """ + self.openapi_types = { + 'ssl_redirect': bool, + 'letsencrypt': IngressConfigAllOfLetsencrypt, + 'auto': bool, + 'name': str + } + + self.attribute_map = { + 'ssl_redirect': 'ssl_redirect', + 'letsencrypt': 'letsencrypt', + 'auto': 'auto', + 'name': 'name' + } + + self._ssl_redirect = ssl_redirect + self._letsencrypt = letsencrypt + self._auto = auto + self._name = name + + @classmethod + def from_dict(cls, dikt) -> 'IngressConfig': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The IngressConfig of this IngressConfig. # noqa: E501 + :rtype: IngressConfig + """ + return util.deserialize_model(dikt, cls) + + @property + def ssl_redirect(self): + """Gets the ssl_redirect of this IngressConfig. + + + :return: The ssl_redirect of this IngressConfig. + :rtype: bool + """ + return self._ssl_redirect + + @ssl_redirect.setter + def ssl_redirect(self, ssl_redirect): + """Sets the ssl_redirect of this IngressConfig. + + + :param ssl_redirect: The ssl_redirect of this IngressConfig. + :type ssl_redirect: bool + """ + + self._ssl_redirect = ssl_redirect + + @property + def letsencrypt(self): + """Gets the letsencrypt of this IngressConfig. + + + :return: The letsencrypt of this IngressConfig. + :rtype: IngressConfigAllOfLetsencrypt + """ + return self._letsencrypt + + @letsencrypt.setter + def letsencrypt(self, letsencrypt): + """Sets the letsencrypt of this IngressConfig. 
+ + + :param letsencrypt: The letsencrypt of this IngressConfig. + :type letsencrypt: IngressConfigAllOfLetsencrypt + """ + + self._letsencrypt = letsencrypt + + @property + def auto(self): + """Gets the auto of this IngressConfig. + + When true, enables automatic template # noqa: E501 + + :return: The auto of this IngressConfig. + :rtype: bool + """ + return self._auto + + @auto.setter + def auto(self, auto): + """Sets the auto of this IngressConfig. + + When true, enables automatic template # noqa: E501 + + :param auto: The auto of this IngressConfig. + :type auto: bool + """ + if auto is None: + raise ValueError("Invalid value for `auto`, must not be `None`") # noqa: E501 + + self._auto = auto + + @property + def name(self): + """Gets the name of this IngressConfig. + + + :return: The name of this IngressConfig. + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this IngressConfig. + + + :param name: The name of this IngressConfig. + :type name: str + """ + + self._name = name diff --git a/libraries/models/cloudharness_model/models/ingress_config_all_of.py b/libraries/models/cloudharness_model/models/ingress_config_all_of.py new file mode 100644 index 00000000..76fe8af2 --- /dev/null +++ b/libraries/models/cloudharness_model/models/ingress_config_all_of.py @@ -0,0 +1,92 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.ingress_config_all_of_letsencrypt import IngressConfigAllOfLetsencrypt +from cloudharness_model import util + +from cloudharness_model.models.ingress_config_all_of_letsencrypt import IngressConfigAllOfLetsencrypt # noqa: E501 + +class IngressConfigAllOf(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. 
+ """ + + def __init__(self, ssl_redirect=None, letsencrypt=None): # noqa: E501 + """IngressConfigAllOf - a model defined in OpenAPI + + :param ssl_redirect: The ssl_redirect of this IngressConfigAllOf. # noqa: E501 + :type ssl_redirect: bool + :param letsencrypt: The letsencrypt of this IngressConfigAllOf. # noqa: E501 + :type letsencrypt: IngressConfigAllOfLetsencrypt + """ + self.openapi_types = { + 'ssl_redirect': bool, + 'letsencrypt': IngressConfigAllOfLetsencrypt + } + + self.attribute_map = { + 'ssl_redirect': 'ssl_redirect', + 'letsencrypt': 'letsencrypt' + } + + self._ssl_redirect = ssl_redirect + self._letsencrypt = letsencrypt + + @classmethod + def from_dict(cls, dikt) -> 'IngressConfigAllOf': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The IngressConfig_allOf of this IngressConfigAllOf. # noqa: E501 + :rtype: IngressConfigAllOf + """ + return util.deserialize_model(dikt, cls) + + @property + def ssl_redirect(self): + """Gets the ssl_redirect of this IngressConfigAllOf. + + + :return: The ssl_redirect of this IngressConfigAllOf. + :rtype: bool + """ + return self._ssl_redirect + + @ssl_redirect.setter + def ssl_redirect(self, ssl_redirect): + """Sets the ssl_redirect of this IngressConfigAllOf. + + + :param ssl_redirect: The ssl_redirect of this IngressConfigAllOf. + :type ssl_redirect: bool + """ + + self._ssl_redirect = ssl_redirect + + @property + def letsencrypt(self): + """Gets the letsencrypt of this IngressConfigAllOf. + + + :return: The letsencrypt of this IngressConfigAllOf. + :rtype: IngressConfigAllOfLetsencrypt + """ + return self._letsencrypt + + @letsencrypt.setter + def letsencrypt(self, letsencrypt): + """Sets the letsencrypt of this IngressConfigAllOf. + + + :param letsencrypt: The letsencrypt of this IngressConfigAllOf. 
+ :type letsencrypt: IngressConfigAllOfLetsencrypt + """ + + self._letsencrypt = letsencrypt diff --git a/libraries/models/cloudharness_model/models/ingress_config_all_of_letsencrypt.py b/libraries/models/cloudharness_model/models/ingress_config_all_of_letsencrypt.py new file mode 100644 index 00000000..0e8a5ab8 --- /dev/null +++ b/libraries/models/cloudharness_model/models/ingress_config_all_of_letsencrypt.py @@ -0,0 +1,64 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from cloudharness_model.models.base_model_ import Model +from cloudharness_model import util + + +class IngressConfigAllOfLetsencrypt(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, email=None): # noqa: E501 + """IngressConfigAllOfLetsencrypt - a model defined in OpenAPI + + :param email: The email of this IngressConfigAllOfLetsencrypt. # noqa: E501 + :type email: str + """ + self.openapi_types = { + 'email': str + } + + self.attribute_map = { + 'email': 'email' + } + + self._email = email + + @classmethod + def from_dict(cls, dikt) -> 'IngressConfigAllOfLetsencrypt': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The IngressConfig_allOf_letsencrypt of this IngressConfigAllOfLetsencrypt. # noqa: E501 + :rtype: IngressConfigAllOfLetsencrypt + """ + return util.deserialize_model(dikt, cls) + + @property + def email(self): + """Gets the email of this IngressConfigAllOfLetsencrypt. + + + :return: The email of this IngressConfigAllOfLetsencrypt. + :rtype: str + """ + return self._email + + @email.setter + def email(self, email): + """Sets the email of this IngressConfigAllOfLetsencrypt. + + + :param email: The email of this IngressConfigAllOfLetsencrypt. 
# coding: utf-8

# Auto-generated OpenAPI model: JupyterHub-specific application configuration.

from __future__ import absolute_import
from datetime import date, datetime  # noqa: F401

from typing import List, Dict  # noqa: F401

from cloudharness_model.models.base_model_ import Model
from cloudharness_model import util


class JupyterHubConfig(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.
    """

    def __init__(self, args=None, extra_config=None, spawner_extra_config=None, application_hook=None):  # noqa: E501
        """JupyterHubConfig - a model defined in OpenAPI

        :param args: The args of this JupyterHubConfig.  # noqa: E501
        :type args: List[str]
        :param extra_config: The extra_config of this JupyterHubConfig.  # noqa: E501
        :type extra_config: Dict[str, str]
        :param spawner_extra_config: The spawner_extra_config of this JupyterHubConfig.  # noqa: E501
        :type spawner_extra_config: Dict[str, object]
        :param application_hook: The application_hook of this JupyterHubConfig.  # noqa: E501
        :type application_hook: object
        """
        # Attribute name -> declared type (model metadata for deserialization).
        self.openapi_types = {
            'args': List[str],
            'extra_config': Dict[str, str],
            'spawner_extra_config': Dict[str, object],
            'application_hook': object
        }

        # Python attribute name -> serialized key; note the camelCase renames.
        self.attribute_map = {
            'args': 'args',
            'extra_config': 'extraConfig',
            'spawner_extra_config': 'spawnerExtraConfig',
            'application_hook': 'applicationHook'
        }

        # Direct private-field assignment: property setters are bypassed here.
        self._args = args
        self._extra_config = extra_config
        self._spawner_extra_config = spawner_extra_config
        self._application_hook = application_hook

    @classmethod
    def from_dict(cls, dikt) -> 'JupyterHubConfig':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The JupyterHubConfig of this JupyterHubConfig.  # noqa: E501
        :rtype: JupyterHubConfig
        """
        return util.deserialize_model(dikt, cls)

    @property
    def args(self):
        """Gets the args of this JupyterHubConfig.

        arguments passed to the container  # noqa: E501

        :return: The args of this JupyterHubConfig.
        :rtype: List[str]
        """
        return self._args

    @args.setter
    def args(self, args):
        """Sets the args of this JupyterHubConfig.

        arguments passed to the container  # noqa: E501

        :param args: The args of this JupyterHubConfig.
        :type args: List[str]
        """
        # Optional field: no validation.
        self._args = args

    @property
    def extra_config(self):
        """Gets the extra_config of this JupyterHubConfig.


        :return: The extra_config of this JupyterHubConfig.
        :rtype: Dict[str, str]
        """
        return self._extra_config

    @extra_config.setter
    def extra_config(self, extra_config):
        """Sets the extra_config of this JupyterHubConfig.


        :param extra_config: The extra_config of this JupyterHubConfig.
        :type extra_config: Dict[str, str]
        """
        # Optional field: no validation.
        self._extra_config = extra_config

    @property
    def spawner_extra_config(self):
        """Gets the spawner_extra_config of this JupyterHubConfig.


        :return: The spawner_extra_config of this JupyterHubConfig.
        :rtype: Dict[str, object]
        """
        return self._spawner_extra_config

    @spawner_extra_config.setter
    def spawner_extra_config(self, spawner_extra_config):
        """Sets the spawner_extra_config of this JupyterHubConfig.


        :param spawner_extra_config: The spawner_extra_config of this JupyterHubConfig.
        :type spawner_extra_config: Dict[str, object]
        """
        # Optional field: no validation.
        self._spawner_extra_config = spawner_extra_config

    @property
    def application_hook(self):
        """Gets the application_hook of this JupyterHubConfig.

        change the hook function (advanced) Specify the Python name of the function (full module path, the module must be installed in the Docker image)  # noqa: E501

        :return: The application_hook of this JupyterHubConfig.
        :rtype: object
        """
        return self._application_hook

    @application_hook.setter
    def application_hook(self, application_hook):
        """Sets the application_hook of this JupyterHubConfig.

        change the hook function (advanced) Specify the Python name of the function (full module path, the module must be installed in the Docker image)  # noqa: E501

        :param application_hook: The application_hook of this JupyterHubConfig.
        :type application_hook: object
        """
        # Optional field: no validation.
        self._application_hook = application_hook
# coding: utf-8

# Auto-generated OpenAPI model: a simple name/value pair; `name` is required.

from __future__ import absolute_import
from datetime import date, datetime  # noqa: F401

from typing import List, Dict  # noqa: F401

from cloudharness_model.models.base_model_ import Model
from cloudharness_model import util


class NameValue(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.
    """

    def __init__(self, name=None, value=None):  # noqa: E501
        """NameValue - a model defined in OpenAPI

        :param name: The name of this NameValue.  # noqa: E501
        :type name: str
        :param value: The value of this NameValue.  # noqa: E501
        :type value: str
        """
        # Attribute name -> declared type (model metadata for deserialization).
        self.openapi_types = {
            'name': str,
            'value': str
        }

        # Python attribute name -> serialized key.
        self.attribute_map = {
            'name': 'name',
            'value': 'value'
        }

        # Direct private-field assignment: the `name` required-check in the
        # setter is NOT applied to constructor arguments.
        self._name = name
        self._value = value

    @classmethod
    def from_dict(cls, dikt) -> 'NameValue':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The NameValue of this NameValue.  # noqa: E501
        :rtype: NameValue
        """
        return util.deserialize_model(dikt, cls)

    @property
    def name(self):
        """Gets the name of this NameValue.


        :return: The name of this NameValue.
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this NameValue.


        :param name: The name of this NameValue.
        :type name: str

        :raises ValueError: if `name` is None (required field).
        """
        if name is None:
            raise ValueError("Invalid value for `name`, must not be `None`")  # noqa: E501

        self._name = name

    @property
    def value(self):
        """Gets the value of this NameValue.


        :return: The value of this NameValue.
        :rtype: str
        """
        return self._value

    @value.setter
    def value(self, value):
        """Sets the value of this NameValue.


        :param value: The value of this NameValue.
        :type value: str
        """
        # Optional field: no validation.
        self._value = value
# coding: utf-8

# Auto-generated OpenAPI model: an event message describing an operation
# (create/update/delete/other) on a resource, with metadata.

from __future__ import absolute_import
from datetime import date, datetime  # noqa: F401

from typing import List, Dict  # noqa: F401

from cloudharness_model.models.base_model_ import Model
from cloudharness_model.models.orchestration_event_meta import OrchestrationEventMeta
from cloudharness_model import util

# NOTE(review): duplicate import emitted by the generator; kept as generated.
from cloudharness_model.models.orchestration_event_meta import OrchestrationEventMeta  # noqa: E501

class OrchestrationEvent(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.
    """

    def __init__(self, operation=None, uid=None, message_type=None, resource=None, meta=None):  # noqa: E501
        """OrchestrationEvent - a model defined in OpenAPI

        :param operation: The operation of this OrchestrationEvent.  # noqa: E501
        :type operation: str
        :param uid: The uid of this OrchestrationEvent.  # noqa: E501
        :type uid: str
        :param message_type: The message_type of this OrchestrationEvent.  # noqa: E501
        :type message_type: str
        :param resource: The resource of this OrchestrationEvent.  # noqa: E501
        :type resource: Dict[str, object]
        :param meta: The meta of this OrchestrationEvent.  # noqa: E501
        :type meta: OrchestrationEventMeta
        """
        # Attribute name -> declared type (model metadata for deserialization).
        self.openapi_types = {
            'operation': str,
            'uid': str,
            'message_type': str,
            'resource': Dict[str, object],
            'meta': OrchestrationEventMeta
        }

        # Python attribute name -> serialized key.
        self.attribute_map = {
            'operation': 'operation',
            'uid': 'uid',
            'message_type': 'message_type',
            'resource': 'resource',
            'meta': 'meta'
        }

        # Direct private-field assignment: setter validation (enum check on
        # `operation`, required-checks on `uid`/`message_type`/`meta`) is NOT
        # applied to constructor arguments.
        self._operation = operation
        self._uid = uid
        self._message_type = message_type
        self._resource = resource
        self._meta = meta

    @classmethod
    def from_dict(cls, dikt) -> 'OrchestrationEvent':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The OrchestrationEvent of this OrchestrationEvent.  # noqa: E501
        :rtype: OrchestrationEvent
        """
        return util.deserialize_model(dikt, cls)

    @property
    def operation(self):
        """Gets the operation of this OrchestrationEvent.

        the operation on the object e.g. create / update / delete  # noqa: E501

        :return: The operation of this OrchestrationEvent.
        :rtype: str
        """
        return self._operation

    @operation.setter
    def operation(self, operation):
        """Sets the operation of this OrchestrationEvent.

        the operation on the object e.g. create / update / delete  # noqa: E501

        :param operation: The operation of this OrchestrationEvent.
        :type operation: str

        :raises ValueError: if `operation` is not one of the enum values
            (note this also rejects None).
        """
        allowed_values = ["create", "update", "delete", "other"]  # noqa: E501
        if operation not in allowed_values:
            raise ValueError(
                "Invalid value for `operation` ({0}), must be one of {1}"
                .format(operation, allowed_values)
            )

        self._operation = operation

    @property
    def uid(self):
        """Gets the uid of this OrchestrationEvent.

        the unique identifier attribute of the object  # noqa: E501

        :return: The uid of this OrchestrationEvent.
        :rtype: str
        """
        return self._uid

    @uid.setter
    def uid(self, uid):
        """Sets the uid of this OrchestrationEvent.

        the unique identifier attribute of the object  # noqa: E501

        :param uid: The uid of this OrchestrationEvent.
        :type uid: str

        :raises ValueError: if `uid` is None (required field).
        """
        if uid is None:
            raise ValueError("Invalid value for `uid`, must not be `None`")  # noqa: E501

        self._uid = uid

    @property
    def message_type(self):
        """Gets the message_type of this OrchestrationEvent.

        the type of the message (relates to the object type) e.g. jobs  # noqa: E501

        :return: The message_type of this OrchestrationEvent.
        :rtype: str
        """
        return self._message_type

    @message_type.setter
    def message_type(self, message_type):
        """Sets the message_type of this OrchestrationEvent.

        the type of the message (relates to the object type) e.g. jobs  # noqa: E501

        :param message_type: The message_type of this OrchestrationEvent.
        :type message_type: str

        :raises ValueError: if `message_type` is None (required field).
        """
        if message_type is None:
            raise ValueError("Invalid value for `message_type`, must not be `None`")  # noqa: E501

        self._message_type = message_type

    @property
    def resource(self):
        """Gets the resource of this OrchestrationEvent.


        :return: The resource of this OrchestrationEvent.
        :rtype: Dict[str, object]
        """
        return self._resource

    @resource.setter
    def resource(self, resource):
        """Sets the resource of this OrchestrationEvent.


        :param resource: The resource of this OrchestrationEvent.
        :type resource: Dict[str, object]
        """
        # Optional field: no validation.
        self._resource = resource

    @property
    def meta(self):
        """Gets the meta of this OrchestrationEvent.


        :return: The meta of this OrchestrationEvent.
        :rtype: OrchestrationEventMeta
        """
        return self._meta

    @meta.setter
    def meta(self, meta):
        """Sets the meta of this OrchestrationEvent.


        :param meta: The meta of this OrchestrationEvent.
        :type meta: OrchestrationEventMeta

        :raises ValueError: if `meta` is None (required field).
        """
        if meta is None:
            raise ValueError("Invalid value for `meta`, must not be `None`")  # noqa: E501

        self._meta = meta
# coding: utf-8

# Auto-generated OpenAPI model: metadata attached to an orchestration event
# (sending application, acting user, caller arguments, free-text description).

from __future__ import absolute_import
from datetime import date, datetime  # noqa: F401

from typing import List, Dict  # noqa: F401

from cloudharness_model.models.base_model_ import Model
from cloudharness_model.models.user import User
from cloudharness_model import util

# NOTE(review): duplicate import emitted by the generator; kept as generated.
from cloudharness_model.models.user import User  # noqa: E501

class OrchestrationEventMeta(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.
    """

    def __init__(self, app_name=None, user=None, args=None, kwargs=None, description=None):  # noqa: E501
        """OrchestrationEventMeta - a model defined in OpenAPI

        :param app_name: The app_name of this OrchestrationEventMeta.  # noqa: E501
        :type app_name: str
        :param user: The user of this OrchestrationEventMeta.  # noqa: E501
        :type user: User
        :param args: The args of this OrchestrationEventMeta.  # noqa: E501
        :type args: List[Dict]
        :param kwargs: The kwargs of this OrchestrationEventMeta.  # noqa: E501
        :type kwargs: object
        :param description: The description of this OrchestrationEventMeta.  # noqa: E501
        :type description: str
        """
        # Attribute name -> declared type (model metadata for deserialization).
        self.openapi_types = {
            'app_name': str,
            'user': User,
            'args': List[Dict],
            'kwargs': object,
            'description': str
        }

        # Python attribute name -> serialized key.
        self.attribute_map = {
            'app_name': 'app_name',
            'user': 'user',
            'args': 'args',
            'kwargs': 'kwargs',
            'description': 'description'
        }

        # Direct private-field assignment: the `app_name` required-check in the
        # setter is NOT applied to constructor arguments.
        self._app_name = app_name
        self._user = user
        self._args = args
        self._kwargs = kwargs
        self._description = description

    @classmethod
    def from_dict(cls, dikt) -> 'OrchestrationEventMeta':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The OrchestrationEventMeta of this OrchestrationEventMeta.  # noqa: E501
        :rtype: OrchestrationEventMeta
        """
        return util.deserialize_model(dikt, cls)

    @property
    def app_name(self):
        """Gets the app_name of this OrchestrationEventMeta.

        The name of the application/microservice sending the message  # noqa: E501

        :return: The app_name of this OrchestrationEventMeta.
        :rtype: str
        """
        return self._app_name

    @app_name.setter
    def app_name(self, app_name):
        """Sets the app_name of this OrchestrationEventMeta.

        The name of the application/microservice sending the message  # noqa: E501

        :param app_name: The app_name of this OrchestrationEventMeta.
        :type app_name: str

        :raises ValueError: if `app_name` is None (required field).
        """
        if app_name is None:
            raise ValueError("Invalid value for `app_name`, must not be `None`")  # noqa: E501

        self._app_name = app_name

    @property
    def user(self):
        """Gets the user of this OrchestrationEventMeta.


        :return: The user of this OrchestrationEventMeta.
        :rtype: User
        """
        return self._user

    @user.setter
    def user(self, user):
        """Sets the user of this OrchestrationEventMeta.


        :param user: The user of this OrchestrationEventMeta.
        :type user: User
        """
        # Optional field: no validation.
        self._user = user

    @property
    def args(self):
        """Gets the args of this OrchestrationEventMeta.

        the caller function arguments  # noqa: E501

        :return: The args of this OrchestrationEventMeta.
        :rtype: List[Dict]
        """
        return self._args

    @args.setter
    def args(self, args):
        """Sets the args of this OrchestrationEventMeta.

        the caller function arguments  # noqa: E501

        :param args: The args of this OrchestrationEventMeta.
        :type args: List[Dict]
        """
        # Optional field: no validation.
        self._args = args

    @property
    def kwargs(self):
        """Gets the kwargs of this OrchestrationEventMeta.

        the caller function keyword arguments  # noqa: E501

        :return: The kwargs of this OrchestrationEventMeta.
        :rtype: object
        """
        return self._kwargs

    @kwargs.setter
    def kwargs(self, kwargs):
        """Sets the kwargs of this OrchestrationEventMeta.

        the caller function keyword arguments  # noqa: E501

        :param kwargs: The kwargs of this OrchestrationEventMeta.
        :type kwargs: object
        """
        # Optional field: no validation.
        self._kwargs = kwargs

    @property
    def description(self):
        """Gets the description of this OrchestrationEventMeta.

        General description -- for human consumption  # noqa: E501

        :return: The description of this OrchestrationEventMeta.
        :rtype: str
        """
        return self._description

    @description.setter
    def description(self, description):
        """Sets the description of this OrchestrationEventMeta.

        General description -- for human consumption  # noqa: E501

        :param description: The description of this OrchestrationEventMeta.
        :type description: str
        """
        # Optional field: no validation.
        self._description = description
# coding: utf-8

# Auto-generated OpenAPI model: docker registry configuration (name + optional
# pull secret).

from __future__ import absolute_import
from datetime import date, datetime  # noqa: F401

from typing import List, Dict  # noqa: F401

from cloudharness_model.models.base_model_ import Model
from cloudharness_model import util


class RegistryConfig(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.
    """

    def __init__(self, name=None, secret=None):  # noqa: E501
        """RegistryConfig - a model defined in OpenAPI

        :param name: The name of this RegistryConfig.  # noqa: E501
        :type name: str
        :param secret: The secret of this RegistryConfig.  # noqa: E501
        :type secret: str
        """
        # Attribute name -> declared type (model metadata for deserialization).
        self.openapi_types = {
            'name': str,
            'secret': str
        }

        # Python attribute name -> serialized key.
        self.attribute_map = {
            'name': 'name',
            'secret': 'secret'
        }

        # Direct private-field assignment: the `name` required-check in the
        # setter is NOT applied to constructor arguments.
        self._name = name
        self._secret = secret

    @classmethod
    def from_dict(cls, dikt) -> 'RegistryConfig':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The RegistryConfig of this RegistryConfig.  # noqa: E501
        :rtype: RegistryConfig
        """
        return util.deserialize_model(dikt, cls)

    @property
    def name(self):
        """Gets the name of this RegistryConfig.


        :return: The name of this RegistryConfig.
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this RegistryConfig.


        :param name: The name of this RegistryConfig.
        :type name: str

        :raises ValueError: if `name` is None (required field).
        """
        if name is None:
            raise ValueError("Invalid value for `name`, must not be `None`")  # noqa: E501

        self._name = name

    @property
    def secret(self):
        """Gets the secret of this RegistryConfig.

        Optional secret used for pulling from docker registry.  # noqa: E501

        :return: The secret of this RegistryConfig.
        :rtype: str
        """
        return self._secret

    @secret.setter
    def secret(self, secret):
        """Sets the secret of this RegistryConfig.

        Optional secret used for pulling from docker registry.  # noqa: E501

        :param secret: The secret of this RegistryConfig.
        :type secret: str
        """
        # Optional field: no validation.
        self._secret = secret
# coding: utf-8

# Auto-generated OpenAPI model: composed (allOf) service artifact config --
# AutoArtifactSpec (auto, name) plus the service port.

from __future__ import absolute_import
from datetime import date, datetime  # noqa: F401

from typing import List, Dict  # noqa: F401

from cloudharness_model.models.base_model_ import Model
from cloudharness_model.models.auto_artifact_spec import AutoArtifactSpec
from cloudharness_model.models.service_auto_artifact_config_all_of import ServiceAutoArtifactConfigAllOf
from cloudharness_model import util

# NOTE(review): duplicate imports emitted by the generator; kept as generated.
from cloudharness_model.models.auto_artifact_spec import AutoArtifactSpec  # noqa: E501
from cloudharness_model.models.service_auto_artifact_config_all_of import ServiceAutoArtifactConfigAllOf  # noqa: E501

class ServiceAutoArtifactConfig(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.
    """

    def __init__(self, port=None, auto=None, name=None):  # noqa: E501
        """ServiceAutoArtifactConfig - a model defined in OpenAPI

        :param port: The port of this ServiceAutoArtifactConfig.  # noqa: E501
        :type port: int
        :param auto: The auto of this ServiceAutoArtifactConfig.  # noqa: E501
        :type auto: bool
        :param name: The name of this ServiceAutoArtifactConfig.  # noqa: E501
        :type name: str
        """
        # Attribute name -> declared type (model metadata for deserialization).
        self.openapi_types = {
            'port': int,
            'auto': bool,
            'name': str
        }

        # Python attribute name -> serialized key.
        self.attribute_map = {
            'port': 'port',
            'auto': 'auto',
            'name': 'name'
        }

        # Direct private-field assignment: the `auto` required-check in the
        # setter is NOT applied to constructor arguments.
        self._port = port
        self._auto = auto
        self._name = name

    @classmethod
    def from_dict(cls, dikt) -> 'ServiceAutoArtifactConfig':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The ServiceAutoArtifactConfig of this ServiceAutoArtifactConfig.  # noqa: E501
        :rtype: ServiceAutoArtifactConfig
        """
        return util.deserialize_model(dikt, cls)

    @property
    def port(self):
        """Gets the port of this ServiceAutoArtifactConfig.

        Service port  # noqa: E501

        :return: The port of this ServiceAutoArtifactConfig.
        :rtype: int
        """
        return self._port

    @port.setter
    def port(self, port):
        """Sets the port of this ServiceAutoArtifactConfig.

        Service port  # noqa: E501

        :param port: The port of this ServiceAutoArtifactConfig.
        :type port: int
        """
        # Optional field: no validation.
        self._port = port

    @property
    def auto(self):
        """Gets the auto of this ServiceAutoArtifactConfig.

        When true, enables automatic template  # noqa: E501

        :return: The auto of this ServiceAutoArtifactConfig.
        :rtype: bool
        """
        return self._auto

    @auto.setter
    def auto(self, auto):
        """Sets the auto of this ServiceAutoArtifactConfig.

        When true, enables automatic template  # noqa: E501

        :param auto: The auto of this ServiceAutoArtifactConfig.
        :type auto: bool

        :raises ValueError: if `auto` is None (required field).
        """
        if auto is None:
            raise ValueError("Invalid value for `auto`, must not be `None`")  # noqa: E501

        self._auto = auto

    @property
    def name(self):
        """Gets the name of this ServiceAutoArtifactConfig.


        :return: The name of this ServiceAutoArtifactConfig.
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this ServiceAutoArtifactConfig.


        :param name: The name of this ServiceAutoArtifactConfig.
        :type name: str
        """
        # Optional field: no validation.
        self._name = name
# coding: utf-8

# Auto-generated OpenAPI model: the allOf fragment contributing the `port`
# field to ServiceAutoArtifactConfig.

from __future__ import absolute_import
from datetime import date, datetime  # noqa: F401

from typing import List, Dict  # noqa: F401

from cloudharness_model.models.base_model_ import Model
from cloudharness_model import util


class ServiceAutoArtifactConfigAllOf(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.
    """

    def __init__(self, port=None):  # noqa: E501
        """ServiceAutoArtifactConfigAllOf - a model defined in OpenAPI

        :param port: The port of this ServiceAutoArtifactConfigAllOf.  # noqa: E501
        :type port: int
        """
        # Attribute name -> declared type (model metadata for deserialization).
        self.openapi_types = {
            'port': int
        }

        # Python attribute name -> serialized key.
        self.attribute_map = {
            'port': 'port'
        }

        # Direct private-field assignment: the property setter is bypassed here.
        self._port = port

    @classmethod
    def from_dict(cls, dikt) -> 'ServiceAutoArtifactConfigAllOf':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The ServiceAutoArtifactConfig_allOf of this ServiceAutoArtifactConfigAllOf.  # noqa: E501
        :rtype: ServiceAutoArtifactConfigAllOf
        """
        return util.deserialize_model(dikt, cls)

    @property
    def port(self):
        """Gets the port of this ServiceAutoArtifactConfigAllOf.

        Service port  # noqa: E501

        :return: The port of this ServiceAutoArtifactConfigAllOf.
        :rtype: int
        """
        return self._port

    @port.setter
    def port(self, port):
        """Sets the port of this ServiceAutoArtifactConfigAllOf.

        Service port  # noqa: E501

        :param port: The port of this ServiceAutoArtifactConfigAllOf.
        :type port: int
        """
        # Optional field: no validation.
        self._port = port
# coding: utf-8

# Auto-generated OpenAPI model: maps a URI pattern to the roles allowed to
# access it; both fields are required and `uri` is pattern-validated.

from __future__ import absolute_import
from datetime import date, datetime  # noqa: F401

from typing import List, Dict  # noqa: F401

from cloudharness_model.models.base_model_ import Model
import re
from cloudharness_model import util

# NOTE(review): duplicate import emitted by the generator; kept as generated.
import re  # noqa: E501

class UriRoleMappingConfig(Model):
    """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).

    Do not edit the class manually.
    """

    def __init__(self, uri=None, roles=None):  # noqa: E501
        """UriRoleMappingConfig - a model defined in OpenAPI

        :param uri: The uri of this UriRoleMappingConfig.  # noqa: E501
        :type uri: str
        :param roles: The roles of this UriRoleMappingConfig.  # noqa: E501
        :type roles: List[str]
        """
        # Attribute name -> declared type (model metadata for deserialization).
        self.openapi_types = {
            'uri': str,
            'roles': List[str]
        }

        # Python attribute name -> serialized key.
        self.attribute_map = {
            'uri': 'uri',
            'roles': 'roles'
        }

        # Direct private-field assignment: the required/pattern checks in the
        # setters are NOT applied to constructor arguments.
        self._uri = uri
        self._roles = roles

    @classmethod
    def from_dict(cls, dikt) -> 'UriRoleMappingConfig':
        """Returns the dict as a model

        :param dikt: A dict.
        :type: dict
        :return: The UriRoleMappingConfig of this UriRoleMappingConfig.  # noqa: E501
        :rtype: UriRoleMappingConfig
        """
        return util.deserialize_model(dikt, cls)

    @property
    def uri(self):
        """Gets the uri of this UriRoleMappingConfig.


        :return: The uri of this UriRoleMappingConfig.
        :rtype: str
        """
        return self._uri

    @uri.setter
    def uri(self, uri):
        """Sets the uri of this UriRoleMappingConfig.


        :param uri: The uri of this UriRoleMappingConfig.
        :type uri: str

        :raises ValueError: if `uri` is None (required field) or contains any
            of the characters forbidden by the schema pattern (< > : ; , ? |).
        """
        if uri is None:
            raise ValueError("Invalid value for `uri`, must not be `None`")  # noqa: E501
        if uri is not None and not re.search(r'^[^<>:;,?|]+$', uri):  # noqa: E501
            raise ValueError("Invalid value for `uri`, must be a follow pattern or equal to `/^[^<>:;,?|]+$/`")  # noqa: E501

        self._uri = uri

    @property
    def roles(self):
        """Gets the roles of this UriRoleMappingConfig.

        Roles allowed to access the present uri  # noqa: E501

        :return: The roles of this UriRoleMappingConfig.
        :rtype: List[str]
        """
        return self._roles

    @roles.setter
    def roles(self, roles):
        """Sets the roles of this UriRoleMappingConfig.

        Roles allowed to access the present uri  # noqa: E501

        :param roles: The roles of this UriRoleMappingConfig.
        :type roles: List[str]

        :raises ValueError: if `roles` is None (required field).
        """
        if roles is None:
            raise ValueError("Invalid value for `roles`, must not be `None`")  # noqa: E501

        self._roles = roles
+ :rtype: str + """ + return self._uri + + @uri.setter + def uri(self, uri): + """Sets the uri of this UriRoleMappingConfig. + + + :param uri: The uri of this UriRoleMappingConfig. + :type uri: str + """ + if uri is None: + raise ValueError("Invalid value for `uri`, must not be `None`") # noqa: E501 + if uri is not None and not re.search(r'^[^<>:;,?|]+$', uri): # noqa: E501 + raise ValueError("Invalid value for `uri`, must be a follow pattern or equal to `/^[^<>:;,?|]+$/`") # noqa: E501 + + self._uri = uri + + @property + def roles(self): + """Gets the roles of this UriRoleMappingConfig. + + Roles allowed to access the present uri # noqa: E501 + + :return: The roles of this UriRoleMappingConfig. + :rtype: List[str] + """ + return self._roles + + @roles.setter + def roles(self, roles): + """Sets the roles of this UriRoleMappingConfig. + + Roles allowed to access the present uri # noqa: E501 + + :param roles: The roles of this UriRoleMappingConfig. + :type roles: List[str] + """ + if roles is None: + raise ValueError("Invalid value for `roles`, must not be `None`") # noqa: E501 + + self._roles = roles diff --git a/libraries/models/cloudharness_model/models/user.py b/libraries/models/cloudharness_model/models/user.py new file mode 100644 index 00000000..3d1c1895 --- /dev/null +++ b/libraries/models/cloudharness_model/models/user.py @@ -0,0 +1,534 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from cloudharness_model.models.base_model_ import Model +from cloudharness_model.models.user_credential import UserCredential +from cloudharness_model import util + +from cloudharness_model.models.user_credential import UserCredential # noqa: E501 + +class User(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. 
+ """ + + def __init__(self, access=None, attributes=None, client_roles=None, created_timestamp=None, credentials=None, disableable_credential_types=None, email=None, email_verified=None, enabled=None, federation_link=None, first_name=None, groups=None, id=None, last_name=None, realm_roles=None, required_actions=None, service_account_client_id=None, username=None, additional_properties=None): # noqa: E501 + """User - a model defined in OpenAPI + + :param access: The access of this User. # noqa: E501 + :type access: Dict[str, object] + :param attributes: The attributes of this User. # noqa: E501 + :type attributes: Dict[str, object] + :param client_roles: The client_roles of this User. # noqa: E501 + :type client_roles: Dict[str, object] + :param created_timestamp: The created_timestamp of this User. # noqa: E501 + :type created_timestamp: int + :param credentials: The credentials of this User. # noqa: E501 + :type credentials: List[UserCredential] + :param disableable_credential_types: The disableable_credential_types of this User. # noqa: E501 + :type disableable_credential_types: List[str] + :param email: The email of this User. # noqa: E501 + :type email: str + :param email_verified: The email_verified of this User. # noqa: E501 + :type email_verified: bool + :param enabled: The enabled of this User. # noqa: E501 + :type enabled: bool + :param federation_link: The federation_link of this User. # noqa: E501 + :type federation_link: str + :param first_name: The first_name of this User. # noqa: E501 + :type first_name: str + :param groups: The groups of this User. # noqa: E501 + :type groups: List[str] + :param id: The id of this User. # noqa: E501 + :type id: str + :param last_name: The last_name of this User. # noqa: E501 + :type last_name: str + :param realm_roles: The realm_roles of this User. # noqa: E501 + :type realm_roles: List[str] + :param required_actions: The required_actions of this User. 
# noqa: E501 + :type required_actions: List[str] + :param service_account_client_id: The service_account_client_id of this User. # noqa: E501 + :type service_account_client_id: str + :param username: The username of this User. # noqa: E501 + :type username: str + :param additional_properties: The additional_properties of this User. # noqa: E501 + :type additional_properties: object + """ + self.openapi_types = { + 'access': Dict[str, object], + 'attributes': Dict[str, object], + 'client_roles': Dict[str, object], + 'created_timestamp': int, + 'credentials': List[UserCredential], + 'disableable_credential_types': List[str], + 'email': str, + 'email_verified': bool, + 'enabled': bool, + 'federation_link': str, + 'first_name': str, + 'groups': List[str], + 'id': str, + 'last_name': str, + 'realm_roles': List[str], + 'required_actions': List[str], + 'service_account_client_id': str, + 'username': str, + 'additional_properties': object + } + + self.attribute_map = { + 'access': 'access', + 'attributes': 'attributes', + 'client_roles': 'clientRoles', + 'created_timestamp': 'createdTimestamp', + 'credentials': 'credentials', + 'disableable_credential_types': 'disableableCredentialTypes', + 'email': 'email', + 'email_verified': 'emailVerified', + 'enabled': 'enabled', + 'federation_link': 'federationLink', + 'first_name': 'firstName', + 'groups': 'groups', + 'id': 'id', + 'last_name': 'lastName', + 'realm_roles': 'realmRoles', + 'required_actions': 'requiredActions', + 'service_account_client_id': 'serviceAccountClientId', + 'username': 'username', + 'additional_properties': 'additionalProperties' + } + + self._access = access + self._attributes = attributes + self._client_roles = client_roles + self._created_timestamp = created_timestamp + self._credentials = credentials + self._disableable_credential_types = disableable_credential_types + self._email = email + self._email_verified = email_verified + self._enabled = enabled + self._federation_link = federation_link + 
self._first_name = first_name + self._groups = groups + self._id = id + self._last_name = last_name + self._realm_roles = realm_roles + self._required_actions = required_actions + self._service_account_client_id = service_account_client_id + self._username = username + self._additional_properties = additional_properties + + @classmethod + def from_dict(cls, dikt) -> 'User': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The User of this User. # noqa: E501 + :rtype: User + """ + return util.deserialize_model(dikt, cls) + + @property + def access(self): + """Gets the access of this User. + + + :return: The access of this User. + :rtype: Dict[str, object] + """ + return self._access + + @access.setter + def access(self, access): + """Sets the access of this User. + + + :param access: The access of this User. + :type access: Dict[str, object] + """ + + self._access = access + + @property + def attributes(self): + """Gets the attributes of this User. + + + :return: The attributes of this User. + :rtype: Dict[str, object] + """ + return self._attributes + + @attributes.setter + def attributes(self, attributes): + """Sets the attributes of this User. + + + :param attributes: The attributes of this User. + :type attributes: Dict[str, object] + """ + + self._attributes = attributes + + @property + def client_roles(self): + """Gets the client_roles of this User. + + + :return: The client_roles of this User. + :rtype: Dict[str, object] + """ + return self._client_roles + + @client_roles.setter + def client_roles(self, client_roles): + """Sets the client_roles of this User. + + + :param client_roles: The client_roles of this User. + :type client_roles: Dict[str, object] + """ + + self._client_roles = client_roles + + @property + def created_timestamp(self): + """Gets the created_timestamp of this User. + + + :return: The created_timestamp of this User. 
+ :rtype: int + """ + return self._created_timestamp + + @created_timestamp.setter + def created_timestamp(self, created_timestamp): + """Sets the created_timestamp of this User. + + + :param created_timestamp: The created_timestamp of this User. + :type created_timestamp: int + """ + + self._created_timestamp = created_timestamp + + @property + def credentials(self): + """Gets the credentials of this User. + + + :return: The credentials of this User. + :rtype: List[UserCredential] + """ + return self._credentials + + @credentials.setter + def credentials(self, credentials): + """Sets the credentials of this User. + + + :param credentials: The credentials of this User. + :type credentials: List[UserCredential] + """ + + self._credentials = credentials + + @property + def disableable_credential_types(self): + """Gets the disableable_credential_types of this User. + + + :return: The disableable_credential_types of this User. + :rtype: List[str] + """ + return self._disableable_credential_types + + @disableable_credential_types.setter + def disableable_credential_types(self, disableable_credential_types): + """Sets the disableable_credential_types of this User. + + + :param disableable_credential_types: The disableable_credential_types of this User. + :type disableable_credential_types: List[str] + """ + + self._disableable_credential_types = disableable_credential_types + + @property + def email(self): + """Gets the email of this User. + + + :return: The email of this User. + :rtype: str + """ + return self._email + + @email.setter + def email(self, email): + """Sets the email of this User. + + + :param email: The email of this User. + :type email: str + """ + + self._email = email + + @property + def email_verified(self): + """Gets the email_verified of this User. + + + :return: The email_verified of this User. 
+ :rtype: bool + """ + return self._email_verified + + @email_verified.setter + def email_verified(self, email_verified): + """Sets the email_verified of this User. + + + :param email_verified: The email_verified of this User. + :type email_verified: bool + """ + + self._email_verified = email_verified + + @property + def enabled(self): + """Gets the enabled of this User. + + + :return: The enabled of this User. + :rtype: bool + """ + return self._enabled + + @enabled.setter + def enabled(self, enabled): + """Sets the enabled of this User. + + + :param enabled: The enabled of this User. + :type enabled: bool + """ + + self._enabled = enabled + + @property + def federation_link(self): + """Gets the federation_link of this User. + + + :return: The federation_link of this User. + :rtype: str + """ + return self._federation_link + + @federation_link.setter + def federation_link(self, federation_link): + """Sets the federation_link of this User. + + + :param federation_link: The federation_link of this User. + :type federation_link: str + """ + + self._federation_link = federation_link + + @property + def first_name(self): + """Gets the first_name of this User. + + + :return: The first_name of this User. + :rtype: str + """ + return self._first_name + + @first_name.setter + def first_name(self, first_name): + """Sets the first_name of this User. + + + :param first_name: The first_name of this User. + :type first_name: str + """ + + self._first_name = first_name + + @property + def groups(self): + """Gets the groups of this User. + + + :return: The groups of this User. + :rtype: List[str] + """ + return self._groups + + @groups.setter + def groups(self, groups): + """Sets the groups of this User. + + + :param groups: The groups of this User. + :type groups: List[str] + """ + + self._groups = groups + + @property + def id(self): + """Gets the id of this User. + + + :return: The id of this User. 
+ :rtype: str + """ + return self._id + + @id.setter + def id(self, id): + """Sets the id of this User. + + + :param id: The id of this User. + :type id: str + """ + + self._id = id + + @property + def last_name(self): + """Gets the last_name of this User. + + + :return: The last_name of this User. + :rtype: str + """ + return self._last_name + + @last_name.setter + def last_name(self, last_name): + """Sets the last_name of this User. + + + :param last_name: The last_name of this User. + :type last_name: str + """ + + self._last_name = last_name + + @property + def realm_roles(self): + """Gets the realm_roles of this User. + + + :return: The realm_roles of this User. + :rtype: List[str] + """ + return self._realm_roles + + @realm_roles.setter + def realm_roles(self, realm_roles): + """Sets the realm_roles of this User. + + + :param realm_roles: The realm_roles of this User. + :type realm_roles: List[str] + """ + + self._realm_roles = realm_roles + + @property + def required_actions(self): + """Gets the required_actions of this User. + + + :return: The required_actions of this User. + :rtype: List[str] + """ + return self._required_actions + + @required_actions.setter + def required_actions(self, required_actions): + """Sets the required_actions of this User. + + + :param required_actions: The required_actions of this User. + :type required_actions: List[str] + """ + + self._required_actions = required_actions + + @property + def service_account_client_id(self): + """Gets the service_account_client_id of this User. + + + :return: The service_account_client_id of this User. + :rtype: str + """ + return self._service_account_client_id + + @service_account_client_id.setter + def service_account_client_id(self, service_account_client_id): + """Sets the service_account_client_id of this User. + + + :param service_account_client_id: The service_account_client_id of this User. 
+ :type service_account_client_id: str + """ + + self._service_account_client_id = service_account_client_id + + @property + def username(self): + """Gets the username of this User. + + + :return: The username of this User. + :rtype: str + """ + return self._username + + @username.setter + def username(self, username): + """Sets the username of this User. + + + :param username: The username of this User. + :type username: str + """ + + self._username = username + + @property + def additional_properties(self): + """Gets the additional_properties of this User. + + + :return: The additional_properties of this User. + :rtype: object + """ + return self._additional_properties + + @additional_properties.setter + def additional_properties(self, additional_properties): + """Sets the additional_properties of this User. + + + :param additional_properties: The additional_properties of this User. + :type additional_properties: object + """ + + self._additional_properties = additional_properties diff --git a/libraries/models/cloudharness_model/models/user_credential.py b/libraries/models/cloudharness_model/models/user_credential.py new file mode 100644 index 00000000..4f54ddf5 --- /dev/null +++ b/libraries/models/cloudharness_model/models/user_credential.py @@ -0,0 +1,272 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from cloudharness_model.models.base_model_ import Model +from cloudharness_model import util + + +class UserCredential(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, created_date=None, credential_data=None, id=None, priority=None, secret_data=None, temporary=None, type=None, user_label=None, value=None): # noqa: E501 + """UserCredential - a model defined in OpenAPI + + :param created_date: The created_date of this UserCredential. 
# noqa: E501 + :type created_date: int + :param credential_data: The credential_data of this UserCredential. # noqa: E501 + :type credential_data: str + :param id: The id of this UserCredential. # noqa: E501 + :type id: str + :param priority: The priority of this UserCredential. # noqa: E501 + :type priority: int + :param secret_data: The secret_data of this UserCredential. # noqa: E501 + :type secret_data: str + :param temporary: The temporary of this UserCredential. # noqa: E501 + :type temporary: bool + :param type: The type of this UserCredential. # noqa: E501 + :type type: str + :param user_label: The user_label of this UserCredential. # noqa: E501 + :type user_label: str + :param value: The value of this UserCredential. # noqa: E501 + :type value: str + """ + self.openapi_types = { + 'created_date': int, + 'credential_data': str, + 'id': str, + 'priority': int, + 'secret_data': str, + 'temporary': bool, + 'type': str, + 'user_label': str, + 'value': str + } + + self.attribute_map = { + 'created_date': 'createdDate', + 'credential_data': 'credentialData', + 'id': 'id', + 'priority': 'priority', + 'secret_data': 'secretData', + 'temporary': 'temporary', + 'type': 'type', + 'user_label': 'userLabel', + 'value': 'value' + } + + self._created_date = created_date + self._credential_data = credential_data + self._id = id + self._priority = priority + self._secret_data = secret_data + self._temporary = temporary + self._type = type + self._user_label = user_label + self._value = value + + @classmethod + def from_dict(cls, dikt) -> 'UserCredential': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The UserCredential of this UserCredential. # noqa: E501 + :rtype: UserCredential + """ + return util.deserialize_model(dikt, cls) + + @property + def created_date(self): + """Gets the created_date of this UserCredential. + + + :return: The created_date of this UserCredential. 
+ :rtype: int + """ + return self._created_date + + @created_date.setter + def created_date(self, created_date): + """Sets the created_date of this UserCredential. + + + :param created_date: The created_date of this UserCredential. + :type created_date: int + """ + + self._created_date = created_date + + @property + def credential_data(self): + """Gets the credential_data of this UserCredential. + + + :return: The credential_data of this UserCredential. + :rtype: str + """ + return self._credential_data + + @credential_data.setter + def credential_data(self, credential_data): + """Sets the credential_data of this UserCredential. + + + :param credential_data: The credential_data of this UserCredential. + :type credential_data: str + """ + + self._credential_data = credential_data + + @property + def id(self): + """Gets the id of this UserCredential. + + + :return: The id of this UserCredential. + :rtype: str + """ + return self._id + + @id.setter + def id(self, id): + """Sets the id of this UserCredential. + + + :param id: The id of this UserCredential. + :type id: str + """ + + self._id = id + + @property + def priority(self): + """Gets the priority of this UserCredential. + + + :return: The priority of this UserCredential. + :rtype: int + """ + return self._priority + + @priority.setter + def priority(self, priority): + """Sets the priority of this UserCredential. + + + :param priority: The priority of this UserCredential. + :type priority: int + """ + + self._priority = priority + + @property + def secret_data(self): + """Gets the secret_data of this UserCredential. + + + :return: The secret_data of this UserCredential. + :rtype: str + """ + return self._secret_data + + @secret_data.setter + def secret_data(self, secret_data): + """Sets the secret_data of this UserCredential. + + + :param secret_data: The secret_data of this UserCredential. 
+ :type secret_data: str + """ + + self._secret_data = secret_data + + @property + def temporary(self): + """Gets the temporary of this UserCredential. + + + :return: The temporary of this UserCredential. + :rtype: bool + """ + return self._temporary + + @temporary.setter + def temporary(self, temporary): + """Sets the temporary of this UserCredential. + + + :param temporary: The temporary of this UserCredential. + :type temporary: bool + """ + + self._temporary = temporary + + @property + def type(self): + """Gets the type of this UserCredential. + + + :return: The type of this UserCredential. + :rtype: str + """ + return self._type + + @type.setter + def type(self, type): + """Sets the type of this UserCredential. + + + :param type: The type of this UserCredential. + :type type: str + """ + + self._type = type + + @property + def user_label(self): + """Gets the user_label of this UserCredential. + + + :return: The user_label of this UserCredential. + :rtype: str + """ + return self._user_label + + @user_label.setter + def user_label(self, user_label): + """Sets the user_label of this UserCredential. + + + :param user_label: The user_label of this UserCredential. + :type user_label: str + """ + + self._user_label = user_label + + @property + def value(self): + """Gets the value of this UserCredential. + + + :return: The value of this UserCredential. + :rtype: str + """ + return self._value + + @value.setter + def value(self, value): + """Sets the value of this UserCredential. + + + :param value: The value of this UserCredential. 
+ :type value: str + """ + + self._value = value diff --git a/libraries/models/cloudharness_model/models/user_group.py b/libraries/models/cloudharness_model/models/user_group.py new file mode 100644 index 00000000..70c94921 --- /dev/null +++ b/libraries/models/cloudharness_model/models/user_group.py @@ -0,0 +1,246 @@ +# coding: utf-8 + +from __future__ import absolute_import +from datetime import date, datetime # noqa: F401 + +from typing import List, Dict # noqa: F401 + +from cloudharness_model.models.base_model_ import Model +from cloudharness_model import util + + +class UserGroup(Model): + """NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). + + Do not edit the class manually. + """ + + def __init__(self, access=None, attributes=None, client_roles=None, id=None, name=None, path=None, realm_roles=None, sub_groups=None): # noqa: E501 + """UserGroup - a model defined in OpenAPI + + :param access: The access of this UserGroup. # noqa: E501 + :type access: Dict[str, object] + :param attributes: The attributes of this UserGroup. # noqa: E501 + :type attributes: Dict[str, str] + :param client_roles: The client_roles of this UserGroup. # noqa: E501 + :type client_roles: Dict[str, object] + :param id: The id of this UserGroup. # noqa: E501 + :type id: str + :param name: The name of this UserGroup. # noqa: E501 + :type name: str + :param path: The path of this UserGroup. # noqa: E501 + :type path: str + :param realm_roles: The realm_roles of this UserGroup. # noqa: E501 + :type realm_roles: List[str] + :param sub_groups: The sub_groups of this UserGroup. 
# noqa: E501 + :type sub_groups: List[UserGroup] + """ + self.openapi_types = { + 'access': Dict[str, object], + 'attributes': Dict[str, str], + 'client_roles': Dict[str, object], + 'id': str, + 'name': str, + 'path': str, + 'realm_roles': List[str], + 'sub_groups': List[UserGroup] + } + + self.attribute_map = { + 'access': 'access', + 'attributes': 'attributes', + 'client_roles': 'clientRoles', + 'id': 'id', + 'name': 'name', + 'path': 'path', + 'realm_roles': 'realmRoles', + 'sub_groups': 'subGroups' + } + + self._access = access + self._attributes = attributes + self._client_roles = client_roles + self._id = id + self._name = name + self._path = path + self._realm_roles = realm_roles + self._sub_groups = sub_groups + + @classmethod + def from_dict(cls, dikt) -> 'UserGroup': + """Returns the dict as a model + + :param dikt: A dict. + :type: dict + :return: The UserGroup of this UserGroup. # noqa: E501 + :rtype: UserGroup + """ + return util.deserialize_model(dikt, cls) + + @property + def access(self): + """Gets the access of this UserGroup. + + + :return: The access of this UserGroup. + :rtype: Dict[str, object] + """ + return self._access + + @access.setter + def access(self, access): + """Sets the access of this UserGroup. + + + :param access: The access of this UserGroup. + :type access: Dict[str, object] + """ + + self._access = access + + @property + def attributes(self): + """Gets the attributes of this UserGroup. + + + :return: The attributes of this UserGroup. + :rtype: Dict[str, str] + """ + return self._attributes + + @attributes.setter + def attributes(self, attributes): + """Sets the attributes of this UserGroup. + + + :param attributes: The attributes of this UserGroup. + :type attributes: Dict[str, str] + """ + + self._attributes = attributes + + @property + def client_roles(self): + """Gets the client_roles of this UserGroup. + + + :return: The client_roles of this UserGroup. 
+ :rtype: Dict[str, object] + """ + return self._client_roles + + @client_roles.setter + def client_roles(self, client_roles): + """Sets the client_roles of this UserGroup. + + + :param client_roles: The client_roles of this UserGroup. + :type client_roles: Dict[str, object] + """ + + self._client_roles = client_roles + + @property + def id(self): + """Gets the id of this UserGroup. + + + :return: The id of this UserGroup. + :rtype: str + """ + return self._id + + @id.setter + def id(self, id): + """Sets the id of this UserGroup. + + + :param id: The id of this UserGroup. + :type id: str + """ + + self._id = id + + @property + def name(self): + """Gets the name of this UserGroup. + + + :return: The name of this UserGroup. + :rtype: str + """ + return self._name + + @name.setter + def name(self, name): + """Sets the name of this UserGroup. + + + :param name: The name of this UserGroup. + :type name: str + """ + + self._name = name + + @property + def path(self): + """Gets the path of this UserGroup. + + + :return: The path of this UserGroup. + :rtype: str + """ + return self._path + + @path.setter + def path(self, path): + """Sets the path of this UserGroup. + + + :param path: The path of this UserGroup. + :type path: str + """ + + self._path = path + + @property + def realm_roles(self): + """Gets the realm_roles of this UserGroup. + + + :return: The realm_roles of this UserGroup. + :rtype: List[str] + """ + return self._realm_roles + + @realm_roles.setter + def realm_roles(self, realm_roles): + """Sets the realm_roles of this UserGroup. + + + :param realm_roles: The realm_roles of this UserGroup. + :type realm_roles: List[str] + """ + + self._realm_roles = realm_roles + + @property + def sub_groups(self): + """Gets the sub_groups of this UserGroup. + + + :return: The sub_groups of this UserGroup. + :rtype: List[UserGroup] + """ + return self._sub_groups + + @sub_groups.setter + def sub_groups(self, sub_groups): + """Sets the sub_groups of this UserGroup. 
+ + + :param sub_groups: The sub_groups of this UserGroup. + :type sub_groups: List[UserGroup] + """ + + self._sub_groups = sub_groups diff --git a/libraries/models/cloudharness_model/rest.py b/libraries/models/cloudharness_model/rest.py new file mode 100644 index 00000000..79d1da49 --- /dev/null +++ b/libraries/models/cloudharness_model/rest.py @@ -0,0 +1,346 @@ +""" + cloudharness + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 + + The version of the OpenAPI document: 1.0.0 + Generated by: https://openapi-generator.tech +""" + + +import io +import json +import logging +import re +import ssl +from urllib.parse import urlencode +from urllib.parse import urlparse +from urllib.request import proxy_bypass_environment +import urllib3 +import ipaddress + +from cloudharness_model.exceptions import ApiException, UnauthorizedException, ForbiddenException, NotFoundException, ServiceException, ApiValueError + + +logger = logging.getLogger(__name__) + + +class RESTResponse(io.IOBase): + + def __init__(self, resp): + self.urllib3_response = resp + self.status = resp.status + self.reason = resp.reason + self.data = resp.data + + def getheaders(self): + """Returns a dictionary of the response headers.""" + return self.urllib3_response.getheaders() + + def getheader(self, name, default=None): + """Returns a given response header.""" + return self.urllib3_response.getheader(name, default) + + +class RESTClientObject(object): + + def __init__(self, configuration, pools_size=4, maxsize=None): + # urllib3.PoolManager will pass all kw parameters to connectionpool + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501 + # https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501 + # maxsize is the number of requests to host that are allowed in parallel # noqa: E501 + # 
        #     Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501

        # cert_reqs
        if configuration.verify_ssl:
            cert_reqs = ssl.CERT_REQUIRED
        else:
            cert_reqs = ssl.CERT_NONE

        addition_pool_args = {}
        if configuration.assert_hostname is not None:
            addition_pool_args['assert_hostname'] = configuration.assert_hostname  # noqa: E501

        if configuration.retries is not None:
            addition_pool_args['retries'] = configuration.retries

        if configuration.socket_options is not None:
            addition_pool_args['socket_options'] = configuration.socket_options

        # Explicit maxsize argument wins over the configuration value.
        if maxsize is None:
            if configuration.connection_pool_maxsize is not None:
                maxsize = configuration.connection_pool_maxsize
            else:
                maxsize = 4

        # https pool manager: proxied unless the host matches the no_proxy list.
        if configuration.proxy and not should_bypass_proxies(configuration.host, no_proxy=configuration.no_proxy or ''):
            self.pool_manager = urllib3.ProxyManager(
                num_pools=pools_size,
                maxsize=maxsize,
                cert_reqs=cert_reqs,
                ca_certs=configuration.ssl_ca_cert,
                cert_file=configuration.cert_file,
                key_file=configuration.key_file,
                proxy_url=configuration.proxy,
                proxy_headers=configuration.proxy_headers,
                **addition_pool_args
            )
        else:
            self.pool_manager = urllib3.PoolManager(
                num_pools=pools_size,
                maxsize=maxsize,
                cert_reqs=cert_reqs,
                ca_certs=configuration.ssl_ca_cert,
                cert_file=configuration.cert_file,
                key_file=configuration.key_file,
                **addition_pool_args
            )

    def request(self, method, url, query_params=None, headers=None,
                body=None, post_params=None, _preload_content=True,
                _request_timeout=None):
        """Perform requests.

        :param method: http request method
        :param url: http request url
        :param query_params: query parameters in the url
        :param headers: http request headers
        :param body: request json body, for `application/json`
        :param post_params: request post parameters,
                            `application/x-www-form-urlencoded`
                            and `multipart/form-data`
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        """
        method = method.upper()
        # NOTE(review): `assert` is stripped under `python -O`; an explicit
        # raise would validate unconditionally — left as generated.
        assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
                          'PATCH', 'OPTIONS']

        if post_params and body:
            raise ApiValueError(
                "body parameter cannot be used with post_params parameter."
            )

        post_params = post_params or {}
        headers = headers or {}

        timeout = None
        if _request_timeout:
            if isinstance(_request_timeout, (int, float)):  # noqa: E501,F821
                timeout = urllib3.Timeout(total=_request_timeout)
            elif (isinstance(_request_timeout, tuple) and
                    len(_request_timeout) == 2):
                timeout = urllib3.Timeout(
                    connect=_request_timeout[0], read=_request_timeout[1])

        try:
            # For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
            if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
                # Only set a default Content-Type for POST, PUT, PATCH and OPTIONS requests
                if (method != 'DELETE') and ('Content-Type' not in headers):
                    headers['Content-Type'] = 'application/json'
                if query_params:
                    url += '?' + urlencode(query_params)
                # JSON is the default branch: taken when no Content-Type was
                # given or the given one contains "json" (case-insensitive).
                if ('Content-Type' not in headers) or (re.search('json', headers['Content-Type'], re.IGNORECASE)):
                    request_body = None
                    if body is not None:
                        request_body = json.dumps(body)
                    r = self.pool_manager.request(
                        method, url,
                        body=request_body,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                elif headers['Content-Type'] == 'application/x-www-form-urlencoded':  # noqa: E501
                    r = self.pool_manager.request(
                        method, url,
                        fields=post_params,
                        encode_multipart=False,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                elif headers['Content-Type'] == 'multipart/form-data':
                    # must del headers['Content-Type'], or the correct
                    # Content-Type which generated by urllib3 will be
                    # overwritten.
                    del headers['Content-Type']
                    r = self.pool_manager.request(
                        method, url,
                        fields=post_params,
                        encode_multipart=True,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                # Pass a `string` parameter directly in the body to support
                # other content types than Json when `body` argument is
                # provided in serialized form
                elif isinstance(body, str) or isinstance(body, bytes):
                    request_body = body
                    r = self.pool_manager.request(
                        method, url,
                        body=request_body,
                        preload_content=_preload_content,
                        timeout=timeout,
                        headers=headers)
                else:
                    # Cannot generate the request from given parameters
                    msg = """Cannot prepare a request message for provided
                             arguments. Please check that your arguments match
                             declared content type."""
                    raise ApiException(status=0, reason=msg)
            # For `GET`, `HEAD`
            else:
                r = self.pool_manager.request(method, url,
                                              fields=query_params,
                                              preload_content=_preload_content,
                                              timeout=timeout,
                                              headers=headers)
        except urllib3.exceptions.SSLError as e:
            # SSL failures are surfaced as ApiException with status 0.
            msg = "{0}\n{1}".format(type(e).__name__, str(e))
            raise ApiException(status=0, reason=msg)

        if _preload_content:
            r = RESTResponse(r)

            # log response body
            logger.debug("response body: %s", r.data)

        # Map non-2xx statuses onto the generated exception hierarchy.
        if not 200 <= r.status <= 299:
            if r.status == 401:
                raise UnauthorizedException(http_resp=r)

            if r.status == 403:
                raise ForbiddenException(http_resp=r)

            if r.status == 404:
                raise NotFoundException(http_resp=r)

            if 500 <= r.status <= 599:
                raise ServiceException(http_resp=r)

            raise ApiException(http_resp=r)

        return r

    def GET(self, url, headers=None, query_params=None, _preload_content=True,
            _request_timeout=None):
        return self.request("GET", url,
                            headers=headers,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            query_params=query_params)

    def HEAD(self, url, headers=None, query_params=None, _preload_content=True,
             _request_timeout=None):
        return self.request("HEAD", url,
                            headers=headers,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            query_params=query_params)

    def OPTIONS(self, url, headers=None, query_params=None, post_params=None,
                body=None, _preload_content=True, _request_timeout=None):
        return self.request("OPTIONS", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)

    def DELETE(self, url, headers=None, query_params=None, body=None,
               _preload_content=True, _request_timeout=None):
        return self.request("DELETE", url,
                            headers=headers,
                            query_params=query_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)

    def POST(self, url, headers=None, query_params=None, post_params=None,
             body=None, _preload_content=True, _request_timeout=None):
        return self.request("POST", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)

    def PUT(self, url, headers=None, query_params=None, post_params=None,
            body=None, _preload_content=True, _request_timeout=None):
        return self.request("PUT", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)

    def PATCH(self, url, headers=None, query_params=None, post_params=None,
              body=None, _preload_content=True, _request_timeout=None):
        return self.request("PATCH", url,
                            headers=headers,
                            query_params=query_params,
                            post_params=post_params,
                            _preload_content=_preload_content,
                            _request_timeout=_request_timeout,
                            body=body)

# end of class RESTClientObject
def is_ipv4(target):
    """ Test if IPv4 address or not
    """
    try:
        # NOTE(review): `chk` is unused; the call is made only for its
        # validation side effect (raises on a non-IPv4 string).
        chk = ipaddress.IPv4Address(target)
        return True
    except ipaddress.AddressValueError:
        return False

def in_ipv4net(target, net):
    """ Test if target belongs to given IPv4 network
    """
    try:
        nw = ipaddress.IPv4Network(net)
        ip = ipaddress.IPv4Address(target)
        if ip in nw:
            return True
        return False
    except ipaddress.AddressValueError:
        return False
    except ipaddress.NetmaskValueError:
        return False

def should_bypass_proxies(url, no_proxy=None):
    """ Yet another requests.should_bypass_proxies
    Test if proxies should not be used for a particular url.
+ """ + + parsed = urlparse(url) + + # special cases + if parsed.hostname in [None, '']: + return True + + # special cases + if no_proxy in [None , '']: + return False + if no_proxy == '*': + return True + + no_proxy = no_proxy.lower().replace(' ',''); + entries = ( + host for host in no_proxy.split(',') if host + ) + + if is_ipv4(parsed.hostname): + for item in entries: + if in_ipv4net(parsed.hostname, item): + return True + return proxy_bypass_environment(parsed.hostname, {'no': no_proxy} ) diff --git a/libraries/models/cloudharness_model/schemas.py b/libraries/models/cloudharness_model/schemas.py new file mode 100644 index 00000000..245b3f72 --- /dev/null +++ b/libraries/models/cloudharness_model/schemas.py @@ -0,0 +1,2045 @@ +# coding: utf-8 + +""" + cloudharness + + No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501 + + The version of the OpenAPI document: 1.0.0 + Generated by: https://openapi-generator.tech +""" + +from collections import defaultdict +from datetime import date, datetime, timedelta # noqa: F401 +from dataclasses import dataclass +import functools +import decimal +import io +import os +import re +import tempfile +import typing + +from dateutil.parser.isoparser import isoparser, _takes_ascii +from frozendict import frozendict + +from cloudharness_model.exceptions import ( + ApiTypeError, + ApiValueError, +) +from cloudharness_model.configuration import ( + Configuration, +) + + +class Unset(object): + """ + An instance of this class is set as the default value for object type(dict) properties that are optional + When a property has an unset value, that property will not be assigned in the dict + """ + pass + +unset = Unset() + +none_type = type(None) +file_type = io.IOBase + + +class FileIO(io.FileIO): + """ + A class for storing files + Note: this class is not immutable + """ + + def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader]): + if isinstance(arg, 
def update(d: dict, u: dict):
    """Merge *u* into *d* in place and return *d*.

    Both mappings go from key to a set of values; *d* is expected to be a
    defaultdict(set), so keys present only in *u* start from the empty set.
    """
    for key in u:
        d[key] = d[key].union(u[key])
    return d
+ path_to_schemas: a dict that goes from path to a list of classes at each path location + """ + self.path_to_item = path_to_item + self.from_server = from_server + self.configuration = configuration + self.base_classes = base_classes + if path_to_schemas is None: + path_to_schemas = defaultdict(set) + self.path_to_schemas = path_to_schemas + + def __repr__(self): + return str(self.__dict__) + + def __eq__(self, other): + if not isinstance(other, InstantiationMetadata): + return False + return self.__dict__ == other.__dict__ + + +class ValidatorBase: + @staticmethod + def __is_json_validation_enabled(schema_keyword, configuration=None): + """Returns true if JSON schema validation is enabled for the specified + validation keyword. This can be used to skip JSON schema structural validation + as requested in the configuration. + + Args: + schema_keyword (string): the name of a JSON schema validation keyword. + configuration (Configuration): the configuration class. + """ + + return (configuration is None or + not hasattr(configuration, '_disabled_client_side_validations') or + schema_keyword not in configuration._disabled_client_side_validations) + + @staticmethod + def __raise_validation_error_message(value, constraint_msg, constraint_value, path_to_item, additional_txt=""): + raise ApiValueError( + "Invalid value `{value}`, {constraint_msg} `{constraint_value}`{additional_txt} at {path_to_item}".format( + value=value, + constraint_msg=constraint_msg, + constraint_value=constraint_value, + additional_txt=additional_txt, + path_to_item=path_to_item, + ) + ) + + @classmethod + def __check_str_validations(cls, + validations, input_values, + _instantiation_metadata: InstantiationMetadata): + + if (cls.__is_json_validation_enabled('maxLength', _instantiation_metadata.configuration) and + 'max_length' in validations and + len(input_values) > validations['max_length']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="length must be less than 
or equal to", + constraint_value=validations['max_length'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('minLength', _instantiation_metadata.configuration) and + 'min_length' in validations and + len(input_values) < validations['min_length']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="length must be greater than or equal to", + constraint_value=validations['min_length'], + path_to_item=_instantiation_metadata.path_to_item + ) + + checked_value = input_values + if (cls.__is_json_validation_enabled('pattern', _instantiation_metadata.configuration) and + 'regex' in validations): + for regex_dict in validations['regex']: + flags = regex_dict.get('flags', 0) + if not re.search(regex_dict['pattern'], checked_value, flags=flags): + if flags != 0: + # Don't print the regex flags if the flags are not + # specified in the OAS document. + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="must match regular expression", + constraint_value=regex_dict['pattern'], + path_to_item=_instantiation_metadata.path_to_item, + additional_txt=" with flags=`{}`".format(flags) + ) + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="must match regular expression", + constraint_value=regex_dict['pattern'], + path_to_item=_instantiation_metadata.path_to_item + ) + + @classmethod + def __check_tuple_validations( + cls, validations, input_values, + _instantiation_metadata: InstantiationMetadata): + + if (cls.__is_json_validation_enabled('maxItems', _instantiation_metadata.configuration) and + 'max_items' in validations and + len(input_values) > validations['max_items']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="number of items must be less than or equal to", + constraint_value=validations['max_items'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('minItems', 
_instantiation_metadata.configuration) and + 'min_items' in validations and + len(input_values) < validations['min_items']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="number of items must be greater than or equal to", + constraint_value=validations['min_items'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('uniqueItems', _instantiation_metadata.configuration) and + 'unique_items' in validations and validations['unique_items'] and input_values): + unique_items = [] + # print(validations) + for item in input_values: + if item not in unique_items: + unique_items.append(item) + if len(input_values) > len(unique_items): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="duplicate items were found, and the tuple must not contain duplicates because", + constraint_value='unique_items==True', + path_to_item=_instantiation_metadata.path_to_item + ) + + @classmethod + def __check_dict_validations( + cls, validations, input_values, + _instantiation_metadata: InstantiationMetadata): + + if (cls.__is_json_validation_enabled('maxProperties', _instantiation_metadata.configuration) and + 'max_properties' in validations and + len(input_values) > validations['max_properties']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="number of properties must be less than or equal to", + constraint_value=validations['max_properties'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('minProperties', _instantiation_metadata.configuration) and + 'min_properties' in validations and + len(input_values) < validations['min_properties']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="number of properties must be greater than or equal to", + constraint_value=validations['min_properties'], + path_to_item=_instantiation_metadata.path_to_item + ) + + @classmethod + def 
__check_numeric_validations( + cls, validations, input_values, + _instantiation_metadata: InstantiationMetadata): + + if cls.__is_json_validation_enabled('multipleOf', + _instantiation_metadata.configuration) and 'multiple_of' in validations: + multiple_of_values = validations['multiple_of'] + for multiple_of_value in multiple_of_values: + if (isinstance(input_values, decimal.Decimal) and + not (float(input_values) / multiple_of_value).is_integer() + ): + # Note 'multipleOf' will be as good as the floating point arithmetic. + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="value must be a multiple of", + constraint_value=multiple_of_value, + path_to_item=_instantiation_metadata.path_to_item + ) + + checking_max_or_min_values = {'exclusive_maximum', 'inclusive_maximum', 'exclusive_minimum', + 'inclusive_minimum'}.isdisjoint(validations) is False + if not checking_max_or_min_values: + return + max_val = input_values + min_val = input_values + + if (cls.__is_json_validation_enabled('exclusiveMaximum', _instantiation_metadata.configuration) and + 'exclusive_maximum' in validations and + max_val >= validations['exclusive_maximum']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="must be a value less than", + constraint_value=validations['exclusive_maximum'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('maximum', _instantiation_metadata.configuration) and + 'inclusive_maximum' in validations and + max_val > validations['inclusive_maximum']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="must be a value less than or equal to", + constraint_value=validations['inclusive_maximum'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('exclusiveMinimum', _instantiation_metadata.configuration) and + 'exclusive_minimum' in validations and + min_val <= validations['exclusive_minimum']): + 
cls.__raise_validation_error_message( + value=input_values, + constraint_msg="must be a value greater than", + constraint_value=validations['exclusive_maximum'], + path_to_item=_instantiation_metadata.path_to_item + ) + + if (cls.__is_json_validation_enabled('minimum', _instantiation_metadata.configuration) and + 'inclusive_minimum' in validations and + min_val < validations['inclusive_minimum']): + cls.__raise_validation_error_message( + value=input_values, + constraint_msg="must be a value greater than or equal to", + constraint_value=validations['inclusive_minimum'], + path_to_item=_instantiation_metadata.path_to_item + ) + + @classmethod + def _check_validations_for_types( + cls, + validations, + input_values, + _instantiation_metadata: InstantiationMetadata + ): + if isinstance(input_values, str): + cls.__check_str_validations(validations, input_values, _instantiation_metadata) + elif isinstance(input_values, tuple): + cls.__check_tuple_validations(validations, input_values, _instantiation_metadata) + elif isinstance(input_values, frozendict): + cls.__check_dict_validations(validations, input_values, _instantiation_metadata) + elif isinstance(input_values, decimal.Decimal): + cls.__check_numeric_validations(validations, input_values, _instantiation_metadata) + try: + return super()._validate_validations_pass(input_values, _instantiation_metadata) + except AttributeError: + return True + + +class Validator(typing.Protocol): + def _validate_validations_pass( + cls, + input_values, + _instantiation_metadata: InstantiationMetadata + ): + pass + + +def _SchemaValidator(**validations: typing.Union[str, bool, None, int, float, list[dict[str, typing.Union[str, int, float]]]]) -> Validator: + class SchemaValidator(ValidatorBase): + @classmethod + def _validate_validations_pass( + cls, + input_values, + _instantiation_metadata: InstantiationMetadata + ): + cls._check_validations_for_types(validations, input_values, _instantiation_metadata) + try: + return 
super()._validate_validations_pass(input_values, _instantiation_metadata) + except AttributeError: + return True + + return SchemaValidator + + +class TypeChecker(typing.Protocol): + @classmethod + def _validate_type( + cls, arg_simple_class: type + ) -> typing.Tuple[type]: + pass + + +def _SchemaTypeChecker(union_type_cls: typing.Union[typing.Any]) -> TypeChecker: + if typing.get_origin(union_type_cls) is typing.Union: + union_classes = typing.get_args(union_type_cls) + else: + # note: when a union of a single class is passed in, the union disappears + union_classes = tuple([union_type_cls]) + """ + I want the type hint... union_type_cls + and to use it as a base class but when I do, I get + TypeError: metaclass conflict: the metaclass of a derived class must be a (non-strict) subclass of the metaclasses of all its bases + """ + class SchemaTypeChecker: + @classmethod + def _validate_type(cls, arg_simple_class: type): + if arg_simple_class not in union_classes: + return union_classes + try: + return super()._validate_type(arg_simple_class) + except AttributeError: + return tuple() + + return SchemaTypeChecker + + +class EnumMakerBase: + @classmethod + @property + def _enum_by_value( + cls + ) -> type: + enum_classes = {} + if not hasattr(cls, "_enum_value_to_name"): + return enum_classes + for enum_value, enum_name in cls._enum_value_to_name.items(): + base_class = type(enum_value) + if base_class is none_type: + enum_classes[enum_value] = get_new_class( + "Dynamic" + cls.__name__, (cls, NoneClass)) + log_cache_usage(get_new_class) + elif base_class is bool: + enum_classes[enum_value] = get_new_class( + "Dynamic" + cls.__name__, (cls, BoolClass)) + log_cache_usage(get_new_class) + else: + enum_classes[enum_value] = get_new_class( + "Dynamic" + cls.__name__, (cls, Singleton, base_class)) + log_cache_usage(get_new_class) + return enum_classes + + +class EnumMakerInterface(typing.Protocol): + @classmethod + @property + def _enum_value_to_name( + cls + ) -> 
class Singleton:
    """
    Enums and singletons are the same
    The same instance is returned for a given key of (cls, arg)
    """
    # TODO use bidict to store this so boolean enums can move through it in reverse to get their own arg value?
    _instances = {}

    def __new__(cls, *args, **kwargs):
        if not args:
            raise ValueError('arg must be passed')
        arg = args[0]
        key = (cls, arg)
        if key not in cls._instances:
            if arg in {None, True, False}:
                # None/True/False cannot be passed to object.__new__ with an
                # extra argument; the identity is carried by the cache key.
                inst = super().__new__(cls)
                cls._instances[key] = inst
            else:
                cls._instances[key] = super().__new__(cls, arg)
        return cls._instances[key]

    def __repr__(self):
        return '({}, {})'.format(self.__class__.__name__, self)


class NoneClass(Singleton):
    """Singleton wrapper for the JSON null value."""

    @classmethod
    @property
    def NONE(cls):
        return cls(None)

    def is_none(self) -> bool:
        return True

    def __bool__(self) -> bool:
        return False


class BoolClass(Singleton):
    """Singleton wrapper for the JSON true/false values."""

    @classmethod
    @property
    def TRUE(cls):
        return cls(True)

    @classmethod
    @property
    def FALSE(cls):
        return cls(False)

    @functools.cache
    def __bool__(self) -> bool:
        # Recover the original bool from the singleton cache key.
        for key, instance in self._instances.items():
            if self is instance:
                return key[1]
        raise ValueError('Unable to find the boolean value of this instance')

    def is_true(self):
        return bool(self)

    def is_false(self):
        # BUG FIX: the original returned bool(self), making is_false()
        # identical to is_true(); FALSE.is_false() wrongly returned False.
        return not bool(self)
" + "Invalid value '{}' for type date at {}".format(arg, _instantiation_metadata.path_to_item) + ) + + @classmethod + def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + DateBase _validate + """ + cls._validate_format(args[0], _instantiation_metadata=_instantiation_metadata) + return super()._validate(*args, _instantiation_metadata=_instantiation_metadata) + + +class DateTimeBase: + @property + @functools.cache + def as_datetime(self) -> datetime: + return DEFAULT_ISOPARSER.parse_isodatetime(self) + + @classmethod + def _validate_format(cls, arg: typing.Optional[str], _instantiation_metadata: InstantiationMetadata): + if isinstance(arg, str): + try: + DEFAULT_ISOPARSER.parse_isodatetime(arg) + return True + except ValueError: + raise ApiValueError( + "Value does not conform to the required ISO-8601 datetime format. " + "Invalid value '{}' for type datetime at {}".format(arg, _instantiation_metadata.path_to_item) + ) + + @classmethod + def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + DateTimeBase _validate + """ + cls._validate_format(args[0], _instantiation_metadata=_instantiation_metadata) + return super()._validate(*args, _instantiation_metadata=_instantiation_metadata) + + +class DecimalBase(StrBase): + """ + A class for storing decimals that are sent over the wire as strings + These schemas must remain based on StrBase rather than NumberBase + because picking base classes must be deterministic + """ + + @property + @functools.cache + def as_decimal(self) -> decimal.Decimal: + return decimal.Decimal(self) + + @classmethod + def _validate_format(cls, arg: typing.Optional[str], _instantiation_metadata: InstantiationMetadata): + if isinstance(arg, str): + try: + decimal.Decimal(arg) + return True + except decimal.InvalidOperation: + raise ApiValueError( + "Value cannot be converted to a decimal. 
" + "Invalid value '{}' for type decimal at {}".format(arg, _instantiation_metadata.path_to_item) + ) + + @classmethod + def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + DecimalBase _validate + """ + cls._validate_format(args[0], _instantiation_metadata=_instantiation_metadata) + return super()._validate(*args, _instantiation_metadata=_instantiation_metadata) + + +class NumberBase: + @property + def as_int(self) -> int: + try: + return self._as_int + except AttributeError: + """ + Note: for some numbers like 9.0 they could be represented as an + integer but our code chooses to store them as + >>> Decimal('9.0').as_tuple() + DecimalTuple(sign=0, digits=(9, 0), exponent=-1) + so we can tell that the value came from a float and convert it back to a float + during later serialization + """ + if self.as_tuple().exponent < 0: + # this could be represented as an integer but should be represented as a float + # because that's what it was serialized from + raise ApiValueError(f'{self} is not an integer') + self._as_int = int(self) + return self._as_int + + @property + def as_float(self) -> float: + try: + return self._as_float + except AttributeError: + if self.as_tuple().exponent >= 0: + raise ApiValueError(f'{self} is not an float') + self._as_float = float(self) + return self._as_float + + +class ListBase: + @classmethod + def _validate_items(cls, list_items, _instantiation_metadata: InstantiationMetadata): + """ + Ensures that: + - values passed in for items are valid + Exceptions will be raised if: + - invalid arguments were passed in + + Args: + list_items: the input list of items + + Raises: + ApiTypeError - for missing required arguments, or for invalid properties + """ + + # if we have definitions for an items schema, use it + # otherwise accept anything + item_cls = getattr(cls, '_items', AnyTypeSchema) + path_to_schemas = defaultdict(set) + for i, value in enumerate(list_items): + if isinstance(value, 
item_cls): + continue + item_instantiation_metadata = InstantiationMetadata( + from_server=_instantiation_metadata.from_server, + configuration=_instantiation_metadata.configuration, + path_to_item=_instantiation_metadata.path_to_item+(i,) + ) + other_path_to_schemas = item_cls._validate( + value, _instantiation_metadata=item_instantiation_metadata) + update(path_to_schemas, other_path_to_schemas) + return path_to_schemas + + @classmethod + def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + ListBase _validate + We return dynamic classes of different bases depending upon the inputs + This makes it so: + - the returned instance is always a subclass of our defining schema + - this allows us to check type based on whether an instance is a subclass of a schema + - the returned instance is a serializable type (except for None, True, and False) which are enums + + Returns: + new_cls (type): the new class + + Raises: + ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes + ApiTypeError: when the input type is not in the list of allowed spec types + """ + arg = args[0] + _path_to_schemas = super()._validate(*args, _instantiation_metadata=_instantiation_metadata) + if not isinstance(arg, tuple): + return _path_to_schemas + if cls in _instantiation_metadata.base_classes: + # we have already moved through this class so stop here + return _path_to_schemas + _instantiation_metadata.base_classes |= frozenset({cls}) + other_path_to_schemas = cls._validate_items(arg, _instantiation_metadata=_instantiation_metadata) + update(_path_to_schemas, other_path_to_schemas) + return _path_to_schemas + + @classmethod + def _get_items(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + ''' + ListBase _get_items + ''' + _instantiation_metadata = InstantiationMetadata() if _instantiation_metadata is None else _instantiation_metadata + + list_items = 
args[0] + cast_items = [] + # if we have definitions for an items schema, use it + # otherwise accept anything + + cls_item_cls = getattr(cls, '_items', AnyTypeSchema) + for i, value in enumerate(list_items): + item_path_to_item = _instantiation_metadata.path_to_item+(i,) + if item_path_to_item in _instantiation_metadata.path_to_schemas: + item_cls = _instantiation_metadata.path_to_schemas[item_path_to_item] + else: + item_cls = cls_item_cls + + if isinstance(value, item_cls): + cast_items.append(value) + continue + item_instantiation_metadata = InstantiationMetadata( + configuration=_instantiation_metadata.configuration, + from_server=_instantiation_metadata.from_server, + path_to_item=item_path_to_item, + path_to_schemas=_instantiation_metadata.path_to_schemas, + ) + + if _instantiation_metadata.from_server: + new_value = item_cls._from_openapi_data(value, _instantiation_metadata=item_instantiation_metadata) + else: + new_value = item_cls(value, _instantiation_metadata=item_instantiation_metadata) + cast_items.append(new_value) + + return cast_items + + +class Discriminable: + @classmethod + def _ensure_discriminator_value_present(cls, disc_property_name: str, _instantiation_metadata: InstantiationMetadata, *args): + if not args or args and disc_property_name not in args[0]: + # The input data does not contain the discriminator property + raise ApiValueError( + "Cannot deserialize input data due to missing discriminator. 
" + "The discriminator property '{}' is missing at path: {}".format(disc_property_name, _instantiation_metadata.path_to_item) + ) + + @classmethod + def _get_discriminated_class(cls, disc_property_name: str, disc_payload_value: str): + """ + Used in schemas with discriminators + """ + if not hasattr(cls, '_discriminator'): + return None + disc = cls._discriminator + if disc_property_name not in disc: + return None + discriminated_cls = disc[disc_property_name].get(disc_payload_value) + if discriminated_cls is not None: + return discriminated_cls + elif not hasattr(cls, '_composed_schemas'): + return None + # TODO stop traveling if a cycle is hit + for allof_cls in cls._composed_schemas['allOf']: + discriminated_cls = allof_cls._get_discriminated_class( + disc_property_name=disc_property_name, disc_payload_value=disc_payload_value) + if discriminated_cls is not None: + return discriminated_cls + for oneof_cls in cls._composed_schemas['oneOf']: + discriminated_cls = oneof_cls._get_discriminated_class( + disc_property_name=disc_property_name, disc_payload_value=disc_payload_value) + if discriminated_cls is not None: + return discriminated_cls + for anyof_cls in cls._composed_schemas['anyOf']: + discriminated_cls = anyof_cls._get_discriminated_class( + disc_property_name=disc_property_name, disc_payload_value=disc_payload_value) + if discriminated_cls is not None: + return discriminated_cls + return None + + +class DictBase(Discriminable): + # subclass properties + _required_property_names = set() + + @classmethod + def _validate_arg_presence(cls, arg): + """ + Ensures that: + - all required arguments are passed in + - the input variable names are valid + - present in properties or + - accepted because additionalProperties exists + Exceptions will be raised if: + - invalid arguments were passed in + - a var_name is invalid if additionProperties == None and var_name not in _properties + - required properties were not passed in + + Args: + arg: the input dict + + Raises: 
+ ApiTypeError - for missing required arguments, or for invalid properties + """ + seen_required_properties = set() + invalid_arguments = [] + for property_name in arg: + if property_name in cls._required_property_names: + seen_required_properties.add(property_name) + elif property_name in cls._property_names: + continue + elif cls._additional_properties: + continue + else: + invalid_arguments.append(property_name) + missing_required_arguments = list(cls._required_property_names - seen_required_properties) + if missing_required_arguments: + missing_required_arguments.sort() + raise ApiTypeError( + "{} is missing {} required argument{}: {}".format( + cls.__name__, + len(missing_required_arguments), + "s" if len(missing_required_arguments) > 1 else "", + missing_required_arguments + ) + ) + if invalid_arguments: + invalid_arguments.sort() + raise ApiTypeError( + "{} was passed {} invalid argument{}: {}".format( + cls.__name__, + len(invalid_arguments), + "s" if len(invalid_arguments) > 1 else "", + invalid_arguments + ) + ) + + @classmethod + def _validate_args(cls, arg, _instantiation_metadata: InstantiationMetadata): + """ + Ensures that: + - values passed in for properties are valid + Exceptions will be raised if: + - invalid arguments were passed in + + Args: + arg: the input dict + + Raises: + ApiTypeError - for missing required arguments, or for invalid properties + """ + path_to_schemas = defaultdict(set) + for property_name, value in arg.items(): + if property_name in cls._required_property_names or property_name in cls._property_names: + schema = getattr(cls, property_name) + elif cls._additional_properties: + schema = cls._additional_properties + else: + raise ApiTypeError('Unable to find schema for value={} in class={} at path_to_item={}'.format( + value, cls, _instantiation_metadata.path_to_item+(property_name,) + )) + if isinstance(value, schema): + continue + arg_instantiation_metadata = InstantiationMetadata( + 
                # (continuation of DictBase._validate_args — the method header and
                # the enclosing per-property loop begin before this visible chunk)
                from_server=_instantiation_metadata.from_server,
                configuration=_instantiation_metadata.configuration,
                path_to_item=_instantiation_metadata.path_to_item+(property_name,)
            )
            # validate this property's value against its schema and merge the
            # resulting path -> schema-class entries into the accumulator
            other_path_to_schemas = schema._validate(value, _instantiation_metadata=arg_instantiation_metadata)
            update(path_to_schemas, other_path_to_schemas)
            # propagate the per-property schema resolutions back into the caller's metadata
            _instantiation_metadata.path_to_schemas.update(arg_instantiation_metadata.path_to_schemas)
        return path_to_schemas

    @classmethod
    def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None):
        """
        DictBase _validate
        We return dynamic classes of different bases depending upon the inputs
        This makes it so:
        - the returned instance is always a subclass of our defining schema
        - this allows us to check type based on whether an instance is a subclass of a schema
        - the returned instance is a serializable type (except for None, True, and False) which are enums

        Returns:
            new_cls (type): the new class

        Raises:
            ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
            ApiTypeError: when the input type is not in the list of allowed spec types
        """
        if args and isinstance(args[0], cls):
            # an instance of the correct type was passed in; it was validated
            # when it was created, so there is nothing new to record
            return {}
        arg = args[0]
        _path_to_schemas = super()._validate(*args, _instantiation_metadata=_instantiation_metadata)
        if not isinstance(arg, frozendict):
            # non-dict payloads carry no properties or discriminator to check
            return _path_to_schemas
        cls._validate_arg_presence(args[0])
        other_path_to_schemas = cls._validate_args(args[0], _instantiation_metadata=_instantiation_metadata)
        update(_path_to_schemas, other_path_to_schemas)
        try:
            _discriminator = cls._discriminator
        except AttributeError:
            return _path_to_schemas
        # discriminator exists: resolve the concrete schema from the payload value
        disc_prop_name = list(_discriminator.keys())[0]
        cls._ensure_discriminator_value_present(disc_prop_name, _instantiation_metadata, *args)
        discriminated_cls = cls._get_discriminated_class(
            disc_property_name=disc_prop_name, disc_payload_value=arg[disc_prop_name])
        if discriminated_cls is None:
            raise ApiValueError(
                "Invalid discriminator value was passed in to {}.{} Only the values {} are allowed at {}".format(
                    cls.__name__,
                    disc_prop_name,
                    list(_discriminator[disc_prop_name].keys()),
                    _instantiation_metadata.path_to_item + (disc_prop_name,)
                )
            )
        if discriminated_cls in _instantiation_metadata.base_classes:
            # we have already moved through this class so stop here
            return _path_to_schemas
        _instantiation_metadata.base_classes |= frozenset({cls})
        other_path_to_schemas = discriminated_cls._validate(*args, _instantiation_metadata=_instantiation_metadata)
        update(_path_to_schemas, other_path_to_schemas)
        return _path_to_schemas

    @classmethod
    @property
    def _additional_properties(cls):
        # default when a schema declares no additionalProperties: accept any type
        return AnyTypeSchema

    @classmethod
    @property
    @functools.cache
    def _property_names(cls):
        """Return the sorted tuple of property names declared on this schema class.

        Referenced models appear on the class as classmethods and inline schemas
        as nested Schema subclasses; '_additional_properties' is excluded because
        it is not a named payload property. Cached: the class dict is fixed.
        """
        property_names = set()
        for var_name, var_value in cls.__dict__.items():
            # referenced models are classmethods
            is_classmethod = type(var_value) is classmethod
            if is_classmethod:
                property_names.add(var_name)
                continue
            is_class = type(var_value) is type
            if not is_class:
                continue
            if not issubclass(var_value, Schema):
                continue
            if var_name == '_additional_properties':
                continue
            property_names.add(var_name)
        property_names = list(property_names)
        property_names.sort()
        return tuple(property_names)

    @classmethod
    def _get_properties(cls, arg: typing.Dict[str, typing.Any], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None):
        """
        DictBase _get_properties, this is how properties are set
        These values already passed validation
        """
        dict_items = {}
        # if we have definitions for property schemas convert values using it
        # otherwise accept anything

        _instantiation_metadata = InstantiationMetadata() if _instantiation_metadata is None else _instantiation_metadata

        for property_name_js, value in arg.items():
            property_cls = getattr(cls, property_name_js, cls._additional_properties)
            property_path_to_item = _instantiation_metadata.path_to_item+(property_name_js,)
            # _validate may already have resolved a dynamic class for this exact
            # path; prefer it over the statically declared property schema
            stored_property_cls = _instantiation_metadata.path_to_schemas.get(property_path_to_item)
            if stored_property_cls:
                property_cls = stored_property_cls

            if isinstance(value, property_cls):
                # value is already an instance of the required schema class
                dict_items[property_name_js] = value
                continue

            prop_instantiation_metadata = InstantiationMetadata(
                configuration=_instantiation_metadata.configuration,
                from_server=_instantiation_metadata.from_server,
                path_to_item=property_path_to_item,
                path_to_schemas=_instantiation_metadata.path_to_schemas,
            )
            if _instantiation_metadata.from_server:
                new_value = property_cls._from_openapi_data(value, _instantiation_metadata=prop_instantiation_metadata)
            else:
                new_value = property_cls(value, _instantiation_metadata=prop_instantiation_metadata)
            dict_items[property_name_js] = new_value
        return dict_items

    def __setattr__(self, name, value):
        # schema instances are immutable; only FileIO-backed instances escape this
        if not isinstance(self, FileIO):
            raise AttributeError('property setting not supported on immutable instances')

    def __getattr__(self, name):
        # called only when normal lookup fails: expose payload dict keys as attributes
        if isinstance(self, frozendict):
            # if an attribute does not exist
            try:
                return self[name]
            except KeyError as ex:
                raise AttributeError(str(ex))
        return super().__getattr__(self, name)

    def __getattribute__(self, name):
        # if an attribute does exist (for example as a class property but not as an instance method)
        # prefer the payload key of the same name over the class attribute
        try:
            return self[name]
        except (KeyError, TypeError):
            return super().__getattribute__(name)


# primitive base types that dynamic schema classes are allowed to inherit from
inheritable_primitive_types_set = {decimal.Decimal, str, tuple, frozendict, FileIO, bytes}


class Schema:
    """
    the base class of all swagger/openapi schemas/models

    ensures that:
    - payload passes required validations
    - payload is of allowed types
    - payload value is an allowed enum value
""" + + @staticmethod + def __get_simple_class(input_value): + """Returns an input_value's simple class that we will use for type checking + + Args: + input_value (class/class_instance): the item for which we will return + the simple class + """ + if isinstance(input_value, tuple): + return tuple + elif isinstance(input_value, frozendict): + return frozendict + elif isinstance(input_value, none_type): + return none_type + elif isinstance(input_value, bytes): + return bytes + elif isinstance(input_value, (io.FileIO, io.BufferedReader)): + return FileIO + elif isinstance(input_value, bool): + # this must be higher than the int check because + # isinstance(True, int) == True + return bool + elif isinstance(input_value, int): + return int + elif isinstance(input_value, float): + return float + elif isinstance(input_value, datetime): + # this must be higher than the date check because + # isinstance(datetime_instance, date) == True + return datetime + elif isinstance(input_value, date): + return date + elif isinstance(input_value, str): + return str + return type(input_value) + + @staticmethod + def __get_valid_classes_phrase(input_classes): + """Returns a string phrase describing what types are allowed""" + all_classes = list(input_classes) + all_classes = sorted(all_classes, key=lambda cls: cls.__name__) + all_class_names = [cls.__name__ for cls in all_classes] + if len(all_class_names) == 1: + return "is {0}".format(all_class_names[0]) + return "is one of [{0}]".format(", ".join(all_class_names)) + + @classmethod + def __type_error_message( + cls, var_value=None, var_name=None, valid_classes=None, key_type=None + ): + """ + Keyword Args: + var_value (any): the variable which has the type_error + var_name (str): the name of the variable which has the typ error + valid_classes (tuple): the accepted classes for current_item's + value + key_type (bool): False if our value is a value in a dict + True if it is a key in a dict + False if our item is an item in a tuple + """ 
+ key_or_value = "value" + if key_type: + key_or_value = "key" + valid_classes_phrase = cls.__get_valid_classes_phrase(valid_classes) + msg = "Invalid type. Required {1} type {2} and " "passed type was {3}".format( + var_name, + key_or_value, + valid_classes_phrase, + type(var_value).__name__, + ) + return msg + + @classmethod + def __get_type_error(cls, var_value, path_to_item, valid_classes, key_type=False): + error_msg = cls.__type_error_message( + var_name=path_to_item[-1], + var_value=var_value, + valid_classes=valid_classes, + key_type=key_type, + ) + return ApiTypeError( + error_msg, + path_to_item=path_to_item, + valid_classes=valid_classes, + key_type=key_type, + ) + + @classmethod + def _class_by_base_class(cls, base_cls: type) -> type: + cls_name = "Dynamic"+cls.__name__ + if base_cls is bool: + new_cls = get_new_class(cls_name, (cls, BoolBase, BoolClass)) + elif base_cls is str: + new_cls = get_new_class(cls_name, (cls, StrBase, str)) + elif base_cls is decimal.Decimal: + new_cls = get_new_class(cls_name, (cls, NumberBase, decimal.Decimal)) + elif base_cls is tuple: + new_cls = get_new_class(cls_name, (cls, ListBase, tuple)) + elif base_cls is frozendict: + new_cls = get_new_class(cls_name, (cls, DictBase, frozendict)) + elif base_cls is none_type: + new_cls = get_new_class(cls_name, (cls, NoneBase, NoneClass)) + log_cache_usage(get_new_class) + return new_cls + + @classmethod + def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None): + """ + Schema _validate + Runs all schema validation logic and + returns a dynamic class of different bases depending upon the input + This makes it so: + - the returned instance is always a subclass of our defining schema + - this allows us to check type based on whether an instance is a subclass of a schema + - the returned instance is a serializable type (except for None, True, and False) which are enums + + Use cases: + 1. 
inheritable type: string/decimal.Decimal/frozendict/tuple + 2. enum value cases: 'hi', 1 -> no base_class set because the enum includes the base class + 3. uninheritable type: True/False/None -> no base_class because the base class is not inheritable + _enum_by_value will handle this use case + + Required Steps: + 1. verify type of input is valid vs the allowed _types + 2. check validations that are applicable for this type of input + 3. if enums exist, check that the value exists in the enum + + Returns: + path_to_schemas: a map of path to schemas + + Raises: + ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes + ApiTypeError: when the input type is not in the list of allowed spec types + """ + arg = args[0] + + base_class = cls.__get_simple_class(arg) + failed_type_check_classes = cls._validate_type(base_class) + if failed_type_check_classes: + raise cls.__get_type_error( + arg, + _instantiation_metadata.path_to_item, + failed_type_check_classes, + key_type=False, + ) + if hasattr(cls, '_validate_validations_pass'): + cls._validate_validations_pass(arg, _instantiation_metadata) + path_to_schemas = defaultdict(set) + path_to_schemas[_instantiation_metadata.path_to_item].add(cls) + + if hasattr(cls, "_enum_by_value"): + cls._validate_enum_value(arg) + return path_to_schemas + + if base_class is none_type or base_class is bool: + return path_to_schemas + + path_to_schemas[_instantiation_metadata.path_to_item].add(base_class) + return path_to_schemas + + @classmethod + def _validate_enum_value(cls, arg): + try: + cls._enum_by_value[arg] + except KeyError: + raise ApiValueError("Invalid value {} passed in to {}, {}".format(arg, cls, cls._enum_value_to_name)) + + @classmethod + def __get_new_cls(cls, arg, _instantiation_metadata: InstantiationMetadata): + """ + PATH 1 - make a new dynamic class and return an instance of that class + We are making an instance of cls, but instead of making cls + make a new class, 
new_cls + which includes dynamic bases including cls + return an instance of that new class + """ + if ( + _instantiation_metadata.path_to_schemas and + _instantiation_metadata.path_to_item in _instantiation_metadata.path_to_schemas): + chosen_new_cls = _instantiation_metadata.path_to_schemas[_instantiation_metadata.path_to_item] + # print('leaving __get_new_cls early for cls {} because path_to_schemas exists'.format(cls)) + # print(_instantiation_metadata.path_to_item) + # print(chosen_new_cls) + return chosen_new_cls + """ + Dict property + List Item Assignment Use cases: + 1. value is NOT an instance of the required schema class + the value is validated by _validate + _validate returns a key value pair + where the key is the path to the item, and the value will be the required manufactured class + made out of the matching schemas + 2. value is an instance of the the correct schema type + the value is NOT validated by _validate, _validate only checks that the instance is of the correct schema type + for this value, _validate does NOT return an entry for it in _path_to_schemas + and in list/dict _get_items,_get_properties the value will be directly assigned + because value is of the correct type, and validation was run earlier when the instance was created + """ + _path_to_schemas = cls._validate(arg, _instantiation_metadata=_instantiation_metadata) + from pprint import pprint + pprint(dict(_path_to_schemas)) + # loop through it make a new class for each entry + for path, schema_classes in _path_to_schemas.items(): + enum_schema = any( + hasattr(this_cls, '_enum_by_value') for this_cls in schema_classes) + inheritable_primitive_type = schema_classes.intersection(inheritable_primitive_types_set) + chosen_schema_classes = schema_classes + suffix = tuple() + if inheritable_primitive_type: + chosen_schema_classes = schema_classes - inheritable_primitive_types_set + if not enum_schema: + # include the inheritable_primitive_type + suffix = 
tuple(inheritable_primitive_type) + + if len(chosen_schema_classes) == 1 and not suffix: + mfg_cls = tuple(chosen_schema_classes)[0] + else: + x_schema = schema_descendents & chosen_schema_classes + if x_schema: + x_schema = x_schema.pop() + if any(c is not x_schema and issubclass(c, x_schema) for c in chosen_schema_classes): + # needed to not have a mro error in get_new_class + chosen_schema_classes.remove(x_schema) + used_classes = tuple(sorted(chosen_schema_classes, key=lambda a_cls: a_cls.__name__)) + suffix + mfg_cls = get_new_class(class_name='DynamicSchema', bases=used_classes) + + if inheritable_primitive_type and not enum_schema: + _instantiation_metadata.path_to_schemas[path] = mfg_cls + continue + + # Use case: value is None, True, False, or an enum value + # print('choosing enum class for path {} in arg {}'.format(path, arg)) + value = arg + for key in path[1:]: + value = value[key] + if hasattr(mfg_cls, '_enum_by_value'): + mfg_cls = mfg_cls._enum_by_value[value] + elif value in {True, False}: + mfg_cls = mfg_cls._class_by_base_class(bool) + elif value is None: + mfg_cls = mfg_cls._class_by_base_class(none_type) + else: + raise ApiValueError('Unhandled case value={} bases={}'.format(value, mfg_cls.__bases__)) + _instantiation_metadata.path_to_schemas[path] = mfg_cls + + return _instantiation_metadata.path_to_schemas[_instantiation_metadata.path_to_item] + + @classmethod + def __get_new_instance_without_conversion(cls, arg, _instantiation_metadata): + # PATH 2 - we have a Dynamic class and we are making an instance of it + if issubclass(cls, tuple): + items = cls._get_items(arg, _instantiation_metadata=_instantiation_metadata) + return super(Schema, cls).__new__(cls, items) + elif issubclass(cls, frozendict): + properties = cls._get_properties(arg, _instantiation_metadata=_instantiation_metadata) + return super(Schema, cls).__new__(cls, properties) + """ + str = openapi str, date, and datetime + decimal.Decimal = openapi int and float + FileIO = openapi 
        binary type and the user inputs a file
        bytes = openapi binary type and the user inputs bytes
        """
        return super(Schema, cls).__new__(cls, arg)

    @classmethod
    def _from_openapi_data(
        cls,
        arg: typing.Union[
            str,
            date,
            datetime,
            int,
            float,
            decimal.Decimal,
            bool,
            None,
            'Schema',
            dict,
            frozendict,
            tuple,
            list,
            io.FileIO,
            io.BufferedReader,
            bytes
        ],
        _instantiation_metadata: typing.Optional[InstantiationMetadata]
    ):
        """Build an instance from server-sent (deserialized) data.

        Counterpart of __new__, which handles client-side construction; this
        path requires from_server=True metadata.
        """
        arg = cast_to_allowed_types(arg, from_server=True)
        _instantiation_metadata = InstantiationMetadata(from_server=True) if _instantiation_metadata is None else _instantiation_metadata
        if not _instantiation_metadata.from_server:
            raise ApiValueError(
                'from_server must be True in this code path, if you need it to be False, use cls()'
            )
        new_cls = cls.__get_new_cls(arg, _instantiation_metadata)
        new_inst = new_cls.__get_new_instance_without_conversion(arg, _instantiation_metadata)
        return new_inst

    @staticmethod
    def __get_input_dict(*args, **kwargs) -> frozendict:
        # merge a leading dict positional argument (if any) with keyword args
        input_dict = {}
        if args and isinstance(args[0], (dict, frozendict)):
            input_dict.update(args[0])
        if kwargs:
            input_dict.update(kwargs)
        return frozendict(input_dict)

    @staticmethod
    def __remove_unsets(kwargs):
        # drop properties the caller left as the 'unset' sentinel
        return {key: val for key, val in kwargs.items() if val is not unset}

    def __new__(cls, *args: typing.Union[dict, frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None, **kwargs: typing.Union[dict, frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset]):
        """
        Schema __new__

        Client-side construction path (from_server must be False).

        Args:
            args (int/float/decimal.Decimal/str/list/tuple/dict/frozendict/bool/None): the value
            kwargs (str, int/float/decimal.Decimal/str/list/tuple/dict/frozendict/bool/None): dict values
            _instantiation_metadata: contains the needed from_server, configuration, path_to_item
        """
        kwargs = cls.__remove_unsets(kwargs)
        if not args and not kwargs:
            raise TypeError(
                'No input given. args or kwargs must be given.'
            )
        if not kwargs and args and not isinstance(args[0], dict):
            # simple (non-dict) payload: pass it through unchanged
            arg = args[0]
        else:
            arg = cls.__get_input_dict(*args, **kwargs)
        _instantiation_metadata = InstantiationMetadata() if _instantiation_metadata is None else _instantiation_metadata
        if _instantiation_metadata.from_server:
            raise ApiValueError(
                'from_server must be False in this code path, if you need it to be True, use cls._from_openapi_data()'
            )
        arg = cast_to_allowed_types(arg, from_server=_instantiation_metadata.from_server)
        new_cls = cls.__get_new_cls(arg, _instantiation_metadata)
        return new_cls.__get_new_instance_without_conversion(arg, _instantiation_metadata)

    def __init__(
        self,
        *args: typing.Union[
            dict, frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema'],
        _instantiation_metadata: typing.Optional[InstantiationMetadata] = None,
        **kwargs: typing.Union[
            dict, frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, 'Schema', Unset
        ]
    ):
        """
        this is needed to fix 'Unexpected argument' warning in pycharm
        this code does nothing because all Schema instances are immutable
        this means that all input data is passed into and used in new, and after the new instance is made
        no new attributes are assigned and init is not used
        """
        pass


def cast_to_allowed_types(arg: typing.Union[str, date, datetime, decimal.Decimal, int, float, None, dict, frozendict, list, tuple, bytes, Schema], from_server=False) -> typing.Union[str, bytes, decimal.Decimal, None, frozendict, tuple, Schema]:
    """Normalize raw input into the immutable types the validators expect.

    from_server=False date, datetime -> str
        int, float -> Decimal
    StrSchema will convert that to bytes and remember the encoding when we pass in str input
    """
    if isinstance(arg, (date, datetime)):
        if not from_server:
            return arg.isoformat()
        # ApiTypeError will be thrown later by _validate_type
        return arg
    elif isinstance(arg, bool):
        """
        this check must come before isinstance(arg, (int, float))
        because isinstance(True, int) is True
        """
        return arg
    elif isinstance(arg, decimal.Decimal):
        return arg
    elif isinstance(arg, int):
        return decimal.Decimal(arg)
    elif isinstance(arg, float):
        decimal_from_float = decimal.Decimal(arg)
        if decimal_from_float.as_integer_ratio()[1] == 1:
            # whole-valued floats keep an explicit '.0' so they stay floats:
            # 9.0 -> Decimal('9.0')
            # 3.4028234663852886e+38 -> Decimal('340282346638528859811704183484516925440.0')
            return decimal.Decimal(str(decimal_from_float)+'.0')
        return decimal_from_float
    elif isinstance(arg, str):
        return arg
    elif isinstance(arg, bytes):
        return arg
    elif isinstance(arg, (io.FileIO, io.BufferedReader)):
        if arg.closed:
            raise ApiValueError('Invalid file state; file is closed and must be open')
        return arg
    elif type(arg) is list or type(arg) is tuple:
        # NOTE(review): the recursive calls do not forward from_server, so nested
        # date/datetime values always take the default (from_server=False) branch
        # — confirm this is intended
        return tuple([cast_to_allowed_types(item) for item in arg])
    elif type(arg) is dict or type(arg) is frozendict:
        return frozendict({key: cast_to_allowed_types(val) for key, val in arg.items() if val is not unset})
    elif arg is None:
        return arg
    elif isinstance(arg, Schema):
        return arg
    raise ValueError('Invalid type passed in got input={} type={}'.format(arg, type(arg)))


class ComposedBase(Discriminable):
    # mixin implementing allOf/oneOf/anyOf composition resolution

    @classmethod
    def __get_allof_classes(cls, *args, _instantiation_metadata: InstantiationMetadata):
        """Validate args against every allOf schema and merge the results."""
        path_to_schemas = defaultdict(set)
        for allof_cls in cls._composed_schemas['allOf']:
            if allof_cls in _instantiation_metadata.base_classes:
                # already visited while walking the composition graph
                continue
            other_path_to_schemas = allof_cls._validate(*args, _instantiation_metadata=_instantiation_metadata)
            update(path_to_schemas, other_path_to_schemas)
        return path_to_schemas

    @classmethod
    def __get_oneof_class(
        cls,
        *args,
        discriminated_cls,
        _instantiation_metadata: InstantiationMetadata,
        path_to_schemas: typing.Dict[typing.Tuple, typing.Set[typing.Type[Schema]]]
    ):
        """Validate args against the oneOf schemas; exactly one must match.

        NOTE(review): the path_to_schemas parameter is immediately shadowed by a
        fresh defaultdict below, so the passed-in mapping is never read.
        """
        oneof_classes = []
        chosen_oneof_cls = None
        original_base_classes = _instantiation_metadata.base_classes
        new_base_classes = _instantiation_metadata.base_classes
        path_to_schemas = defaultdict(set)
        for oneof_cls in cls._composed_schemas['oneOf']:
            if oneof_cls in path_to_schemas[_instantiation_metadata.path_to_item]:
                oneof_classes.append(oneof_cls)
                continue
            if isinstance(args[0], oneof_cls):
                # passed in instance is the correct type
                chosen_oneof_cls = oneof_cls
                oneof_classes.append(oneof_cls)
                continue
            # reset base classes so each candidate is validated independently
            _instantiation_metadata.base_classes = original_base_classes
            try:
                path_to_schemas = oneof_cls._validate(*args, _instantiation_metadata=_instantiation_metadata)
                new_base_classes = _instantiation_metadata.base_classes
            except (ApiValueError, ApiTypeError) as ex:
                # a failing candidate is only fatal when the discriminator chose it
                if discriminated_cls is not None and oneof_cls is discriminated_cls:
                    raise ex
                continue
            chosen_oneof_cls = oneof_cls
            oneof_classes.append(oneof_cls)
        if not oneof_classes:
            raise ApiValueError(
                "Invalid inputs given to generate an instance of {}. None "
                "of the oneOf schemas matched the input data.".format(cls)
            )
        elif len(oneof_classes) > 1:
            raise ApiValueError(
                "Invalid inputs given to generate an instance of {}. Multiple "
                "oneOf schemas {} matched the inputs, but a max of one is allowed.".format(cls, oneof_classes)
            )
        _instantiation_metadata.base_classes = new_base_classes
        return path_to_schemas

    @classmethod
    def __get_anyof_classes(
        cls,
        *args,
        discriminated_cls,
        _instantiation_metadata: InstantiationMetadata
    ):
        """Validate args against the anyOf schemas; at least one must match."""
        anyof_classes = []
        chosen_anyof_cls = None
        original_base_classes = _instantiation_metadata.base_classes
        path_to_schemas = defaultdict(set)
        for anyof_cls in cls._composed_schemas['anyOf']:
            if anyof_cls in _instantiation_metadata.base_classes:
                # already visited while walking the composition graph
                continue
            if isinstance(args[0], anyof_cls):
                # passed in instance is the correct type
                chosen_anyof_cls = anyof_cls
                anyof_classes.append(anyof_cls)
                continue

            _instantiation_metadata.base_classes = original_base_classes
            try:
                other_path_to_schemas = anyof_cls._validate(*args, _instantiation_metadata=_instantiation_metadata)
            except (ApiValueError, ApiTypeError) as ex:
                # a failing candidate is only fatal when the discriminator chose it
                if discriminated_cls is not None and anyof_cls is discriminated_cls:
                    raise ex
                continue
            original_base_classes = _instantiation_metadata.base_classes
            chosen_anyof_cls = anyof_cls
            anyof_classes.append(anyof_cls)
            update(path_to_schemas, other_path_to_schemas)
        if not anyof_classes:
            raise ApiValueError(
                "Invalid inputs given to generate an instance of {}. None "
                "of the anyOf schemas matched the input data.".format(cls)
            )
        return path_to_schemas

    @classmethod
    def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None):
        """
        ComposedBase _validate
        We return dynamic classes of different bases depending upon the inputs
        This makes it so:
        - the returned instance is always a subclass of our defining schema
        - this allows us to check type based on whether an instance is a subclass of a schema
        - the returned instance is a serializable type (except for None, True, and False) which are enums

        Returns:
            new_cls (type): the new class

        Raises:
            ApiValueError: when a string can't be converted into a date or datetime and it must be one of those classes
            ApiTypeError: when the input type is not in the list of allowed spec types
        """
        if args and isinstance(args[0], Schema) and _instantiation_metadata.from_server is False:
            if isinstance(args[0], cls):
                # an instance of the correct type was passed in
                return {}
            raise ApiTypeError(
                'Incorrect type passed in, required type was {} and passed type was {} at {}'.format(
                    cls,
                    type(args[0]),
                    _instantiation_metadata.path_to_item
                )
            )

        # validation checking on types, validations, and enums
        path_to_schemas = super()._validate(*args, _instantiation_metadata=_instantiation_metadata)

        # mark this class as visited so composition cycles terminate
        _instantiation_metadata.base_classes |= frozenset({cls})

        # process composed schema
        _discriminator = getattr(cls, '_discriminator', None)
        discriminated_cls = None
        if _discriminator and args and isinstance(args[0], frozendict):
            disc_property_name = list(_discriminator.keys())[0]
            cls._ensure_discriminator_value_present(disc_property_name, _instantiation_metadata, *args)
            # get discriminated_cls by looking at the dict in the current class
            discriminated_cls = cls._get_discriminated_class(
                disc_property_name=disc_property_name, disc_payload_value=args[0][disc_property_name])
            if discriminated_cls is None:
                raise ApiValueError(
                    "Invalid discriminator value '{}' was passed in to {}.{} Only the values {} are allowed at {}".format(
                        args[0][disc_property_name],
                        cls.__name__,
                        disc_property_name,
                        list(_discriminator[disc_property_name].keys()),
                        _instantiation_metadata.path_to_item + (disc_property_name,)
                    )
                )

        if cls._composed_schemas['allOf']:
            other_path_to_schemas = cls.__get_allof_classes(*args, _instantiation_metadata=_instantiation_metadata)
            update(path_to_schemas, other_path_to_schemas)
        if cls._composed_schemas['oneOf']:
            other_path_to_schemas = cls.__get_oneof_class(
                *args,
                discriminated_cls=discriminated_cls,
                _instantiation_metadata=_instantiation_metadata,
                path_to_schemas=path_to_schemas
            )
            update(path_to_schemas, other_path_to_schemas)
        if cls._composed_schemas['anyOf']:
            other_path_to_schemas = cls.__get_anyof_classes(
                *args,
                discriminated_cls=discriminated_cls,
                _instantiation_metadata=_instantiation_metadata
            )
            update(path_to_schemas, other_path_to_schemas)

        if discriminated_cls is not None:
            # TODO use an exception from this package here
            assert discriminated_cls in path_to_schemas[_instantiation_metadata.path_to_item]
        return path_to_schemas


# DictBase, ListBase, NumberBase, StrBase, BoolBase, NoneBase
class ComposedSchema(
    _SchemaTypeChecker(typing.Union[none_type, str, decimal.Decimal, bool, tuple, frozendict]),
    ComposedBase,
    DictBase,
    ListBase,
    NumberBase,
    StrBase,
    BoolBase,
    NoneBase,
    Schema
):
    # generic composed schema; concrete subclasses override _composed_schemas

    # subclass properties
    _composed_schemas = {}

    @classmethod
    def _from_openapi_data(cls, *args: typing.Any, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None, **kwargs):
        if not args:
            if not kwargs:
                raise ApiTypeError('{} is missing required input data in args or kwargs'.format(cls.__name__))
            args = (kwargs, )
        return super()._from_openapi_data(args[0], _instantiation_metadata=_instantiation_metadata)


class ListSchema(
    _SchemaTypeChecker(typing.Union[tuple]),
    ListBase,
    Schema
):
    # openapi type: array

    @classmethod
    def _from_openapi_data(cls, arg: typing.List[typing.Any], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None):
        return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata)

    def __new__(cls, arg: typing.Union[list, tuple], **kwargs: InstantiationMetadata):
        return super().__new__(cls, arg, **kwargs)


class NoneSchema(
    _SchemaTypeChecker(typing.Union[none_type]),
    NoneBase,
    Schema
):
    # openapi: nullable-only schema (type null)

    @classmethod
    def _from_openapi_data(cls, arg: None, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None):
        return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata)

    def __new__(cls, arg: None, **kwargs: typing.Union[InstantiationMetadata]):
        return super().__new__(cls, arg, **kwargs)


class NumberSchema(
    _SchemaTypeChecker(typing.Union[decimal.Decimal]),
    NumberBase,
    Schema
):
    """
    This is used for type: number with no format
    Both integers AND floats are accepted
    """

    @classmethod
    def _from_openapi_data(cls, arg: typing.Union[int, float, decimal.Decimal], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None):
        return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata)

    def __new__(cls, arg: typing.Union[decimal.Decimal, int, float], **kwargs: typing.Union[InstantiationMetadata]):
        return super().__new__(cls, arg, **kwargs)


class IntBase(NumberBase):
    # adds integer-only format validation on top of NumberBase

    @property
    def as_int(self) -> int:
        # lazily computed and memoized on first access
        try:
            return self._as_int
        except AttributeError:
            self._as_int = int(self)
            return self._as_int

    @classmethod
    def _validate_format(cls, arg: typing.Optional[decimal.Decimal], _instantiation_metadata: InstantiationMetadata):
        # reject Decimals carrying fractional digits (nonzero exponent)
        if isinstance(arg, decimal.Decimal):
            exponent = arg.as_tuple().exponent
            if exponent != 0:
                raise ApiValueError(
                    "Invalid value '{}' for type integer at {}".format(arg, _instantiation_metadata.path_to_item)
                )

    @classmethod
    def _validate(cls, *args, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None):
        """
        IntBase _validate
        TODO what about types = (int, number) -> IntBase, NumberBase? We could drop int and keep number only
        """
        cls._validate_format(args[0], _instantiation_metadata=_instantiation_metadata)
        return super()._validate(*args, _instantiation_metadata=_instantiation_metadata)


class IntSchema(IntBase, NumberSchema):
    # openapi type: integer (no format)

    @classmethod
    def _from_openapi_data(cls, arg: int, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None):
        return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata)

    def __new__(cls, arg: typing.Union[decimal.Decimal, int], **kwargs: typing.Union[InstantiationMetadata]):
        return super().__new__(cls, arg, **kwargs)


class Int32Schema(
    _SchemaValidator(
        # signed 32-bit range: -2**31 .. 2**31 - 1
        inclusive_minimum=decimal.Decimal(-2147483648),
        inclusive_maximum=decimal.Decimal(2147483647)
    ),
    IntSchema
):
    pass

class Int64Schema(
    _SchemaValidator(
        # signed 64-bit range: -2**63 .. 2**63 - 1
        inclusive_minimum=decimal.Decimal(-9223372036854775808),
        inclusive_maximum=decimal.Decimal(9223372036854775807)
    ),
    IntSchema
):
    pass


class Float32Schema(
    _SchemaValidator(
        # IEEE 754 single-precision magnitude bounds
        inclusive_minimum=decimal.Decimal(-3.4028234663852886e+38),
        inclusive_maximum=decimal.Decimal(3.4028234663852886e+38)
    ),
    NumberSchema
):

    @classmethod
    def _from_openapi_data(cls, arg: typing.Union[float, decimal.Decimal], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None):
        # todo check format
        return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata)


class Float64Schema(
    _SchemaValidator(
        # IEEE 754 double-precision magnitude bounds
        inclusive_minimum=decimal.Decimal(-1.7976931348623157E+308),
        inclusive_maximum=decimal.Decimal(1.7976931348623157E+308)
    ),
    NumberSchema
):

    @classmethod
    def _from_openapi_data(cls, arg: typing.Union[float, decimal.Decimal], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None):
        # todo check format
        return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata)


class StrSchema(
    _SchemaTypeChecker(typing.Union[str]),
    StrBase,
    Schema
):
    """
    date + datetime string types must inherit from this class
    That is because one can validate a str payload as both:
    - type: string (format unset)
    - type: string, format: date
    """

    @classmethod
    def _from_openapi_data(cls, arg: typing.Union[str], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None) -> 'StrSchema':
        return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata)

    def __new__(cls, arg: typing.Union[str, date, datetime], **kwargs: typing.Union[InstantiationMetadata]):
        return super().__new__(cls, arg, **kwargs)


class DateSchema(DateBase, StrSchema):
    # openapi type: string, format: date

    def __new__(cls, arg: typing.Union[str, datetime], **kwargs: typing.Union[InstantiationMetadata]):
        return super().__new__(cls, arg, **kwargs)


class DateTimeSchema(DateTimeBase, StrSchema):
    # openapi type: string, format: date-time

    def __new__(cls, arg: typing.Union[str, datetime], **kwargs: typing.Union[InstantiationMetadata]):
        return super().__new__(cls, arg, **kwargs)


class DecimalSchema(DecimalBase, StrSchema):
    # openapi type: string, format: number — the payload is a string

    def __new__(cls, arg: typing.Union[str], **kwargs: typing.Union[InstantiationMetadata]):
        """
        Note: Decimals may not be passed in because cast_to_allowed_types is only invoked once for payloads
        which can be simple (str) or complex (dicts or lists with nested values)
        Because casting is only done once and recursively casts all values prior to validation then for a potential
        client side Decimal input if Decimal was accepted as an input in DecimalSchema then one would not know
        if one was using it for a StrSchema (where it should be cast to str) or one is using it for NumberSchema
        where it should stay as Decimal.
        """
        return super().__new__(cls, arg, **kwargs)


class BytesSchema(
    _SchemaTypeChecker(typing.Union[bytes]),
    Schema,
):
    """
    this class will subclass bytes and is immutable
    """
    def __new__(cls, arg: typing.Union[bytes], **kwargs: typing.Union[InstantiationMetadata]):
        # bypass Schema.__new__: bytes payloads need no dynamic-class machinery
        return super(Schema, cls).__new__(cls, arg)


class FileSchema(
    _SchemaTypeChecker(typing.Union[FileIO]),
    Schema,
):
    """
    This class is NOT immutable
    Dynamic classes are built using it for example when AnyType allows in binary data
    Al other schema classes ARE immutable
    If one wanted to make this immutable one could make this a DictSchema with required properties:
    - data = BytesSchema (which would be an immutable bytes based schema)
    - file_name = StrSchema
    and cast_to_allowed_types would convert bytes and file instances into dicts containing data + file_name
    The downside would be that data would be stored in memory which one may not want to do for very large files

    The developer is responsible for closing this file and deleting it

    This class was kept as mutable:
    - to allow file reading and writing to disk
    - to be able to preserve file name info
    """

    def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader], **kwargs: typing.Union[InstantiationMetadata]):
        # bypass Schema.__new__: file handles need no dynamic-class machinery
        return super(Schema, cls).__new__(cls, arg)


class BinaryBase:
    pass


class BinarySchema(
    _SchemaTypeChecker(typing.Union[bytes, FileIO]),
    ComposedBase,
    BinaryBase,
    Schema,
):
    # openapi format: binary — accepts either raw bytes or an open file handle

    @classmethod
    @property
    def _composed_schemas(cls):
        # we need this here to make our import statements work
        # we must store _composed_schemas in here so the code is only run
        # when we invoke this method. If we kept this at the class
        # level we would get an error because the class level
        # code would be run when this module is imported, and these composed
        # classes don't exist yet because their module has not finished
        # loading
        return {
            'allOf': [],
            'oneOf': [
                BytesSchema,
                FileSchema,
            ],
            'anyOf': [
            ],
        }

    def __new__(cls, arg: typing.Union[io.FileIO, io.BufferedReader, bytes], **kwargs: typing.Union[InstantiationMetadata]):
        return super().__new__(cls, arg)


class BoolSchema(
    _SchemaTypeChecker(typing.Union[bool]),
    BoolBase,
    Schema
):
    # openapi type: boolean

    @classmethod
    def _from_openapi_data(cls, arg: bool, _instantiation_metadata: typing.Optional[InstantiationMetadata] = None):
        return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata)

    def __new__(cls, arg: bool, **kwargs: typing.Union[InstantiationMetadata]):
        return super().__new__(cls, arg, **kwargs)


class AnyTypeSchema(
    _SchemaTypeChecker(
        typing.Union[frozendict, tuple, decimal.Decimal, str, bool, none_type, bytes, FileIO]
    ),
    DictBase,
    ListBase,
    NumberBase,
    StrBase,
    BoolBase,
    NoneBase,
    Schema
):
    # schema with no declared type: accepts every allowed payload type
    pass


class DictSchema(
    _SchemaTypeChecker(typing.Union[frozendict]),
    DictBase,
    Schema
):
    # openapi type: object

    @classmethod
    def _from_openapi_data(cls, arg: typing.Dict[str, typing.Any], _instantiation_metadata: typing.Optional[InstantiationMetadata] = None):
        return super()._from_openapi_data(arg, _instantiation_metadata=_instantiation_metadata)

    def __new__(cls, *args: typing.Union[dict, frozendict], **kwargs: typing.Union[dict, frozendict, list, tuple, decimal.Decimal, float, int, str, date, datetime, bool, None, bytes, Schema, Unset, InstantiationMetadata]):
        return super().__new__(cls, *args, **kwargs)


# the simple leaf schema classes; used by Schema.__get_new_cls to avoid MRO
# conflicts when manufacturing dynamic classes
schema_descendents = set([NoneSchema, DictSchema, ListSchema, NumberSchema, StrSchema, BoolSchema])


def deserialize_file(response_data, configuration, content_disposition=None):
    """Deserializes body to file

    Saves
def deserialize_file(response_data, configuration, content_disposition=None):
    """Deserializes body to file

    Saves response body into a file in a temporary folder,
    using the filename from the `Content-Disposition` header if provided.

    Args:
        response_data (str | bytes): the file data to write
        configuration (Configuration): the instance to use to convert files

    Keyword Args:
        content_disposition (str): the value of the Content-Disposition
            header

    Returns:
        (file_type): the deserialized file which is open
            The user is responsible for closing and reading the file
    """
    # Reserve a unique path in the configured temp folder, then release the
    # descriptor/path so the file can be re-created (possibly under a
    # server-supplied name) below.
    fd, path = tempfile.mkstemp(dir=configuration.temp_folder_path)
    os.close(fd)
    os.remove(path)

    if content_disposition:
        # Prefer the filename from the header; fall back to the random temp
        # name when the header carries no filename= part (the previous code
        # raised AttributeError on such headers).
        match = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?',
                          content_disposition)
        if match:
            path = os.path.join(os.path.dirname(path), match.group(1))

    with open(path, "wb") as f:
        if isinstance(response_data, str):
            # change str to bytes so we can write it
            response_data = response_data.encode('utf-8')
        f.write(response_data)

    # Reopen for reading; the caller owns the handle (close + delete).
    return open(path, "rb")
@functools.cache
def get_new_class(
    class_name: str,
    bases: 'typing.Tuple[typing.Type[typing.Union[Schema, typing.Any]], ...]'
) -> 'typing.Type[Schema]':
    """
    Returns a new class that is made with the subclass bases

    Cached with functools.cache, so repeated calls with the same
    (class_name, bases) pair return the identical class object.
    """
    return type(class_name, bases, {})


# Flip to True to have log_cache_usage print cache statistics.
LOG_CACHE_USAGE = False


def log_cache_usage(cache_fn):
    """Print the cache statistics of a cached function (no-op unless LOG_CACHE_USAGE)."""
    if LOG_CACHE_USAGE:
        print(cache_fn.__name__, cache_fn.cache_info())


if sys.version_info < (3, 7):
    import typing

    def is_generic(klass):
        """ Determine whether klass is a generic class """
        return type(klass) == typing.GenericMeta

    def is_dict(klass):
        """ Determine whether klass is a Dict """
        return klass.__extra__ == dict

    def is_list(klass):
        """ Determine whether klass is a List """
        return klass.__extra__ == list

else:

    def is_generic(klass):
        """ Determine whether klass is a generic class """
        # on 3.7+ parameterized generics carry __origin__
        return hasattr(klass, '__origin__')

    def is_dict(klass):
        """ Determine whether klass is a Dict """
        return klass.__origin__ == dict

    def is_list(klass):
        """ Determine whether klass is a List """
        return klass.__origin__ == list
+ """ + if data is None: + return None + + if klass in six.integer_types or klass in (float, str, bool, bytearray): + return _deserialize_primitive(data, klass) + elif klass == object: + return _deserialize_object(data) + elif klass == datetime.date: + return deserialize_date(data) + elif klass == datetime.datetime: + return deserialize_datetime(data) + elif typing_utils.is_generic(klass): + if typing_utils.is_list(klass): + return _deserialize_list(data, klass.__args__[0]) + if typing_utils.is_dict(klass): + return _deserialize_dict(data, klass.__args__[1]) + else: + return deserialize_model(data, klass) + + +def _deserialize_primitive(data, klass): + """Deserializes to primitive type. + + :param data: data to deserialize. + :param klass: class literal. + + :return: int, long, float, str, bool. + :rtype: int | long | float | str | bool + """ + try: + value = klass(data) + except UnicodeEncodeError: + value = six.u(data) + except TypeError: + value = data + return value + + +def _deserialize_object(value): + """Return an original value. + + :return: object. + """ + return value + + +def deserialize_date(string): + """Deserializes string to date. + + :param string: str. + :type string: str + :return: date. + :rtype: date + """ + if string is None: + return None + + try: + from dateutil.parser import parse + return parse(string).date() + except ImportError: + return string + + +def deserialize_datetime(string): + """Deserializes string to datetime. + + The string should be in iso8601 datetime format. + + :param string: str. + :type string: str + :return: datetime. + :rtype: datetime + """ + if string is None: + return None + + try: + from dateutil.parser import parse + return parse(string) + except ImportError: + return string + + + + +def deserialize_model(data, klass): + """Deserializes list or dict to model. + + :param data: dict, list. + :type data: dict | list + :param klass: class literal. + :return: model object. 
+ """ + instance = klass() + instance._raw_dict = data + + if not instance.openapi_types: + return data + if data is None: + return instance + + if isinstance(data, list): + for attr, attr_type in six.iteritems(instance.openapi_types): + if instance.attribute_map[attr] in data: + value = data[instance.attribute_map[attr]] + setattr(instance, attr, _deserialize(value, attr_type)) + + elif isinstance(data, dict): + + for attr in data: + value = data[attr] + if attr in instance.attribute_map: + setattr(instance, attr, _deserialize(value, instance.openapi_types[attr])) + else: + setattr(instance, attr, value) + + return instance + + +def _deserialize_list(data, boxed_type): + """Deserializes a list and its elements. + + :param data: list to deserialize. + :type data: list + :param boxed_type: class literal. + + :return: deserialized list. + :rtype: list + """ + return [_deserialize(sub_data, boxed_type) + for sub_data in data] + + +def _deserialize_dict(data, boxed_type): + """Deserializes a dict and its elements. + + :param data: dict to deserialize. + :type data: dict + :param boxed_type: class literal. + + :return: deserialized dict. 
+ :rtype: dict + """ + return {k: _deserialize(v, boxed_type) + for k, v in six.iteritems(data)} diff --git a/libraries/models/docs/ApplicationConfig.md b/libraries/models/docs/ApplicationConfig.md new file mode 100644 index 00000000..4e37aaf4 --- /dev/null +++ b/libraries/models/docs/ApplicationConfig.md @@ -0,0 +1,12 @@ +# ApplicationConfig + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**harness** | [**ApplicationHarnessConfig**](ApplicationHarnessConfig.md) | | +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/ApplicationDependenciesConfig.md b/libraries/models/docs/ApplicationDependenciesConfig.md new file mode 100644 index 00000000..6409549d --- /dev/null +++ b/libraries/models/docs/ApplicationDependenciesConfig.md @@ -0,0 +1,14 @@ +# ApplicationDependenciesConfig + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**hard** | **[str]** | Hard dependencies indicate that the application may not start without these other applications. | [optional] +**soft** | **[str]** | Soft dependencies indicate that the application will work partially without these other applications. 
| [optional] +**build** | **[str]** | Hard dependencies indicate that the application Docker image build requires these base/common images | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/ApplicationHarnessConfig.md b/libraries/models/docs/ApplicationHarnessConfig.md new file mode 100644 index 00000000..6d8fc882 --- /dev/null +++ b/libraries/models/docs/ApplicationHarnessConfig.md @@ -0,0 +1,29 @@ +# ApplicationHarnessConfig + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**deployment** | [**DeploymentAutoArtifactConfig**](DeploymentAutoArtifactConfig.md) | | [optional] +**service** | [**ServiceAutoArtifactConfig**](ServiceAutoArtifactConfig.md) | | [optional] +**subdomain** | **str** | If specified, an ingress will be created at [subdomain].[.Values.domain] | [optional] +**aliases** | **[str]** | If specified, an ingress will be created at [alias].[.Values.domain] for each alias | [optional] +**domain** | **str** | If specified, an ingress will be created at [domain] | [optional] +**dependencies** | [**ApplicationDependenciesConfig**](ApplicationDependenciesConfig.md) | | [optional] +**secured** | **bool** | When true, the application is shielded with a getekeeper | [optional] +**uri_role_mapping** | [**[UriRoleMappingConfig]**](UriRoleMappingConfig.md) | Map uri/roles to secure with the Gatekeeper (if `secured: true`) | [optional] +**secrets** | [**SimpleMap**](SimpleMap.md) | | [optional] +**use_services** | **[str]** | Specify which services this application uses in the frontend to create proxy ingresses. e.g. 
``` - name: samples ``` | [optional] +**database** | [**DatabaseDeploymentConfig**](DatabaseDeploymentConfig.md) | | [optional] +**resources** | [**[FileResourcesConfig]**](FileResourcesConfig.md) | Application file resources. Maps from deploy/resources folder and mounts as configmaps | [optional] +**readiness_probe** | [**ApplicationProbe**](ApplicationProbe.md) | | [optional] +**startup_probe** | [**ApplicationProbe**](ApplicationProbe.md) | | [optional] +**liveness_probe** | [**ApplicationProbe**](ApplicationProbe.md) | | [optional] +**source_root** | [**Filename**](Filename.md) | | [optional] +**name** | **str** | | [optional] +**jupyterhub** | [**JupyterHubConfig**](JupyterHubConfig.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/ApplicationProbe.md b/libraries/models/docs/ApplicationProbe.md new file mode 100644 index 00000000..77f6990b --- /dev/null +++ b/libraries/models/docs/ApplicationProbe.md @@ -0,0 +1,15 @@ +# ApplicationProbe + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**path** | **str** | | +**period_seconds** | **float** | | [optional] +**failure_threshold** | **float** | | [optional] +**initial_delay_seconds** | **float** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/ApplicationsConfigsMap.md 
b/libraries/models/docs/ApplicationsConfigsMap.md new file mode 100644 index 00000000..ed15a264 --- /dev/null +++ b/libraries/models/docs/ApplicationsConfigsMap.md @@ -0,0 +1,11 @@ +# ApplicationsConfigsMap + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**any string name** | [**ApplicationConfig**](ApplicationConfig.md) | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/AutoArtifactSpec.md b/libraries/models/docs/AutoArtifactSpec.md new file mode 100644 index 00000000..9f80e997 --- /dev/null +++ b/libraries/models/docs/AutoArtifactSpec.md @@ -0,0 +1,13 @@ +# AutoArtifactSpec + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auto** | **bool** | When true, enables automatic template | +**name** | **str** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/BackupConfig.md b/libraries/models/docs/BackupConfig.md new file mode 100644 index 00000000..7e08ba44 --- /dev/null +++ b/libraries/models/docs/BackupConfig.md @@ -0,0 +1,20 @@ +# BackupConfig + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**dir** | [**Filename**](Filename.md) | | +**resources** | [**DeploymentResourcesConf**](DeploymentResourcesConf.md) | | +**active** | **bool** | | [optional] +**keep_days** | **int** | | [optional] +**keep_weeks** 
| **int** | | [optional] +**keep_months** | **int** | | [optional] +**schedule** | **str** | Cron expression | [optional] +**suffix** | **bool, date, datetime, dict, float, int, list, str, none_type** | The file suffix added to backup files | [optional] +**volumesize** | **str** | The volume size for backups (all backups share the same volume) | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/CDCEvent.md b/libraries/models/docs/CDCEvent.md new file mode 100644 index 00000000..ef9c1a49 --- /dev/null +++ b/libraries/models/docs/CDCEvent.md @@ -0,0 +1,17 @@ +# CDCEvent + +A message sent to the orchestration queue. Applications can listen to these events to react to data change events happening on other applications. + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**operation** | **str** | the operation on the object e.g. create / update / delete | +**uid** | **str** | the unique identifier attribute of the object | +**message_type** | **str** | the type of the message (relates to the object type) e.g. 
jobs | +**meta** | [**CDCEventMeta**](CDCEventMeta.md) | | +**resource** | [**FreeObject**](FreeObject.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/CDCEventMeta.md b/libraries/models/docs/CDCEventMeta.md new file mode 100644 index 00000000..a90f4b95 --- /dev/null +++ b/libraries/models/docs/CDCEventMeta.md @@ -0,0 +1,16 @@ +# CDCEventMeta + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**app_name** | **str** | The name of the application/microservice sending the message | +**user** | [**User**](User.md) | | [optional] +**args** | [**[FreeObject]**](FreeObject.md) | the caller function arguments | [optional] +**kwargs** | **bool, date, datetime, dict, float, int, list, str, none_type** | the caller function keyword arguments | [optional] +**description** | **str** | General description -- for human consumption | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/CpuMemoryConfig.md b/libraries/models/docs/CpuMemoryConfig.md new file mode 100644 index 00000000..48d91152 --- /dev/null +++ b/libraries/models/docs/CpuMemoryConfig.md @@ -0,0 +1,13 @@ +# CpuMemoryConfig + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**cpu** | **str** | | [optional] +**memory** | 
**str** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/DatabaseDeploymentConfig.md b/libraries/models/docs/DatabaseDeploymentConfig.md new file mode 100644 index 00000000..570d31b5 --- /dev/null +++ b/libraries/models/docs/DatabaseDeploymentConfig.md @@ -0,0 +1,21 @@ +# DatabaseDeploymentConfig + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auto** | **bool** | When true, enables automatic template | +**type** | **str** | Define the database type. One of (mongo, postgres, neo4j) | [optional] +**size** | **str** | Specify database disk size | [optional] +**user** | **str** | database username | [optional] +**_pass** | **str** | Database password | [optional] +**mongo** | [**FreeObject**](FreeObject.md) | | [optional] +**postgres** | [**FreeObject**](FreeObject.md) | | [optional] +**neo4j** | **bool, date, datetime, dict, float, int, list, str, none_type** | Neo4j database specific configuration | [optional] +**resources** | [**DeploymentResourcesConf**](DeploymentResourcesConf.md) | | [optional] +**name** | **str** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/DatabaseDeploymentConfigAllOf.md b/libraries/models/docs/DatabaseDeploymentConfigAllOf.md new file mode 100644 index 00000000..86e092dc --- /dev/null +++ 
b/libraries/models/docs/DatabaseDeploymentConfigAllOf.md @@ -0,0 +1,19 @@ +# DatabaseDeploymentConfigAllOf + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**type** | **str** | Define the database type. One of (mongo, postgres, neo4j) | [optional] +**size** | **str** | Specify database disk size | [optional] +**user** | **str** | database username | [optional] +**_pass** | **str** | Database password | [optional] +**mongo** | [**FreeObject**](FreeObject.md) | | [optional] +**postgres** | [**FreeObject**](FreeObject.md) | | [optional] +**neo4j** | **bool, date, datetime, dict, float, int, list, str, none_type** | Neo4j database specific configuration | [optional] +**resources** | [**DeploymentResourcesConf**](DeploymentResourcesConf.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/DeploymentAutoArtifactConfig.md b/libraries/models/docs/DeploymentAutoArtifactConfig.md new file mode 100644 index 00000000..875e4d51 --- /dev/null +++ b/libraries/models/docs/DeploymentAutoArtifactConfig.md @@ -0,0 +1,17 @@ +# DeploymentAutoArtifactConfig + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auto** | **bool** | When true, enables automatic template | +**port** | **bool, date, datetime, dict, float, int, list, str, none_type** | Deployment port | [optional] +**replicas** | **int** | Number of replicas | [optional] +**image** | **str** | Image name to use in the deployment. 
Leave it blank to set from the application's Docker file | [optional] +**resources** | **bool, date, datetime, dict, float, int, list, str, none_type** | Deployment resources | [optional] +**name** | **str** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/DeploymentResourcesConf.md b/libraries/models/docs/DeploymentResourcesConf.md new file mode 100644 index 00000000..b9788bb8 --- /dev/null +++ b/libraries/models/docs/DeploymentResourcesConf.md @@ -0,0 +1,13 @@ +# DeploymentResourcesConf + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**requests** | [**CpuMemoryConfig**](CpuMemoryConfig.md) | | [optional] +**limits** | [**CpuMemoryConfig**](CpuMemoryConfig.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/FileResourcesConfig.md b/libraries/models/docs/FileResourcesConfig.md new file mode 100644 index 00000000..af671647 --- /dev/null +++ b/libraries/models/docs/FileResourcesConfig.md @@ -0,0 +1,14 @@ +# FileResourcesConfig + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | [**Filename**](Filename.md) | | +**src** | [**Filename**](Filename.md) | | +**dst** | **str** | | +**any string name** | **bool, date, datetime, dict, float, int, list, str, 
none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/Filename.md b/libraries/models/docs/Filename.md new file mode 100644 index 00000000..0230048f --- /dev/null +++ b/libraries/models/docs/Filename.md @@ -0,0 +1,11 @@ +# Filename + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**value** | **str** | | + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/FreeObject.md b/libraries/models/docs/FreeObject.md new file mode 100644 index 00000000..2d0bc683 --- /dev/null +++ b/libraries/models/docs/FreeObject.md @@ -0,0 +1,11 @@ +# FreeObject + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/HarnessMainConfig.md b/libraries/models/docs/HarnessMainConfig.md new file mode 100644 index 00000000..b563b5c5 --- /dev/null +++ b/libraries/models/docs/HarnessMainConfig.md @@ -0,0 +1,24 @@ +# HarnessMainConfig + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**local** | **bool** | If set to true, local DNS mapping is added to pods. | +**secured_gatekeepers** | **bool** | Enables/disables Gatekeepers on secured applications. 
Set to false for testing/development | +**domain** | **str** | The root domain | +**namespace** | **str** | The K8s namespace. | +**mainapp** | **str** | Defines the app to map to the root domain | +**apps** | [**ApplicationsConfigsMap**](ApplicationsConfigsMap.md) | | +**registry** | [**RegistryConfig**](RegistryConfig.md) | | [optional] +**tag** | **str** | Docker tag used to push/pull the built images. | [optional] +**env** | [**[NameValue]**](NameValue.md) | Environmental variables added to all pods | [optional] +**privenv** | [**NameValue**](NameValue.md) | | [optional] +**backup** | [**BackupConfig**](BackupConfig.md) | | [optional] +**name** | **str** | Base name | [optional] +**task_images** | [**SimpleMap**](SimpleMap.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/IngressConfig.md b/libraries/models/docs/IngressConfig.md new file mode 100644 index 00000000..f1803ea2 --- /dev/null +++ b/libraries/models/docs/IngressConfig.md @@ -0,0 +1,15 @@ +# IngressConfig + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auto** | **bool** | When true, enables automatic template | +**ssl_redirect** | **bool** | | [optional] +**letsencrypt** | [**IngressConfigAllOfLetsencrypt**](IngressConfigAllOfLetsencrypt.md) | | [optional] +**name** | **str** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back 
to README]](../README.md) + + diff --git a/libraries/models/docs/IngressConfigAllOf.md b/libraries/models/docs/IngressConfigAllOf.md new file mode 100644 index 00000000..3ba52e51 --- /dev/null +++ b/libraries/models/docs/IngressConfigAllOf.md @@ -0,0 +1,13 @@ +# IngressConfigAllOf + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**ssl_redirect** | **bool** | | [optional] +**letsencrypt** | [**IngressConfigAllOfLetsencrypt**](IngressConfigAllOfLetsencrypt.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/IngressConfigAllOfLetsencrypt.md b/libraries/models/docs/IngressConfigAllOfLetsencrypt.md new file mode 100644 index 00000000..d5c218ea --- /dev/null +++ b/libraries/models/docs/IngressConfigAllOfLetsencrypt.md @@ -0,0 +1,12 @@ +# IngressConfigAllOfLetsencrypt + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**email** | **str** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/JupyterHubConfig.md b/libraries/models/docs/JupyterHubConfig.md new file mode 100644 index 00000000..7c7f2361 --- /dev/null +++ b/libraries/models/docs/JupyterHubConfig.md @@ -0,0 +1,15 @@ +# JupyterHubConfig + + +## Properties +Name | Type | Description | Notes +------------ | ------------- 
| ------------- | ------------- +**args** | **[str]** | arguments passed to the container | [optional] +**extra_config** | [**SimpleMap**](SimpleMap.md) | | [optional] +**spawner_extra_config** | [**FreeObject**](FreeObject.md) | | [optional] +**application_hook** | **bool, date, datetime, dict, float, int, list, str, none_type** | change the hook function (advanced) Specify the Python name of the function (full module path, the module must be installed in the Docker image) | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/NameValue.md b/libraries/models/docs/NameValue.md new file mode 100644 index 00000000..0a2fea07 --- /dev/null +++ b/libraries/models/docs/NameValue.md @@ -0,0 +1,13 @@ +# NameValue + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | +**value** | **str** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/PathSpecifier.md b/libraries/models/docs/PathSpecifier.md new file mode 100644 index 00000000..baa172d4 --- /dev/null +++ b/libraries/models/docs/PathSpecifier.md @@ -0,0 +1,11 @@ +# PathSpecifier + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**value** | **str** | | + +[[Back to Model list]](../README.md#documentation-for-models) 
[[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/RegistryConfig.md b/libraries/models/docs/RegistryConfig.md new file mode 100644 index 00000000..419be9e9 --- /dev/null +++ b/libraries/models/docs/RegistryConfig.md @@ -0,0 +1,13 @@ +# RegistryConfig + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**name** | **str** | | +**secret** | **str** | Optional secret used for pulling from docker registry. | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/ServiceAutoArtifactConfig.md b/libraries/models/docs/ServiceAutoArtifactConfig.md new file mode 100644 index 00000000..4f382876 --- /dev/null +++ b/libraries/models/docs/ServiceAutoArtifactConfig.md @@ -0,0 +1,14 @@ +# ServiceAutoArtifactConfig + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**auto** | **bool** | When true, enables automatic template | +**port** | **int** | Service port | [optional] +**name** | **str** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/ServiceAutoArtifactConfigAllOf.md b/libraries/models/docs/ServiceAutoArtifactConfigAllOf.md new file mode 100644 index 00000000..ca629355 --- /dev/null +++ 
b/libraries/models/docs/ServiceAutoArtifactConfigAllOf.md @@ -0,0 +1,12 @@ +# ServiceAutoArtifactConfigAllOf + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**port** | **int** | Service port | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/SimpleMap.md b/libraries/models/docs/SimpleMap.md new file mode 100644 index 00000000..380d5da2 --- /dev/null +++ b/libraries/models/docs/SimpleMap.md @@ -0,0 +1,11 @@ +# SimpleMap + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**any string name** | **str** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/UriRoleMappingConfig.md b/libraries/models/docs/UriRoleMappingConfig.md new file mode 100644 index 00000000..4ea3fb85 --- /dev/null +++ b/libraries/models/docs/UriRoleMappingConfig.md @@ -0,0 +1,14 @@ +# UriRoleMappingConfig + +Defines the application Gatekeeper configuration, if enabled (i.e. `secured: true`).
+ +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**uri** | [**PathSpecifier**](PathSpecifier.md) | | +**roles** | **[str]** | Roles allowed to access the present uri | +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/User.md b/libraries/models/docs/User.md new file mode 100644 index 00000000..443f33d7 --- /dev/null +++ b/libraries/models/docs/User.md @@ -0,0 +1,30 @@ +# User + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**access** | **{str: (bool, date, datetime, dict, float, int, list, str, none_type)}** | | [optional] +**attributes** | **{str: (bool, date, datetime, dict, float, int, list, str, none_type)}** | | [optional] +**client_roles** | **{str: (bool, date, datetime, dict, float, int, list, str, none_type)}** | | [optional] +**created_timestamp** | **int** | | [optional] +**credentials** | [**[UserCredential]**](UserCredential.md) | | [optional] +**disableable_credential_types** | **[str]** | | [optional] +**email** | **str** | | [optional] +**email_verified** | **bool** | | [optional] +**enabled** | **bool** | | [optional] +**federation_link** | **str** | | [optional] +**first_name** | **str** | | [optional] +**groups** | **[str]** | | [optional] +**id** | **str** | | [optional] +**last_name** | **str** | | [optional] +**realm_roles** | **[str]** | | [optional] +**required_actions** | **[str]** | | [optional] +**service_account_client_id** | **str** | | [optional] +**username** | **str** | | [optional] +**additional_properties** | **bool, date, datetime, dict, float, int, list, str, none_type** | | 
[optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/UserCredential.md b/libraries/models/docs/UserCredential.md new file mode 100644 index 00000000..35b54178 --- /dev/null +++ b/libraries/models/docs/UserCredential.md @@ -0,0 +1,20 @@ +# UserCredential + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**created_date** | **int** | | [optional] +**credential_data** | **str** | | [optional] +**id** | **str** | | [optional] +**priority** | **int** | | [optional] +**secret_data** | **str** | | [optional] +**temporary** | **bool** | | [optional] +**type** | **str** | | [optional] +**user_label** | **str** | | [optional] +**value** | **str** | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/docs/UserGroup.md b/libraries/models/docs/UserGroup.md new file mode 100644 index 00000000..5bb41d0b --- /dev/null +++ b/libraries/models/docs/UserGroup.md @@ -0,0 +1,19 @@ +# UserGroup + + +## Properties +Name | Type | Description | Notes +------------ | ------------- | ------------- | ------------- +**access** | **{str: (bool, date, datetime, dict, float, int, list, str, none_type)}** | | [optional] +**attributes** | [**SimpleMap**](SimpleMap.md) | | [optional] +**client_roles** | **{str: (bool, date, datetime, dict, float, int, list, str, none_type)}** | | 
[optional] +**id** | **str** | | [optional] +**name** | **str** | | [optional] +**path** | **str** | | [optional] +**realm_roles** | **[str]** | | [optional] +**sub_groups** | [**[UserGroup]**](UserGroup.md) | | [optional] +**any string name** | **bool, date, datetime, dict, float, int, list, str, none_type** | any string name can be used but the value must be the correct type | [optional] + +[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md) + + diff --git a/libraries/models/git_push.sh b/libraries/models/git_push.sh new file mode 100644 index 00000000..ced3be2b --- /dev/null +++ b/libraries/models/git_push.sh @@ -0,0 +1,58 @@ +#!/bin/sh +# ref: https://help.github.com/articles/adding-an-existing-project-to-github-using-the-command-line/ +# +# Usage example: /bin/sh ./git_push.sh wing328 openapi-petstore-perl "minor update" "gitlab.com" + +git_user_id=$1 +git_repo_id=$2 +release_note=$3 +git_host=$4 + +if [ "$git_host" = "" ]; then + git_host="github.com" + echo "[INFO] No command line input provided. Set \$git_host to $git_host" +fi + +if [ "$git_user_id" = "" ]; then + git_user_id="GIT_USER_ID" + echo "[INFO] No command line input provided. Set \$git_user_id to $git_user_id" +fi + +if [ "$git_repo_id" = "" ]; then + git_repo_id="GIT_REPO_ID" + echo "[INFO] No command line input provided. Set \$git_repo_id to $git_repo_id" +fi + +if [ "$release_note" = "" ]; then + release_note="Minor update" + echo "[INFO] No command line input provided. Set \$release_note to $release_note" +fi + +# Initialize the local directory as a Git repository +git init + +# Adds the files in the local repository and stages them for commit. +git add . + +# Commits the tracked changes and prepares them to be pushed to a remote repository.
+git commit -m "$release_note" + +# Sets the new remote +git_remote=`git remote` +if [ "$git_remote" = "" ]; then # git remote not defined + + if [ "$GIT_TOKEN" = "" ]; then + echo "[INFO] \$GIT_TOKEN (environment variable) is not set. Using the git credential in your environment." + git remote add origin https://${git_host}/${git_user_id}/${git_repo_id}.git + else + git remote add origin https://${git_user_id}:${GIT_TOKEN}@${git_host}/${git_user_id}/${git_repo_id}.git + fi + +fi + +git pull origin master + +# Pushes (Forces) the changes in the local repository up to the remote repository +echo "Git pushing to https://${git_host}/${git_user_id}/${git_repo_id}.git" +git push origin master 2>&1 | grep -v 'To https' + diff --git a/libraries/models/requirements.txt b/libraries/models/requirements.txt new file mode 100644 index 00000000..70e1f0c5 --- /dev/null +++ b/libraries/models/requirements.txt @@ -0,0 +1,11 @@ +connexion[swagger-ui] >= 2.6.0; python_version>="3.6" +# 2.3 is the last version that supports python 3.4-3.5 +connexion[swagger-ui] <= 2.3.0; python_version=="3.5" or python_version=="3.4" +# connexion requires werkzeug but connexion < 2.4.0 does not install werkzeug +# we must peg werkzeug versions below to fix connexion +# https://github.com/zalando/connexion/pull/1044 +werkzeug == 0.16.1; python_version=="3.5" or python_version=="3.4" +swagger-ui-bundle >= 0.0.2 +python_dateutil >= 2.6.0 +setuptools >= 21.0.0 +Flask == 1.1.2 diff --git a/libraries/models/setup.cfg b/libraries/models/setup.cfg new file mode 100644 index 00000000..11433ee8 --- /dev/null +++ b/libraries/models/setup.cfg @@ -0,0 +1,2 @@ +[flake8] +max-line-length=99 diff --git a/libraries/models/setup.py b/libraries/models/setup.py new file mode 100644 index 00000000..b27d4c61 --- /dev/null +++ b/libraries/models/setup.py @@ -0,0 +1,10 @@ +# coding: utf-8 + +import sys +from setuptools import setup, find_packages + +NAME = "cloudharness_model" +VERSION = "1.0.0" +REQUIREMENTS = 
[l[0:-1].replace(" ", "") for l in open("requirements.txt") if "#" not in l] +print(REQUIREMENTS) +setup(name=NAME, version=VERSION, install_requires=REQUIREMENTS) \ No newline at end of file diff --git a/libraries/models/test-requirements.txt b/libraries/models/test-requirements.txt new file mode 100644 index 00000000..0970f28c --- /dev/null +++ b/libraries/models/test-requirements.txt @@ -0,0 +1,4 @@ +pytest~=4.6.7 # needed for python 2.7+3.4 +pytest-cov>=2.8.1 +pytest-randomly==1.2.3 # needed for python 2.7+3.4 +Flask-Testing==0.8.0 diff --git a/libraries/models/test/__init__.py b/libraries/models/test/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/libraries/models/test/resources/values.yaml b/libraries/models/test/resources/values.yaml new file mode 100644 index 00000000..b3e1b0ca --- /dev/null +++ b/libraries/models/test/resources/values.yaml @@ -0,0 +1,652 @@ +local: false +secured_gatekeepers: true +nameOverride: '' +fullnameOverride: '' +domain: my.local +namespace: test +mainapp: myapp +registry: + name: reg/ + secret: null +tag: 1 +apps: + accounts: + harness: + subdomain: accounts + aliases: [] + domain: null + dependencies: + hard: [] + soft: [] + build: [] + secured: false + uri_role_mapping: + - uri: /* + roles: + - administrator + deployment: + auto: overridden + replicas: 1 + image: reg/cloudharness/accounts:1 + name: accounts + port: overridden + resources: &id001 + requests: + memory: 512Mi + cpu: 10m + limits: + memory: 1024Mi + cpu: 500m + service: + auto: true + name: accounts + port: 8080 + secrets: + api_user_password: '' + use_services: [] + database: + auto: true + name: keycloak-postgres + type: postgres + size: 2Gi + user: user + pass: password + mongo: + image: mongo:5 + ports: + - name: http + port: 27017 + postgres: + image: postgres:10.4 + initialdb: auth_db + ports: + - name: http + port: 5432 + neo4j: + image: neo4j:4.1.9 + memory: + size: 256M + pagecache: + size: 64M + heap: + initial: 64M + max: 128M + 
dbms_security_auth_enabled: 'false' + ports: + - name: http + port: 7474 + - name: bolt + port: 7687 + resources: + requests: + memory: 512Mi + cpu: 100m + limits: + memory: 2Gi + cpu: 1000m + env: + - name: KEYCLOAK_IMPORT + value: /tmp/realm.json + - name: KEYCLOAK_USER + value: admin + - name: KEYCLOAK_PASSWORD + value: metacell + - name: PROXY_ADDRESS_FORWARDING + value: 'true' + - name: DB_VENDOR + value: POSTGRES + - name: DB_ADDR + value: keycloak-postgres + - name: DB_DATABASE + value: auth_db + - name: DB_USER + value: user + - name: DB_PASSWORD + value: password + - name: JAVA_OPTS + value: -server -Xms64m -Xmx896m -XX:MetaspaceSize=96M -XX:MaxMetaspaceSize=256m + -Djava.net.preferIPv4Stack=true -Djboss.modules.system.pkgs=org.jboss.byteman + -Djava.awt.headless=true --add-exports=java.base/sun.nio.ch=ALL-UNNAMED + --add-exports=jdk.unsupported/sun.misc=ALL-UNNAMED --add-exports=jdk.unsupported/sun.reflect=ALL-UNNAMED + resources: + - name: realm-config + src: realm.json + dst: /tmp/realm.json + readinessProbe: + path: /auth/realms/master + name: accounts + client: + id: rest-client + secret: 5678eb6e-9e2c-4ee5-bd54-34e7411339e8 + webclient: + id: web-client + secret: 452952ae-922c-4766-b912-7b106271e34b + enabled: true + harvest: true + admin: + pass: metacell + user: admin + role: administrator + image: reg/cloudharness/accounts:1 + task-images: {} + common: true + a: dev + autodeploy: overridden + name: accounts + port: overridden + resources: *id001 + argo: + harness: + subdomain: argo + aliases: [] + domain: null + dependencies: + hard: [] + soft: [] + build: [] + secured: true + uri_role_mapping: + - uri: /* + roles: + - administrator + deployment: + auto: false + replicas: 1 + image: null + name: argo + port: 8080 + resources: &id002 + requests: + memory: 32Mi + cpu: 10m + limits: + memory: 500Mi + cpu: 500m + service: + auto: false + name: argo-server + port: 2746 + secrets: null + use_services: [] + database: + auto: false + name: argo-db + type: 
null + size: 1Gi + user: mnp + pass: metacell + mongo: + image: mongo:5 + ports: + - name: http + port: 27017 + postgres: + image: postgres:13 + initialdb: cloudharness + ports: + - name: http + port: 5432 + neo4j: + image: neo4j:4.1.9 + memory: + size: 256M + pagecache: + size: 64M + heap: + initial: 64M + max: 128M + dbms_security_auth_enabled: 'false' + ports: + - name: http + port: 7474 + - name: bolt + port: 7687 + resources: + requests: + memory: 512Mi + cpu: 100m + limits: + memory: 2Gi + cpu: 1000m + name: argo + serviceaccount: argo-workflows + task-images: {} + image: null + name: argo + port: 8080 + resources: *id002 + samples: + harness: + subdomain: samples + aliases: [] + domain: null + dependencies: + hard: [] + soft: + - workflows + - events + - accounts + build: + - cloudharness-flask + - cloudharness-frontend-build + secured: false + uri_role_mapping: + - uri: /* + roles: + - administrator + deployment: + auto: true + replicas: 1 + image: reg/cloudharness/samples:1 + name: samples + port: 8080 + resources: &id003 + requests: + memory: 32Mi + cpu: 10m + limits: + memory: 500Mi + cpu: 500m + service: + auto: true + name: samples + port: 8080 + secrets: null + use_services: [] + database: + auto: false + name: samples-db + type: null + size: 1Gi + user: mnp + pass: metacell + mongo: + image: mongo:5 + ports: + - name: http + port: 27017 + postgres: + image: postgres:13 + initialdb: cloudharness + ports: + - name: http + port: 5432 + neo4j: + image: neo4j:4.1.9 + memory: + size: 256M + pagecache: + size: 64M + heap: + initial: 64M + max: 128M + dbms_security_auth_enabled: 'false' + ports: + - name: http + port: 7474 + - name: bolt + port: 7687 + resources: + requests: + memory: 512Mi + cpu: 100m + limits: + memory: 2Gi + cpu: 1000m + sentry: true + port: 80 + env: + - name: WORKERS + value: '3' + resources: + - name: my-config + src: myConfig.json + dst: /tmp/resources/myConfig.json + - name: example + src: example.yaml + dst: 
/usr/src/app/important_config.yaml + name: samples + image: reg/cloudharness/samples:1 + task-images: + samples-print-file: reg/cloudharness/samples-print-file:1 + samples-sum: reg/cloudharness/samples-sum:1 + cloudharness-flask: reg/cloudharness/cloudharness-flask:1 + cloudharness-frontend-build: reg/cloudharness/cloudharness-frontend-build:1 + name: samples + port: 8080 + resources: *id003 + workflows: + harness: + subdomain: workflows + aliases: [] + domain: null + dependencies: + hard: + - argo + soft: [] + build: + - cloudharness-flask + secured: false + uri_role_mapping: + - uri: /* + roles: + - administrator + deployment: + auto: true + replicas: 1 + image: reg/cloudharness/workflows:1 + name: workflows + port: 8080 + resources: &id004 + requests: + memory: 32Mi + cpu: 10m + limits: + memory: 500Mi + cpu: 500m + service: + auto: true + name: workflows + port: 8080 + secrets: null + use_services: [] + database: + auto: false + name: workflows-db + type: null + size: 1Gi + user: mnp + pass: metacell + mongo: + image: mongo:5 + ports: + - name: http + port: 27017 + postgres: + image: postgres:13 + initialdb: cloudharness + ports: + - name: http + port: 5432 + neo4j: + image: neo4j:4.1.9 + memory: + size: 256M + pagecache: + size: 64M + heap: + initial: 64M + max: 128M + dbms_security_auth_enabled: 'false' + ports: + - name: http + port: 7474 + - name: bolt + port: 7687 + resources: + requests: + memory: 512Mi + cpu: 100m + limits: + memory: 2Gi + cpu: 1000m + name: workflows + image: reg/cloudharness/workflows:1 + task-images: + workflows-send-result-event: reg/cloudharness/workflows-send-result-event:1 + workflows-extract-download: reg/cloudharness/workflows-extract-download:1 + workflows-notify-queue: reg/cloudharness/workflows-notify-queue:1 + cloudharness-flask: reg/cloudharness/cloudharness-flask:1 + workflows-new-task: reg/cloudharness/workflows-new-task:1 + common: true + name: workflows + port: 8080 + resources: *id004 + legacy: + harness: + subdomain: 
null + aliases: [] + domain: null + dependencies: + hard: [] + soft: [] + build: [] + secured: false + uri_role_mapping: + - uri: /* + roles: + - administrator + deployment: + auto: legacy + replicas: 1 + image: null + name: legacy + port: 8080 + resources: &id005 + requests: + memory: 32Mi + cpu: 10m + limits: + memory: 500Mi + cpu: 500m + service: + auto: true + name: legacy + port: 80 + secrets: null + use_services: [] + database: + auto: false + name: legacy-db + type: null + size: 1Gi + user: mnp + pass: metacell + mongo: + image: mongo:5 + ports: + - name: http + port: 27017 + postgres: + image: postgres:13 + initialdb: cloudharness + ports: + - name: http + port: 5432 + neo4j: + image: neo4j:4.1.9 + memory: + size: 256M + pagecache: + size: 64M + heap: + initial: 64M + max: 128M + dbms_security_auth_enabled: 'false' + ports: + - name: http + port: 7474 + - name: bolt + port: 7687 + resources: + requests: + memory: 512Mi + cpu: 100m + limits: + memory: 2Gi + cpu: 1000m + name: legacy + common: true + autodeploy: legacy + a: b + name: legacy + task-images: {} + image: null + port: 8080 + resources: *id005 + myapp: + harness: + subdomain: mysubdomain + aliases: [] + domain: null + dependencies: + hard: [] + soft: + - legacy + build: + - cloudharness-flask + - my-common + secured: false + uri_role_mapping: + - uri: /* + roles: + - administrator + deployment: + auto: false + replicas: 1 + image: reg/cloudharness/myapp:1 + name: myapp + port: 8080 + resources: &id006 + requests: + memory: 32Mi + cpu: 10m + limits: + memory: 500Mi + cpu: 500m + service: + auto: true + name: myapp + port: 80 + secrets: null + use_services: [] + database: + auto: false + name: myapp-db + type: null + size: 1Gi + user: mnp + pass: metacell + mongo: + image: mongo:5 + ports: + - name: http + port: 27017 + postgres: + image: postgres:13 + initialdb: cloudharness + ports: + - name: http + port: 5432 + neo4j: + image: neo4j:4.1.9 + memory: + size: 256M + pagecache: + size: 64M + heap: + 
initial: 64M + max: 128M + dbms_security_auth_enabled: 'false' + ports: + - name: http + port: 7474 + - name: bolt + port: 7687 + resources: + requests: + memory: 512Mi + cpu: 100m + limits: + memory: 2Gi + cpu: 1000m + name: myapp + common: true + a: b + image: reg/cloudharness/myapp:1 + task-images: + myapp-mytask: reg/cloudharness/myapp-mytask:1 + cloudharness-flask: reg/cloudharness/cloudharness-flask:1 + my-common: reg/cloudharness/my-common:1 + name: myapp + port: 8080 + resources: *id006 +env: +- name: CH_VERSION + value: 0.0.1 +- name: CH_CHART_VERSION + value: 0.0.1 +- name: CH_ACCOUNTS_SUBDOMAIN + value: accounts +- name: CH_ACCOUNTS_NAME + value: accounts +- name: CH_ARGO_SUBDOMAIN + value: argo +- name: CH_ARGO_NAME + value: argo +- name: CH_SAMPLES_SUBDOMAIN + value: samples +- name: CH_SAMPLES_PORT + value: 80 +- name: CH_SAMPLES_NAME + value: samples +- name: CH_WORKFLOWS_SUBDOMAIN + value: workflows +- name: CH_WORKFLOWS_NAME + value: workflows +- name: CH_LEGACY_SUBDOMAIN + value: null +- name: CH_LEGACY_NAME + value: legacy +- name: CH_MYAPP_SUBDOMAIN + value: mysubdomain +- name: CH_MYAPP_NAME + value: myapp +- name: CH_DOMAIN + value: my.local +- name: CH_IMAGE_REGISTRY + value: reg/ +- name: CH_IMAGE_TAG + value: 1 +privenv: +- name: CH_SECRET + value: "In God we trust; all others must bring data. \u2015 W. 
Edwards Deming" +ingress: + enabled: true + name: cloudharness-ingress + ssl_redirect: true + letsencrypt: + email: filippo@metacell.us +backup: + active: true + keep_days: '7' + keep_weeks: '4' + keep_months: '6' + schedule: '*/5 * * * *' + suffix: .gz + volumesize: 2Gi + dir: /backups + resources: + requests: + memory: 32Mi + cpu: 25m + limits: + memory: 64Mi + cpu: 50m +name: cloudharness +a: dev +database: + auto: false +task-images: + samples-print-file: reg/cloudharness/samples-print-file:1 + samples-sum: reg/cloudharness/samples-sum:1 + cloudharness-flask: reg/cloudharness/cloudharness-flask:1 + cloudharness-frontend-build: reg/cloudharness/cloudharness-frontend-build:1 + workflows-send-result-event: reg/cloudharness/workflows-send-result-event:1 + workflows-extract-download: reg/cloudharness/workflows-extract-download:1 + workflows-notify-queue: reg/cloudharness/workflows-notify-queue:1 + workflows-new-task: reg/cloudharness/workflows-new-task:1 + myapp-mytask: reg/cloudharness/myapp-mytask:1 + my-common: reg/cloudharness/my-common:1 + cloudharness-base: reg/cloudharness/cloudharness-base:1 +tls: true diff --git a/libraries/models/test/test_helm_values.py b/libraries/models/test/test_helm_values.py new file mode 100644 index 00000000..a6bbbf32 --- /dev/null +++ b/libraries/models/test/test_helm_values.py @@ -0,0 +1,15 @@ +import pytest +from os.path import join, dirname as dn, realpath +import yaml + +from cloudharness_model import HarnessMainConfig + +HERE = dn(realpath(__file__)) + +def test_helm_values_deserialize(): + with open(join(HERE, "resources/values.yaml")) as f: + values = yaml.load(f) + v = HarnessMainConfig.from_dict(values) + + assert v.domain + assert v.apps["accounts"].harness.deployment.name == "accounts" diff --git a/libraries/models/tox.ini b/libraries/models/tox.ini new file mode 100644 index 00000000..e76b6dbf --- /dev/null +++ b/libraries/models/tox.ini @@ -0,0 +1,11 @@ +[tox] +envlist = py3 +skipsdist=True + +[testenv] 
+deps=-r{toxinidir}/requirements.txt + -r{toxinidir}/test-requirements.txt + {toxinidir} + +commands= + pytest --cov=cloudharness_model