diff --git a/api/openapi.json b/api/openapi.json index 19a780a8..dcb7d219 100644 --- a/api/openapi.json +++ b/api/openapi.json @@ -166,7 +166,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LimitOffsetPage_File_" + "$ref": "#/components/schemas/Page_File_" } } } @@ -245,6 +245,167 @@ "HTTPBasic": [] } ] + }, + "post": { + "tags": [ + "files" + ], + "summary": "Get Upload Links", + "description": "Get upload links for uploading a file to storage\n\nArguments:\n request -- Request object\n client_file -- ClientFile object\n user_id -- User Id\n\nReturns:\n FileUploadSchema", + "operationId": "get_upload_links", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ClientFile" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ClientFileUploadSchema" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBasic": [] + } + ] + } + }, + "/v0/files/{file_id}:complete": { + "post": { + "tags": [ + "files" + ], + "summary": "Complete Multipart Upload", + "description": "Complete multipart upload\n\nArguments:\n request: The Request object\n file_id: The Storage id\n file: The File object which is to be completed\n uploaded_parts: The uploaded parts\n completion_link: The completion link\n user_id: The user id\n\nReturns:\n The completed File object", + "operationId": "complete_multipart_upload", + "parameters": [ + { + "required": true, + "schema": { + "title": "File Id", + "type": "string", + "format": "uuid" + }, + "name": "file_id", + "in": "path" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_complete_multipart_upload_v0_files__file_id__complete_post" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/File" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBasic": [] + } + ] + } + }, + "/v0/files/{file_id}:abort": { + "post": { + "tags": [ + "files" + ], + "summary": "Abort Multipart Upload", + "description": "Abort a multipart upload\n\nArguments:\n request: The Request\n file_id: The StorageFileID\n upload_links: The FileUploadSchema\n user_id: The user id", + "operationId": "abort_multipart_upload", + "parameters": [ + { + "required": true, + "schema": { + "title": "File Id", + "type": "string", + "format": "uuid" + }, + "name": "file_id", + "in": "path" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Body_abort_multipart_upload_v0_files__file_id__abort_post" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "HTTPBasic": [] + } + ] } }, "/v0/files/{file_id}": { 
@@ -491,7 +652,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LimitOffsetPage_Solver_" + "$ref": "#/components/schemas/Page_Solver_" } } } @@ -578,7 +739,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LimitOffsetPage_Solver_" + "$ref": "#/components/schemas/Page_Solver_" } } } @@ -746,7 +907,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LimitOffsetPage_Solver_" + "$ref": "#/components/schemas/Page_Solver_" } } } @@ -1071,7 +1232,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LimitOffsetPage_Job_" + "$ref": "#/components/schemas/Page_Job_" } } } @@ -1769,12 +1930,13 @@ ] } }, - "/v0/studies/": { + "/v0/studies": { "get": { "tags": [ "studies" ], "summary": "List Studies", + "description": "New in *version 0.5.0* (only with API_SERVER_DEV_FEATURES_ENABLED=1)", "operationId": "list_studies", "parameters": [ { @@ -1807,7 +1969,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LimitOffsetPage_Study_" + "$ref": "#/components/schemas/Page_Study_" } } } @@ -1822,7 +1984,12 @@ } } } - } + }, + "security": [ + { + "HTTPBasic": [] + } + ] } }, "/v0/studies/{study_id}": { @@ -1831,6 +1998,7 @@ "studies" ], "summary": "Get Study", + "description": "New in *version 0.5.0* (only with API_SERVER_DEV_FEATURES_ENABLED=1)", "operationId": "get_study", "parameters": [ { @@ -1855,6 +2023,16 @@ } } }, + "404": { + "description": "Study not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, "422": { "description": "Validation Error", "content": { @@ -1865,7 +2043,12 @@ } } } - } + }, + "security": [ + { + "HTTPBasic": [] + } + ] } }, "/v0/studies/{study_id}/ports": { @@ -1899,6 +2082,16 @@ } } }, + "404": { + "description": "Study not found", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ErrorGet" + } + } + } + }, "422": { "description": "Validation Error", "content": { @@ -1965,7 +2158,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/LimitOffsetPage_Job_" + "$ref": "#/components/schemas/Page_Job_" } } } @@ -2521,6 +2714,34 @@ }, "components": { "schemas": { + "Body_abort_multipart_upload_v0_files__file_id__abort_post": { + "title": "Body_abort_multipart_upload_v0_files__file_id__abort_post", + "required": [ + "client_file" + ], + "type": "object", + "properties": { + "client_file": { + "$ref": "#/components/schemas/ClientFile" + } + } + }, + "Body_complete_multipart_upload_v0_files__file_id__complete_post": { + "title": "Body_complete_multipart_upload_v0_files__file_id__complete_post", + "required": [ + "client_file", + "uploaded_parts" + ], + "type": "object", + "properties": { + "client_file": { + "$ref": "#/components/schemas/ClientFile" + }, + "uploaded_parts": { + "$ref": "#/components/schemas/FileUploadCompletionBody" + } + } + }, "Body_upload_file_v0_files_content_put": { "title": "Body_upload_file_v0_files_content_put", "required": [ @@ -2535,6 +2756,52 @@ } } }, + "ClientFile": { + "title": "ClientFile", + "required": [ + "filename", + "filesize" + ], + "type": "object", + "properties": { + "filename": { + "title": "Filename", + "type": "string", + "description": "File name" + }, + "filesize": { + "title": "Filesize", + "type": "integer", + "description": "File size in bytes" + } + }, + "description": "Represents a file stored on the client side" + }, + "ClientFileUploadSchema": { + 
"title": "ClientFileUploadSchema", + "required": [ + "file_id", + "upload_schema" + ], + "type": "object", + "properties": { + "file_id": { + "title": "File Id", + "type": "string", + "description": "The file id", + "format": "uuid" + }, + "upload_schema": { + "title": "Upload Schema", + "allOf": [ + { + "$ref": "#/components/schemas/FileUploadSchema" + } + ], + "description": "Schema for uploading file" + } + } + }, "ErrorGet": { "title": "ErrorGet", "required": [ @@ -2581,48 +2848,116 @@ }, "description": "Represents a file stored on the server side i.e. a unique reference to a file in the cloud." }, - "Groups": { - "title": "Groups", + "FileUploadCompletionBody": { + "title": "FileUploadCompletionBody", "required": [ - "me", - "all" + "parts" ], "type": "object", "properties": { - "me": { - "$ref": "#/components/schemas/UsersGroup" - }, - "organizations": { - "title": "Organizations", + "parts": { + "title": "Parts", "type": "array", "items": { - "$ref": "#/components/schemas/UsersGroup" - }, - "default": [] - }, - "all": { - "$ref": "#/components/schemas/UsersGroup" + "$ref": "#/components/schemas/UploadedPart" + } } } }, - "HTTPValidationError": { - "title": "HTTPValidationError", + "FileUploadLinks": { + "title": "FileUploadLinks", + "required": [ + "abort_upload", + "complete_upload" + ], "type": "object", "properties": { - "errors": { - "title": "Validation errors", - "type": "array", - "items": { - "$ref": "#/components/schemas/ValidationError" - } + "abort_upload": { + "title": "Abort Upload", + "maxLength": 65536, + "minLength": 1, + "type": "string", + "format": "uri" + }, + "complete_upload": { + "title": "Complete Upload", + "maxLength": 65536, + "minLength": 1, + "type": "string", + "format": "uri" } } }, - "Job": { - "title": "Job", + "FileUploadSchema": { + "title": "FileUploadSchema", "required": [ - "id", - "name", + "chunk_size", + "urls", + "links" + ], + "type": "object", + "properties": { + "chunk_size": { + "title": "Chunk Size", + "type": "integer" + }, + "urls": { + "title": "Urls", + "type": "array", + "items": { + "maxLength": 65536, + "minLength": 1, + "type": "string", + "format": "uri" + } + }, + "links": { + "$ref": "#/components/schemas/FileUploadLinks" + } + } + }, + "Groups": { + "title": "Groups", + "required": [ + "me", + "all" + ], + "type": "object", + "properties": { + "me": { + "$ref": "#/components/schemas/UsersGroup" + }, + "organizations": { + "title": "Organizations", + "type": "array", + "items": { + "$ref": "#/components/schemas/UsersGroup" + }, + "default": [] + }, + "all": { + "$ref": "#/components/schemas/UsersGroup" + } + } + }, + "HTTPValidationError": { + "title": "HTTPValidationError", + "type": "object", + "properties": { + "errors": { + "title": "Validation errors", + "type": "array", + "items": { + "$ref": "#/components/schemas/ValidationError" + } + } + } + }, + "Job": { + "title": "Job", + "required": [ + "id", + "name", "inputs_checksum", "created_at", "runner_name", @@ -2923,12 +3258,95 @@ "started_at": "2021-04-01 07:16:43.670610" } }, - "LimitOffsetPage_File_": { - "title": "LimitOffsetPage[File]", + "Links": { + "title": "Links", + "type": "object", + "properties": { + "first": { + "title": "First", + "type": "string", + "example": "/api/v1/users?limit=1&offset1" + }, + "last": { + "title": "Last", + "type": "string", + "example": "/api/v1/users?limit=1&offset1" + }, + "self": { + "title": "Self", + "type": "string", + "example": "/api/v1/users?limit=1&offset1" + }, + "next": { + "title": "Next", + "type": "string", + 
"example": "/api/v1/users?limit=1&offset1" + }, + "prev": { + "title": "Prev", + "type": "string", + "example": "/api/v1/users?limit=1&offset1" + } + } + }, + "Meta": { + "title": "Meta", "required": [ - "items", - "total", - "links" + "name", + "version", + "docs_url", + "docs_dev_url" + ], + "type": "object", + "properties": { + "name": { + "title": "Name", + "type": "string" + }, + "version": { + "title": "Version", + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "type": "string" + }, + "released": { + "title": "Released", + "type": "object", + "additionalProperties": { + "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", + "type": "string" + }, + "description": "Maps every route's path tag with a released version" + }, + "docs_url": { + "title": "Docs Url", + "maxLength": 65536, + "minLength": 1, + "type": "string", + "format": "uri" + }, + "docs_dev_url": { + "title": "Docs Dev Url", + "maxLength": 65536, + "minLength": 1, + "type": "string", + "format": "uri" + } + }, + "example": { + "name": "simcore_service_foo", + "version": "2.4.45", + "released": { + "v1": "1.3.4", + "v2": "2.4.45" + }, + "docs_url": "https://api.osparc.io/dev/doc", + "docs_dev_url": "https://api.osparc.io/dev/doc" + } + }, + "OnePage_SolverPort_": { + "title": "OnePage[SolverPort]", + "required": [ + "items" ], "type": "object", "properties": { @@ -2936,31 +3354,41 @@ "title": "Items", "type": "array", "items": { - "$ref": "#/components/schemas/File" + "$ref": "#/components/schemas/SolverPort" } }, "total": { "title": "Total", "minimum": 0, "type": "integer" + } + }, + "description": "A single page is used to envelope a small sequence that does not require\npagination\n\nIf total > MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, we should consider extending this\nentrypoint to proper pagination" + }, + "OnePage_StudyPort_": { + "title": "OnePage[StudyPort]", + "required": [ + "items" + ], + "type": "object", + "properties": { + "items": { + "title": "Items", + "type": "array", + "items": { + "$ref": "#/components/schemas/StudyPort" + } }, - "limit": { - "title": "Limit", - "minimum": 1, - "type": "integer" - }, - "offset": { - "title": "Offset", + "total": { + "title": "Total", "minimum": 0, "type": "integer" - }, - "links": { - "$ref": "#/components/schemas/Links" } - } + }, + "description": "A single page is used to envelope a small sequence that does not require\npagination\n\nIf total > MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, we should consider extending this\nentrypoint to proper pagination" }, - "LimitOffsetPage_Job_": { - "title": "LimitOffsetPage[Job]", + "Page_File_": { + "title": "Page[File]", "required": [ "items", "total", @@ -2972,7 +3400,7 @@ "title": "Items", "type": "array", "items": { - "$ref": "#/components/schemas/Job" + "$ref": "#/components/schemas/File" } }, "total": { @@ -2995,8 +3423,8 @@ } } }, - "LimitOffsetPage_Solver_": { - "title": "LimitOffsetPage[Solver]", + "Page_Job_": { + "title": "Page[Job]", "required": [ "items", "total", @@ -3008,7 +3436,7 @@ "title": "Items", "type": "array", "items": { - "$ref": "#/components/schemas/Solver" + "$ref": "#/components/schemas/Job" } }, "total": { @@ -3031,8 +3459,8 @@ } } }, - "LimitOffsetPage_Study_": { - "title": "LimitOffsetPage[Study]", + "Page_Solver_": { + "title": "Page[Solver]", "required": [ 
"items", "total", @@ -3044,7 +3472,7 @@ "title": "Items", "type": "array", "items": { - "$ref": "#/components/schemas/Study" + "$ref": "#/components/schemas/Solver" } }, "total": { @@ -3067,95 +3495,12 @@ } } }, - "Links": { - "title": "Links", - "type": "object", - "properties": { - "first": { - "title": "First", - "type": "string", - "example": "/api/v1/users?limit=1&offset1" - }, - "last": { - "title": "Last", - "type": "string", - "example": "/api/v1/users?limit=1&offset1" - }, - "self": { - "title": "Self", - "type": "string", - "example": "/api/v1/users?limit=1&offset1" - }, - "next": { - "title": "Next", - "type": "string", - "example": "/api/v1/users?limit=1&offset1" - }, - "prev": { - "title": "Prev", - "type": "string", - "example": "/api/v1/users?limit=1&offset1" - } - } - }, - "Meta": { - "title": "Meta", + "Page_Study_": { + "title": "Page[Study]", "required": [ - "name", - "version", - "docs_url", - "docs_dev_url" - ], - "type": "object", - "properties": { - "name": { - "title": "Name", - "type": "string" - }, - "version": { - "title": "Version", - "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", - "type": "string" - }, - "released": { - "title": "Released", - "type": "object", - "additionalProperties": { - "pattern": "^(0|[1-9]\\d*)(\\.(0|[1-9]\\d*)){2}(-(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*)(\\.(0|[1-9]\\d*|\\d*[-a-zA-Z][-\\da-zA-Z]*))*)?(\\+[-\\da-zA-Z]+(\\.[-\\da-zA-Z-]+)*)?$", - "type": "string" - }, - "description": "Maps every route's path tag with a released version" - }, - "docs_url": { - "title": "Docs Url", - "maxLength": 65536, - "minLength": 1, - "type": "string", - "format": "uri" - }, - "docs_dev_url": { - "title": "Docs Dev Url", - "maxLength": 65536, - "minLength": 1, - "type": "string", - "format": "uri" - } - }, - "example": { - "name": "simcore_service_foo", - "version": "2.4.45", - "released": { - "v1": "1.3.4", - "v2": "2.4.45" - }, - "docs_url": "https://api.osparc.io/dev/doc", - "docs_dev_url": "https://api.osparc.io/dev/doc" - } - }, - "OnePage_SolverPort_": { - "title": "OnePage[SolverPort]", - "required": [ - "items" + "items", + "total", + "links" ], "type": "object", "properties": { @@ -3163,38 +3508,28 @@ "title": "Items", "type": "array", "items": { - "$ref": "#/components/schemas/SolverPort" + "$ref": "#/components/schemas/Study" } }, "total": { "title": "Total", "minimum": 0, "type": "integer" - } - }, - "description": "A single page is used to envelope a small sequence that does not require\npagination\n\nIf total > MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, we should consider extending this\nentrypoint to proper pagination" - }, - "OnePage_StudyPort_": { - "title": "OnePage[StudyPort]", - "required": [ - "items" - ], - "type": "object", - "properties": { - "items": { - "title": "Items", - "type": "array", - "items": { - "$ref": "#/components/schemas/StudyPort" - } }, - "total": { - "title": "Total", + "limit": { + "title": "Limit", + "minimum": 1, + "type": "integer" + }, + "offset": { + "title": "Offset", "minimum": 0, "type": "integer" + }, + "links": { + "$ref": "#/components/schemas/Links" } - }, - "description": "A single page is used to envelope a small sequence that does not require\npagination\n\nIf total > MAXIMUM_NUMBER_OF_ITEMS_PER_PAGE, we should consider extending this\nentrypoint to proper pagination" + } }, "Profile": { "title": "Profile", @@ -3276,8 +3611,8 @@ "PUBLISHED", "NOT_STARTED", "PENDING", + 
"WAITING_FOR_RESOURCES", "STARTED", - "RETRY", "SUCCESS", "FAILED", "ABORTED" @@ -3391,6 +3726,14 @@ "title": "Uid", "type": "string", "format": "uuid" + }, + "title": { + "title": "Title", + "type": "string" + }, + "description": { + "title": "Description", + "type": "string" } } }, @@ -3434,6 +3777,26 @@ } } }, + "UploadedPart": { + "title": "UploadedPart", + "required": [ + "number", + "e_tag" + ], + "type": "object", + "properties": { + "number": { + "title": "Number", + "exclusiveMinimum": true, + "type": "integer", + "minimum": 0 + }, + "e_tag": { + "title": "E Tag", + "type": "string" + } + } + }, "UserRoleEnum": { "title": "UserRoleEnum", "enum": [ diff --git a/clients/python/client/osparc/__init__.py b/clients/python/client/osparc/__init__.py index 9f7ae7a7..dafd1075 100644 --- a/clients/python/client/osparc/__init__.py +++ b/clients/python/client/osparc/__init__.py @@ -3,6 +3,7 @@ """ from typing import Tuple +import nest_asyncio from osparc_client import ( # APIs; API client; models ApiClient, ApiException, @@ -13,7 +14,6 @@ Configuration, ErrorGet, File, - FilesApi, Groups, HTTPValidationError, Job, @@ -22,10 +22,6 @@ JobMetadataUpdate, JobOutputs, JobStatus, - LimitOffsetPageFile, - LimitOffsetPageJob, - LimitOffsetPageSolver, - LimitOffsetPageStudy, Links, Meta, MetaApi, @@ -49,13 +45,18 @@ __version__, ) +from ._files_api import FilesApi from ._info import openapi from ._solvers_api import SolversApi +from ._utils import PaginationGenerator + +nest_asyncio.apply() # allow to run coroutines via asyncio.run(coro) __all__: Tuple[str, ...] = ( # imports from osparc_client "__version__", "FilesApi", + "PaginationGenerator", "MetaApi", "SolversApi", "UsersApi", @@ -86,14 +87,10 @@ "OnePageSolverPort", "StudyPort", "Study", - "LimitOffsetPageStudy", - "LimitOffsetPageFile", "JobMetadataUpdate", - "LimitOffsetPageJob", "Links", "SolverPort", "JobMetadata", - "LimitOffsetPageSolver", "ErrorGet", "OnePageStudyPort", # imports from osparc diff --git a/clients/python/client/osparc/_files_api.py b/clients/python/client/osparc/_files_api.py new file mode 100644 index 00000000..c1970274 --- /dev/null +++ b/clients/python/client/osparc/_files_api.py @@ -0,0 +1,153 @@ +import asyncio +import json +import math +import random +import shutil +import string +from pathlib import Path +from typing import Any, Iterator, List, Optional, Tuple, Union + +import httpx +from httpx import AsyncClient, Response +from osparc_client import ( + BodyCompleteMultipartUploadV0FilesFileIdCompletePost, + ClientFile, + ClientFileUploadSchema, +) +from osparc_client import FilesApi as _FilesApi +from osparc_client import FileUploadCompletionBody, FileUploadLinks, UploadedPart +from tqdm.asyncio import tqdm_asyncio + +from . 
import ApiClient, File +from ._http_client import AsyncHttpClient +from ._utils import _file_chunk_generator + + +class FilesApi(_FilesApi): + """Class for interacting with files""" + + def __init__(self, api_client: Optional[ApiClient] = None): + """Construct object + + Args: + api_client (ApiClient, optinal): osparc.ApiClient object + """ + super().__init__(api_client) + self._super = super(FilesApi, self) + user: Optional[str] = self.api_client.configuration.username + passwd: Optional[str] = self.api_client.configuration.password + self._auth: Optional[httpx.BasicAuth] = ( + httpx.BasicAuth(username=user, password=passwd) + if (user is not None and passwd is not None) + else None + ) + + def download_file( + self, file_id: str, *, destination_folder: Optional[Path] = None + ) -> str: + if destination_folder is not None and not destination_folder.is_dir(): + raise RuntimeError( + f"destination_folder: {destination_folder} must be a directory" + ) + downloaded_file: Path = Path(super().download_file(file_id)) + if destination_folder is not None: + dest_file: Path = destination_folder / downloaded_file.name + while dest_file.is_file(): + new_name = ( + downloaded_file.stem + + "".join(random.choices(string.ascii_letters, k=8)) + + downloaded_file.suffix + ) + dest_file = destination_folder / new_name + shutil.move(downloaded_file, dest_file) + downloaded_file = dest_file + return str(downloaded_file.resolve()) + + def upload_file(self, file: Union[str, Path]): + return asyncio.run(self.upload_file_async(file=file)) + + async def upload_file_async(self, file: Union[str, Path]) -> File: + if isinstance(file, str): + file = Path(file) + if not file.is_file(): + raise RuntimeError(f"{file} is not a file") + client_file: ClientFile = ClientFile( + filename=file.name, filesize=file.stat().st_size + ) + client_upload_schema: ClientFileUploadSchema = self._super.get_upload_links( + client_file=client_file + ) + chunk_size: int = client_upload_schema.upload_schema.chunk_size + links: FileUploadLinks = client_upload_schema.upload_schema.links + url_iter: Iterator[Tuple[int, str]] = enumerate( + iter(client_upload_schema.upload_schema.urls), start=1 + ) + if len(client_upload_schema.upload_schema.urls) < math.ceil( + file.stat().st_size / chunk_size + ): + raise RuntimeError( + "Did not receive sufficient number of upload URLs from the server." 
+ ) + + tasks: list = [] + async with AsyncHttpClient( + exception_request_type="post", + exception_url=links.abort_upload, + exception_auth=self._auth, + ) as session: + async for chunck, size in _file_chunk_generator(file, chunk_size): + # following https://docs.python.org/3/library/asyncio-task.html#asyncio.create_task + index, url = next(url_iter) + task = asyncio.create_task( + self._upload_chunck( + http_client=session, + chunck=chunck, + chunck_size=size, + upload_link=url, + index=index, + ) + ) + tasks.append(task) + + uploaded_parts: List[UploadedPart] = await tqdm_asyncio.gather(*tasks) + + return await self._complete_multipart_upload( + session, links.complete_upload, client_file, uploaded_parts + ) + + async def _complete_multipart_upload( + self, + http_client: AsyncClient, + complete_link: str, + client_file: ClientFile, + uploaded_parts: List[UploadedPart], + ) -> File: + complete_payload = BodyCompleteMultipartUploadV0FilesFileIdCompletePost( + client_file=client_file, + uploaded_parts=FileUploadCompletionBody(parts=uploaded_parts), + ) + response: Response = await http_client.post( + complete_link, + json=complete_payload.to_dict(), + auth=self._auth, + ) + response.raise_for_status() + payload: dict[str, Any] = response.json() + return File(**payload) + + async def _upload_chunck( + self, + http_client: AsyncClient, + chunck: bytes, + chunck_size: int, + upload_link: str, + index: int, + ) -> UploadedPart: + response: Response = await http_client.put( + upload_link, content=chunck, headers={"Content-Length": f"{chunck_size}"} + ) + response.raise_for_status() + assert response.headers # nosec + assert "Etag" in response.headers # nosec + etag: str = json.loads(response.headers["Etag"]) + return UploadedPart(number=index, e_tag=etag) diff --git a/clients/python/client/osparc/_http_client.py b/clients/python/client/osparc/_http_client.py new file mode 100644 index 00000000..45597d50 --- /dev/null +++ b/clients/python/client/osparc/_http_client.py @@ -0,0 +1,49 @@ +from typing import Optional + +import httpx +import tenacity + + +class AsyncHttpClient: + """Async http client context manager""" + + def __init__( + self, + exception_request_type: Optional[str] = None, + exception_url: Optional[str] = None, + exception_auth: Optional[httpx.BasicAuth] = None, + ): + self._client = httpx.AsyncClient() + self._exc_callback = ( + getattr(self._client, exception_request_type) + if exception_request_type + else None + ) + self._exc_url = exception_url + self._exc_auth = exception_auth + + async def __aenter__(self) -> httpx.AsyncClient: + return self._client + + async def __aexit__(self, exc_type, exc_value, traceback) -> None: + if exc_value is None: + await self._client.aclose() + else: # exception raised: need to handle + if self._exc_callback is not None: + try: + async for attempt in tenacity.AsyncRetrying( + reraise=True, + wait=tenacity.wait_fixed(1), + stop=tenacity.stop_after_delay(10), + retry=tenacity.retry_if_exception_type(httpx.RequestError), + ): + with attempt: + response = await self._exc_callback( + self._exc_url, auth=self._exc_auth + ) + response.raise_for_status() + except Exception as err: + await self._client.aclose() + raise err from exc_value + await self._client.aclose() + raise exc_value diff --git a/clients/python/client/osparc/_solvers_api.py b/clients/python/client/osparc/_solvers_api.py index 64d229c8..fbc731c4 100644 --- a/clients/python/client/osparc/_solvers_api.py +++ b/clients/python/client/osparc/_solvers_api.py @@ -1,5 +1,6 @@ from typing import 
Optional +import httpx from osparc_client import SolversApi as _SolversApi from . import ApiClient @@ -16,14 +17,19 @@ def __init__(self, api_client: Optional[ApiClient] = None): api_client (ApiClient, optinal): osparc.ApiClient object """ super().__init__(api_client) + user: Optional[str] = self.api_client.configuration.username + passwd: Optional[str] = self.api_client.configuration.password + self._auth: Optional[httpx.BasicAuth] = ( + httpx.BasicAuth(username=user, password=passwd) + if (user is not None and passwd is not None) + else None + ) def get_jobs_page(self, solver_key: str, version: str) -> None: """Method only for internal use""" raise NotImplementedError("This method is only for internal use") - def get_jobs( - self, solver_key: str, version: str, limit: int = 20, offset: int = 0 - ) -> PaginationGenerator: + def jobs(self, solver_key: str, version: str) -> PaginationGenerator: """Returns an iterator through which one can iterate over all Jobs submitted to the solver @@ -39,9 +45,13 @@ def get_jobs( (its "length") """ - def pagination_method(limit, offset): + def pagination_method(): return super(SolversApi, self).get_jobs_page( - solver_key=solver_key, version=version, limit=limit, offset=offset + solver_key=solver_key, version=version, limit=20, offset=0 ) - return PaginationGenerator(pagination_method, limit, offset) + return PaginationGenerator( + pagination_method, + base_url=self.api_client.configuration.host, + auth=self._auth, + ) diff --git a/clients/python/client/osparc/_utils.py b/clients/python/client/osparc/_utils.py index 5fe5c2e6..32f38bb2 100644 --- a/clients/python/client/osparc/_utils.py +++ b/clients/python/client/osparc/_utils.py @@ -1,19 +1,20 @@ -from typing import Callable, Generator, TypeVar, Union +import asyncio +from pathlib import Path +from typing import AsyncGenerator, Callable, Generator, Optional, Tuple, TypeVar, Union +import httpx from osparc_client import ( File, Job, - LimitOffsetPageFile, - LimitOffsetPageJob, - LimitOffsetPageSolver, - LimitOffsetPageStudy, + PageFile, + PageJob, + PageSolver, + PageStudy, Solver, Study, ) -Page = Union[ - LimitOffsetPageJob, LimitOffsetPageFile, LimitOffsetPageSolver, LimitOffsetPageStudy -] +Page = Union[PageJob, PageFile, PageSolver, PageStudy] T = TypeVar("T", Job, File, Solver, Study) @@ -22,40 +23,65 @@ class PaginationGenerator: def __init__( self, - pagination_method: Callable[[int, int], Page], - limit: int = 20, - offset: int = 0, + first_page_callback: Callable[[], Page], + base_url: str, + auth: Optional[httpx.BasicAuth], ): - self._pagination_method: Callable[[int, int], Page] = pagination_method - self._limit: int = limit - self._offset: int = offset + self._first_page_callback: Callable[[], Page] = first_page_callback + self._next_page_url: Optional[str] = None + self._client: httpx.Client = httpx.Client(auth=auth, base_url=base_url) - def __len__(self) -> int: - """Number of elements which the iterator can produce + def __del__(self): + self._client.close() - Returns: - int: The number of elements the iterator can produce - """ - page: Page = self._pagination_method(self._limit, 0) + def __len__(self) -> int: + """Number of elements which the iterator can produce""" + page: Page = self._first_page_callback() assert isinstance(page.total, int) - assert ( - page.total >= 0 - ), f"page.total={page.total} must be a nonnegative interger" - return max(page.total - self._offset, 0) + return page.total def __iter__(self) -> Generator[T, None, None]: - """Returns the generator - - Yields: - 
Generator[T,None,None]: The returned generator - """ + """Returns the generator""" if len(self) == 0: return + page: Page = self._first_page_callback() while True: - page: Page = self._pagination_method(self._limit, self._offset) assert page.items is not None assert isinstance(page.total, int) yield from page.items - self._offset += len(page.items) - if self._offset >= page.total: + if page.links.next is None: break + response: httpx.Response = self._client.get(page.links.next) + page = type(page)(**response.json()) + + +async def _file_chunk_generator( + file: Path, chunk_size: int +) -> AsyncGenerator[Tuple[bytes, int], None]: + if not file.is_file(): + raise RuntimeError(f"{file} must be a file") + if chunk_size <= 0: + raise RuntimeError(f"chunk_size={chunk_size} must be a positive int") + bytes_read: int = 0 + file_size: int = file.stat().st_size + while bytes_read < file_size: + with open(file, "rb") as f: + f.seek(bytes_read) + nbytes = ( + chunk_size + if (bytes_read + chunk_size <= file_size) + else (file_size - bytes_read) + ) + assert nbytes > 0 + chunk = await asyncio.get_event_loop().run_in_executor(None, f.read, nbytes) + yield chunk, nbytes + bytes_read += nbytes + + +S = TypeVar("S") + + +async def _fcn_to_coro(callback: Callable[..., S], *args) -> S: + """Get a coroutine from a callback.""" + result = await asyncio.get_event_loop().run_in_executor(None, callback, *args) + return result diff --git a/clients/python/client/osparc/_warnings_and_errors.py b/clients/python/client/osparc/_warnings_and_errors.py index 5307840a..918d570f 100644 --- a/clients/python/client/osparc/_warnings_and_errors.py +++ b/clients/python/client/osparc/_warnings_and_errors.py @@ -3,3 +3,7 @@ class VisibleDeprecationWarning(UserWarning): Acknowledgement: Having this wrapper is borrowed from numpy """ + + +class RequestError(Exception): + """For exceptions encountered when performing HTTP requests.""" diff --git a/clients/python/client/setup.py b/clients/python/client/setup.py index 56931b29..90703efa 100644 --- a/clients/python/client/setup.py +++ b/clients/python/client/setup.py @@ -17,7 +17,7 @@ # prerequisite: setuptools # http://pypi.python.org/pypi/setuptools -REQUIRES = [f"osparc_client=={VERSION}"] +REQUIRES = [f"osparc_client=={VERSION}", "httpx", "tqdm", "nest_asyncio"] setup( name=NAME, diff --git a/clients/python/test/e2e/ci/generate_pytest_ini.py b/clients/python/test/e2e/ci/generate_pytest_ini.py index ef9c4746..e4ab8db3 100644 --- a/clients/python/test/e2e/ci/generate_pytest_ini.py +++ b/clients/python/test/e2e/ci/generate_pytest_ini.py @@ -70,7 +70,11 @@ def main(client_config: str, server_config: str) -> None: / (client_cfg.client_ref + "_" + server_cfg.url.netloc) / f"junit_{client_cfg.client_ref}_{server_cfg.url.netloc}.xml" ).relative_to(Path("../../").resolve()) - add_opts: str = f"--html={html_log} --self-contained-html --junitxml={junit_xml}" + junit_prefix: str = f"{client_cfg.client_ref}+{server_cfg.url.netloc}" + add_opts: str = ( + f"--html={html_log} --self-contained-html " + f"--junitxml={junit_xml} --junit-prefix={junit_prefix}" + ) pytest_config: PytestConfig = PytestConfig( env="\n" + "\n".join(envs), required_plugins="pytest-env pytest-html", diff --git a/clients/python/test/e2e/conftest.py b/clients/python/test/e2e/conftest.py new file mode 100644 index 00000000..6b1ea80a --- /dev/null +++ b/clients/python/test/e2e/conftest.py @@ -0,0 +1,19 @@ +import os + +import osparc +import pytest + + +@pytest.fixture +def cfg() -> osparc.Configuration: + """Configuration + + 
Returns: + osparc.Configuration: The Configuration + """ + cfg = osparc.Configuration( + host=os.environ["OSPARC_API_HOST"], + username=os.environ["OSPARC_API_KEY"], + password=os.environ["OSPARC_API_SECRET"], + ) + return cfg diff --git a/clients/python/test/e2e/test_files_api.py b/clients/python/test/e2e/test_files_api.py new file mode 100644 index 00000000..a4c91982 --- /dev/null +++ b/clients/python/test/e2e/test_files_api.py @@ -0,0 +1,50 @@ +import hashlib +from pathlib import Path + +import osparc +import pytest +from packaging.version import Version + +_KB = 1024 # in bytes +_MB = _KB * 1024 # in bytes +_GB = _MB * 1024 # in bytes + + +def _hash_file(file: Path) -> str: + assert file.is_file() + md5 = hashlib.md5() + with open(file, "rb") as f: + while True: + data = f.read(100 * _KB) + if not data: + break + md5.update(data) + return md5.hexdigest() + + +# @pytest.mark.skip(reason="Skipped until files_api.delete_file() is implemented") +@pytest.mark.skipif( + Version(osparc.__version__) < Version("0.6.0"), + reason=f"osparc.__version__={osparc.__version__} is older than 0.6.0", +) +def test_upload_file(tmp_path: Path, cfg: osparc.Configuration) -> None: + """Test that we can upload a file via the multipart upload""" + # create file to upload + byte_size: int = 10 * _GB + tmp_file = tmp_path / "large_test_file.txt" + tmp_file.write_bytes(b"large test file") + with open(tmp_file, "wb") as f: + f.truncate(byte_size) + assert ( + tmp_file.stat().st_size == byte_size + ), f"Could not create file of size: {byte_size}" + + with osparc.ApiClient(cfg) as api_client: + files_api: osparc.FilesApi = osparc.FilesApi(api_client=api_client) + uploaded_file: osparc.File = files_api.upload_file(tmp_file) + downloaded_file = files_api.download_file( + uploaded_file.id, destination_folder=tmp_path + ) + assert Path(downloaded_file).parent == tmp_path + assert _hash_file(Path(downloaded_file)) == _hash_file(tmp_file) + files_api.delete_file(uploaded_file.id) diff --git a/clients/python/test/e2e/test_solvers_api.py b/clients/python/test/e2e/test_solvers_api.py index c5fe2499..919b8404 100644 --- a/clients/python/test/e2e/test_solvers_api.py +++ b/clients/python/test/e2e/test_solvers_api.py @@ -1,31 +1,14 @@ -import os - import osparc import pytest from packaging.version import Version -@pytest.fixture -def configuration() -> osparc.Configuration: - """Configuration - - Returns: - osparc.Configuration: The Configuration - """ - cfg = osparc.Configuration( - host=os.environ["OSPARC_API_HOST"], - username=os.environ["OSPARC_API_KEY"], - password=os.environ["OSPARC_API_SECRET"], - ) - return cfg - - @pytest.mark.skipif( Version(osparc.__version__) < Version("0.6.0"), reason=f"osparc.__version__={osparc.__version__} is older than 0.6.0", ) -def test_get_jobs(configuration: osparc.Configuration): - """Test the get_jobs method +def test_jobs(cfg: osparc.Configuration): + """Test the jobs method Args: configuration (osparc.Configuration): The Configuration @@ -33,12 +16,12 @@ def test_get_jobs(configuration: osparc.Configuration): solver: str = "simcore/services/comp/itis/sleeper" version: str = "2.0.2" n_jobs: int = 3 - with osparc.ApiClient(configuration) as api_client: + with osparc.ApiClient(cfg) as api_client: solvers_api: osparc.SolversApi = osparc.SolversApi(api_client) sleeper: osparc.Solver = solvers_api.get_solver_release(solver, version) # initial iterator - init_iter = solvers_api.get_jobs(sleeper.id, sleeper.version, limit=3) + init_iter = solvers_api.jobs(sleeper.id, sleeper.version) 
n_init_iter: int = len(init_iter) assert n_init_iter >= 0 @@ -50,18 +33,19 @@ def test_get_jobs(configuration: osparc.Configuration): ) created_job_ids.append(job.id) - tmp_iter = solvers_api.get_jobs( - sleeper.id, sleeper.version, limit=3, offset=n_init_iter - ) + tmp_iter = solvers_api.jobs(sleeper.id, sleeper.version) + solvers_api.jobs(sleeper.id, sleeper.version) - final_iter = solvers_api.get_jobs(sleeper.id, sleeper.version, limit=3) + final_iter = solvers_api.jobs(sleeper.id, sleeper.version) assert len(final_iter) > 0, "No jobs were available" assert n_init_iter + n_jobs == len( final_iter ), "An incorrect number of jobs was recorded" - for elm in tmp_iter: + for ii, elm in enumerate(tmp_iter): assert isinstance(elm, osparc.Job) + if ii > 100: + break # cleanup for elm in created_job_ids:
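
For orientation, a minimal client-side usage sketch of the flow this patch introduces: the multipart upload wrapper in FilesApi and the pagination-backed jobs() iterator in SolversApi. This is illustrative only and not part of the patch; it assumes the OSPARC_API_HOST / OSPARC_API_KEY / OSPARC_API_SECRET environment variables used by the e2e conftest are set, and the file path and the sleeper solver release are placeholders borrowed from the tests.

    # illustrative sketch, not part of the patch; names follow the new osparc client API
    import os
    from pathlib import Path

    import osparc

    cfg = osparc.Configuration(
        host=os.environ["OSPARC_API_HOST"],
        username=os.environ["OSPARC_API_KEY"],
        password=os.environ["OSPARC_API_SECRET"],
    )

    with osparc.ApiClient(cfg) as api_client:
        files_api = osparc.FilesApi(api_client=api_client)

        # upload_file() wraps upload_file_async(): it requests upload links,
        # streams the file chunk-wise to the presigned URLs and then completes
        # (or, on error, aborts) the multipart upload
        uploaded: osparc.File = files_api.upload_file(Path("large_test_file.txt"))

        # download into a chosen folder; the name is de-duplicated if it already exists
        local_path: str = files_api.download_file(
            uploaded.id, destination_folder=Path(".")
        )
        print(f"downloaded to {local_path}")

        solvers_api = osparc.SolversApi(api_client)
        sleeper = solvers_api.get_solver_release(
            "simcore/services/comp/itis/sleeper", "2.0.2"
        )

        # jobs() returns a PaginationGenerator: len() reports the server-side total,
        # and iteration follows each page's links.next URL transparently
        all_jobs = solvers_api.jobs(sleeper.id, sleeper.version)
        print(f"{len(all_jobs)} jobs submitted to this solver")
        for job in all_jobs:
            assert isinstance(job, osparc.Job)

Note that the synchronous upload_file() drives the async implementation via asyncio.run(), which is why the package applies nest_asyncio at import time: it keeps the call usable from environments that already run an event loop, such as Jupyter notebooks.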