From a3907134568c24b4bbcfbdcfb9520be5ea73f827 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Thu, 23 Feb 2023 08:14:04 +0100 Subject: [PATCH 01/12] adding UTC timezone to the project --- scripts/demo/create_portal_markdown.py | 10 +++-- .../api/routes/files.py | 4 +- .../api/routes/health.py | 6 +-- .../models/schemas/jobs.py | 8 ++-- .../utils/solver_job_models_converters.py | 6 +-- .../api/routes/health.py | 4 +- .../unit/with_dbs/test_api_routes_services.py | 7 +++- .../api/routes/health.py | 4 +- .../api/routes/health.py | 7 ++-- .../modules/db/repositories/comp_runs.py | 12 +++--- .../modules/db/repositories/comp_tasks.py | 4 +- .../dynamic_sidecar/docker_api/_volume.py | 4 +- .../utils/computations.py | 4 +- .../tests/unit/with_dbs/conftest.py | 42 +++++++++---------- .../with_dbs/test_api_route_computations.py | 8 +++- ...test_modules_dynamic_sidecar_docker_api.py | 4 +- .../api/_health.py | 4 +- .../api/_invitations.py | 4 +- .../invitations.py | 4 +- .../tests/unit/test_invitations.py | 6 +-- .../src/simcore_service_storage/models.py | 12 +++--- .../simcore_service_storage/simcore_s3_dsm.py | 8 ++-- .../storage/tests/unit/test_dsm_dsmcleaner.py | 10 ++--- services/storage/tests/unit/test_utils.py | 6 +-- .../exporter/formatters/formatter_v1.py | 4 +- .../login/_confirmation.py | 4 +- .../simcore_service_webserver/login/cli.py | 4 +- .../login/handlers_registration.py | 6 ++- .../login/storage.py | 6 +-- .../projects/projects_api.py | 4 +- .../src/simcore_service_webserver/utils.py | 4 +- .../utils_rate_limiting.py | 4 +- .../unit/isolated/test_projects_models.py | 4 +- .../tests/unit/isolated/test_users_models.py | 4 +- .../server/tests/unit/isolated/test_utils.py | 7 ++-- .../tests/unit/with_dbs/02/test_project_db.py | 4 +- .../02/test_projects_handlers__open_close.py | 4 +- .../02/test_projects_nodes_handler.py | 4 +- .../tests/unit/with_dbs/03/test_users_db.py | 4 +- 39 files changed, 134 insertions(+), 121 deletions(-) diff --git a/scripts/demo/create_portal_markdown.py b/scripts/demo/create_portal_markdown.py index 217e046d28a..868d51b2653 100644 --- a/scripts/demo/create_portal_markdown.py +++ b/scripts/demo/create_portal_markdown.py @@ -11,7 +11,7 @@ import logging import sys from contextlib import contextmanager -from datetime import datetime +from datetime import datetime, timezone from pathlib import Path from string import ascii_uppercase from typing import Optional @@ -89,7 +89,7 @@ def main(mock_codes, *, trial_account_days: Optional[int] = None, uid: int = 1): with _open(file_path) as fh: print( "".format( - current_path.name, datetime.utcnow() + current_path.name, datetime.now(timezone.utc).replace(tzinfo=None) ), file=fh, ) @@ -133,7 +133,11 @@ def main(mock_codes, *, trial_account_days: Optional[int] = None, uid: int = 1): ) print('""issuer"" : ""support@osparc.io"" ,', file=fh) print(f'""trial_account_days"" : ""{trial_account_days}""', file=fh) - print('}",%s' % datetime.now().isoformat(sep=" "), file=fh) + print( + '}",%s' + % datetime.now(timezone.utc).replace(tzinfo=None).isoformat(sep=" "), + file=fh, + ) if __name__ == "__main__": diff --git a/services/api-server/src/simcore_service_api_server/api/routes/files.py b/services/api-server/src/simcore_service_api_server/api/routes/files.py index 9c7e0e25596..fc9343572d7 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/files.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/files.py @@ -103,7 +103,9 @@ async def upload_file( ) # assign file_id. 
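# A minimal sketch, assuming only the standard library, of the naive/aware
# distinction this patch series converts to: datetime.utcnow() returns a
# *naive* datetime, datetime.now(timezone.utc) an *aware* one, and
# .replace(tzinfo=None) strips the offset again wherever callers still
# expect naive UTC values.
#
#     from datetime import datetime, timezone
#
#     aware = datetime.now(timezone.utc)       # tzinfo == timezone.utc
#     naive_utc = aware.replace(tzinfo=None)   # same wall-clock time, tzinfo dropped
#     assert aware.tzinfo is not None and naive_utc.tzinfo is None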
file_meta: File = await File.create_from_uploaded( - file, file_size=file_size, created_at=datetime.utcnow().isoformat() + file, + file_size=file_size, + created_at=datetimedatetime.now(datetime.timezone.utc).isoformat(), ) logger.debug( "Assigned id: %s of %s bytes (content-length), real size %s bytes", diff --git a/services/api-server/src/simcore_service_api_server/api/routes/health.py b/services/api-server/src/simcore_service_api_server/api/routes/health.py index e4cd5c41ef6..b65fbc2389e 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/health.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/health.py @@ -1,6 +1,6 @@ import asyncio from datetime import datetime -from typing import Callable, Tuple +from typing import Callable from fastapi import APIRouter, Depends from fastapi.responses import PlainTextResponse @@ -19,7 +19,7 @@ @router.get("/", include_in_schema=False, response_class=PlainTextResponse) async def check_service_health(): - return f"{__name__}@{datetime.utcnow().isoformat()}" + return f"{__name__}@{datetimedatetime.now(datetime.timezone.utc).isoformat()}" @router.get("/state", include_in_schema=False) @@ -31,7 +31,7 @@ async def get_service_state( url_for: Callable = Depends(get_reverse_url_mapper), ): apis = (catalog_client, director2_api, storage_client, webserver_client) - heaths: Tuple[bool] = await asyncio.gather(*[api.is_responsive() for api in apis]) + heaths: tuple[bool] = await asyncio.gather(*[api.is_responsive() for api in apis]) current_status = AppStatusCheck( app_name=PROJECT_NAME, diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py index 49af4373530..e7109b2a7ec 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py @@ -1,7 +1,7 @@ import hashlib from datetime import datetime from enum import Enum -from typing import Optional, Type, Union +from typing import Optional, Union from uuid import UUID, uuid4 from pydantic import BaseModel, Field, HttpUrl, conint, validator @@ -174,7 +174,7 @@ def create_now( id=global_uuid, runner_name=parent_name, inputs_checksum=inputs_checksum, - created_at=datetime.utcnow(), + created_at=datetimedatetime.now(datetime.timezone.utc), url=None, runner_url=None, outputs_url=None, @@ -215,7 +215,7 @@ class TaskStates(str, Enum): ABORTED = "ABORTED" -PercentageInt: Type[int] = conint(ge=0, le=100) +PercentageInt: type[int] = conint(ge=0, le=100) class JobStatus(BaseModel): @@ -254,5 +254,5 @@ class Config(BaseConfig): } def take_snapshot(self, event: str = "submitted"): - setattr(self, f"{event}_at", datetime.utcnow()) + setattr(self, f"{event}_at", datetimedatetime.now(datetime.timezone.utc)) return getattr(self, f"{event}_at") diff --git a/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py b/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py index 5e079bb6804..7c21c93c41f 100644 --- a/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py +++ b/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py @@ -28,7 +28,7 @@ _BASE_UUID = uuid.UUID("231e13db-6bc6-4f64-ba56-2ee2c73b9f09") -@lru_cache() +@lru_cache def compose_uuid_from(*values) -> str: composition = "/".join(map(str, values)) new_uuid = uuid.uuid5(_BASE_UUID, 
composition) @@ -41,7 +41,7 @@ def format_datetime(snapshot: datetime) -> str: def now_str() -> str: # NOTE: backend MUST use UTC - return format_datetime(datetime.utcnow()) + return format_datetime(datetimedatetime.now(datetime.timezone.utc)) # CONVERTERS -------------- @@ -248,7 +248,7 @@ def create_jobstatus_from_task(task: ComputationTaskGet) -> JobStatus: job_id=task.id, state=task.state, progress=task.guess_progress(), - submitted_at=datetime.utcnow(), + submitted_at=datetimedatetime.now(datetime.timezone.utc), ) # FIXME: timestamp is wrong but at least it will stop run diff --git a/services/catalog/src/simcore_service_catalog/api/routes/health.py b/services/catalog/src/simcore_service_catalog/api/routes/health.py index 39569335684..8f636f448ce 100644 --- a/services/catalog/src/simcore_service_catalog/api/routes/health.py +++ b/services/catalog/src/simcore_service_catalog/api/routes/health.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timezone from fastapi import APIRouter @@ -7,4 +7,4 @@ @router.get("/", include_in_schema=False) async def check_service_health(): - return f"{__name__}@{datetime.utcnow().isoformat()}" + return f"{__name__}@{datetime.now(timezone.utc).isoformat()}" diff --git a/services/catalog/tests/unit/with_dbs/test_api_routes_services.py b/services/catalog/tests/unit/with_dbs/test_api_routes_services.py index 34f8dfbfe24..f67159ac396 100644 --- a/services/catalog/tests/unit/with_dbs/test_api_routes_services.py +++ b/services/catalog/tests/unit/with_dbs/test_api_routes_services.py @@ -6,7 +6,7 @@ # pylint: disable=unused-variable import re -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from typing import Callable import pytest @@ -233,6 +233,7 @@ async def test_list_services_without_details_with_wrong_product_returns_0_servic assert len(data) == 0 +@pytest.mark.testit async def test_list_services_that_are_deprecated( disable_service_caching, mock_catalog_background_task, @@ -248,7 +249,9 @@ async def test_list_services_that_are_deprecated( len(products_names) > 1 ), "please adjust the fixture to have the right number of products" # injects fake data in db - deprecation_date = datetime.utcnow() + timedelta(days=1) + deprecation_date = datetime.now(timezone.utc).replace(tzinfo=None) + timedelta( + days=1 + ) deprecated_service = service_catalog_faker( "simcore/services/dynamic/jupyterlab", "1.0.1", diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py index a775627ca91..24404c1f3de 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime +from datetime import datetime, timezone from typing import Callable from fastapi import APIRouter, Depends @@ -23,7 +23,7 @@ status_code=status.HTTP_200_OK, ) async def get_service_alive(): - return f"{__name__}@{datetime.utcnow().isoformat()}" + return f"{__name__}@{datetime.now(timezone.utc).isoformat()}" @router.get("/ready", status_code=status.HTTP_200_OK, response_model=AppStatusCheck) diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/health.py b/services/director-v2/src/simcore_service_director_v2/api/routes/health.py index cc2d21f7669..77a63d204eb 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/api/routes/health.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/health.py @@ -1,5 +1,4 @@ -from datetime import datetime -from typing import Dict +from datetime import datetime, timezone from fastapi import APIRouter @@ -7,5 +6,5 @@ @router.get("/") -async def check_service_health() -> Dict[str, str]: - return {"timestamp": f"{__name__}@{datetime.utcnow().isoformat()}"} +async def check_service_health() -> dict[str, str]: + return {"timestamp": f"{__name__}@{datetime.now(timezone.utc).isoformat()}"} diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py index 82b5e1840a0..62c0829a9e7 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py @@ -1,7 +1,7 @@ import logging from collections import deque -from datetime import datetime -from typing import List, Optional, Set +from datetime import datetime, timezone +from typing import Optional import sqlalchemy as sa from aiopg.sa.result import RowProxy @@ -52,8 +52,8 @@ async def get( return CompRunsAtDB.from_orm(row) async def list( - self, filter_by_state: Optional[Set[RunningState]] = None - ) -> List[CompRunsAtDB]: + self, filter_by_state: Optional[set[RunningState]] = None + ) -> list[CompRunsAtDB]: if not filter_by_state: filter_by_state = set() runs_in_db = deque() @@ -99,7 +99,7 @@ async def create( cluster_id=cluster_id if cluster_id != DEFAULT_CLUSTER_ID else None, iteration=iteration, result=RUNNING_STATE_TO_DB[RunningState.PUBLISHED], - started=datetime.utcnow(), + started=datetime.now(timezone.utc), ) .returning(literal_column("*")) ) @@ -133,7 +133,7 @@ async def set_run_result( ) -> Optional[CompRunsAtDB]: values = {"result": RUNNING_STATE_TO_DB[result_state]} if final_state: - values.update({"ended": datetime.utcnow()}) + values.update({"ended": datetime.now(timezone.utc)}) return await self.update( user_id, project_id, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks.py index d79f616cf11..5a1cdcaaeab 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks.py @@ -1,6 +1,6 @@ import asyncio import logging -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Optional import sqlalchemy as sa @@ -94,7 +94,7 @@ async def _generate_tasks_list_from_project( inputs=node.inputs, outputs=node.outputs, image=image, - submit=datetime.utcnow(), + submit=datetime.now(timezone.utc), state=task_state, internal_id=internal_id, node_class=node_class, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_volume.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_volume.py index fa320fd57d1..faf4bda3f24 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_volume.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_volume.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime 
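# A short sketch, assuming only the standard library, of why this import change
# matters for the timeout check in this hunk: aware and naive datetimes cannot
# be mixed in arithmetic, so once datetime.now(timezone.utc) is used, the
# created_at value returned by to_datetime(...) must be aware as well.
#
#     from datetime import datetime, timezone
#
#     try:
#         datetime.now(timezone.utc) - datetime.utcnow()   # aware minus naive
#     except TypeError as err:
#         print(err)   # "can't subtract offset-naive and offset-aware datetimes"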
+from datetime import datetime, timezone from fastapi.encoders import jsonable_encoder from models_library.projects import ProjectID @@ -152,7 +152,7 @@ async def remove_pending_volume_removal_services( volume_removal_service["Spec"]["Labels"]["service_timeout_s"] ) created_at = to_datetime(volume_removal_services[0]["CreatedAt"]) - time_diff = datetime.utcnow() - created_at + time_diff = datetime.now(timezone.utc) - created_at service_timed_out = time_diff.seconds > (service_timeout_s * 10) if service_timed_out: service_id = volume_removal_service["ID"] diff --git a/services/director-v2/src/simcore_service_director_v2/utils/computations.py b/services/director-v2/src/simcore_service_director_v2/utils/computations.py index 8167097c87f..851ab929b48 100644 --- a/services/director-v2/src/simcore_service_director_v2/utils/computations.py +++ b/services/director-v2/src/simcore_service_director_v2/utils/computations.py @@ -1,6 +1,6 @@ import logging import re -from datetime import datetime +from datetime import datetime, timezone from typing import Any from models_library.projects_state import RunningState @@ -118,7 +118,7 @@ async def find_deprecated_tasks( for key_version in task_key_versions ) ) - today = datetime.utcnow() + today = datetime.now(timezone.utc).replace(tzinfo=None) def _is_service_deprecated(service: dict[str, Any]) -> bool: if deprecation_date := service.get("deprecated"): diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index de612a6db04..7a5648cc910 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -6,8 +6,8 @@ import json -from datetime import datetime -from typing import Any, Callable, Dict, Iterator, List +from datetime import datetime, timezone +from typing import Any, Callable, Iterator from uuid import uuid4 import pytest @@ -35,7 +35,7 @@ def pipeline( postgres_db: sa.engine.Engine, ) -> Iterator[Callable[..., CompPipelineAtDB]]: - created_pipeline_ids: List[str] = [] + created_pipeline_ids: list[str] = [] def creator(**pipeline_kwargs) -> CompPipelineAtDB: pipeline_config = { @@ -66,13 +66,13 @@ def creator(**pipeline_kwargs) -> CompPipelineAtDB: @pytest.fixture -def tasks(postgres_db: sa.engine.Engine) -> Iterator[Callable[..., List[CompTaskAtDB]]]: - created_task_ids: List[int] = [] +def tasks(postgres_db: sa.engine.Engine) -> Iterator[Callable[..., list[CompTaskAtDB]]]: + created_task_ids: list[int] = [] def creator( - user: Dict[str, Any], project: ProjectAtDB, **overrides_kwargs - ) -> List[CompTaskAtDB]: - created_tasks: List[CompTaskAtDB] = [] + user: dict[str, Any], project: ProjectAtDB, **overrides_kwargs + ) -> list[CompTaskAtDB]: + created_tasks: list[CompTaskAtDB] = [] for internal_id, (node_id, node_data) in enumerate(project.workbench.items()): task_config = { "project_id": f"{project.uuid}", @@ -99,7 +99,7 @@ def creator( ), # type: ignore "node_class": to_node_class(node_data.key), "internal_id": internal_id + 1, - "submit": datetime.utcnow(), + "submit": datetime.now(timezone.utc), "job_id": generate_dask_job_id( service_key=node_data.key, service_version=node_data.version, @@ -131,10 +131,10 @@ def creator( @pytest.fixture def runs(postgres_db: sa.engine.Engine) -> Iterator[Callable[..., CompRunsAtDB]]: - created_run_ids: List[int] = [] + created_run_ids: list[int] = [] def creator( - user: Dict[str, Any], project: ProjectAtDB, **run_kwargs + user: dict[str, Any], project: ProjectAtDB, **run_kwargs ) -> 
CompRunsAtDB: run_config = { "project_uuid": f"{project.uuid}", @@ -164,9 +164,9 @@ def creator( def cluster( postgres_db: sa.engine.Engine, ) -> Iterator[Callable[..., Cluster]]: - created_cluster_ids: List[str] = [] + created_cluster_ids: list[str] = [] - def creator(user: Dict[str, Any], **cluster_kwargs) -> Cluster: + def creator(user: dict[str, Any], **cluster_kwargs) -> Cluster: cluster_config = Cluster.Config.schema_extra["examples"][1] cluster_config["owner"] = user["primary_gid"] cluster_config.update(**cluster_kwargs) @@ -233,12 +233,12 @@ def creator(user: Dict[str, Any], **cluster_kwargs) -> Cluster: @pytest.fixture def published_project( - registered_user: Callable[..., Dict[str, Any]], + registered_user: Callable[..., dict[str, Any]], project: Callable[..., ProjectAtDB], pipeline: Callable[..., CompPipelineAtDB], - tasks: Callable[..., List[CompTaskAtDB]], - fake_workbench_without_outputs: Dict[str, Any], - fake_workbench_adjacency: Dict[str, Any], + tasks: Callable[..., list[CompTaskAtDB]], + fake_workbench_without_outputs: dict[str, Any], + fake_workbench_adjacency: dict[str, Any], ) -> PublishedProject: user = registered_user() created_project = project(user, workbench=fake_workbench_without_outputs) @@ -254,13 +254,13 @@ def published_project( @pytest.fixture def running_project( - registered_user: Callable[..., Dict[str, Any]], + registered_user: Callable[..., dict[str, Any]], project: Callable[..., ProjectAtDB], pipeline: Callable[..., CompPipelineAtDB], - tasks: Callable[..., List[CompTaskAtDB]], + tasks: Callable[..., list[CompTaskAtDB]], runs: Callable[..., CompRunsAtDB], - fake_workbench_without_outputs: Dict[str, Any], - fake_workbench_adjacency: Dict[str, Any], + fake_workbench_without_outputs: dict[str, Any], + fake_workbench_adjacency: dict[str, Any], ) -> RunningProject: user = registered_user() created_project = project(user, workbench=fake_workbench_without_outputs) diff --git a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py index dcc7900a589..f0486a0f5ca 100644 --- a/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py +++ b/services/director-v2/tests/unit/with_dbs/test_api_route_computations.py @@ -7,7 +7,7 @@ import json import re -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from pathlib import Path from typing import Any, Callable @@ -160,7 +160,10 @@ def mocked_catalog_service_fcts_deprecated( ).respond( json=fake_service_details.copy( update={ - "deprecated": (datetime.utcnow() - timedelta(days=1)).isoformat() + "deprecated": ( + datetime.now(timezone.utc).replace(tzinfo=None) + - timedelta(days=1) + ).isoformat() } ).dict(by_alias=True) ) @@ -249,6 +252,7 @@ async def test_start_computation( assert response.status_code == status.HTTP_201_CREATED, response.text +@pytest.mark.testit async def test_start_computation_with_deprecated_services_raises_406( minimal_configuration: None, mocked_director_service_fcts, diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index 9f04bea3a3c..40ece8d0a64 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -5,7 +5,7 @@ import asyncio import logging import sys -from datetime import datetime 
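# A stdlib-only caveat for the fixture further down in this file: arguments
# inside a decorator, e.g. @pytest.fixture(params=[None, datetime.now(timezone.utc)]),
# are evaluated once at module import, so every parametrized run sees the same
# frozen timestamp rather than a fresh one.
#
#     from datetime import datetime, timezone
#
#     frozen = [None, datetime.now(timezone.utc)]            # evaluated at definition time
#     deferred = [None, lambda: datetime.now(timezone.utc)]  # a factory defers the call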
+from datetime import datetime, timezone from typing import Any, AsyncIterable, AsyncIterator, Optional from uuid import UUID, uuid4 @@ -335,7 +335,7 @@ def labels_example(request: FixtureRequest) -> SimcoreServiceLabels: return request.param -@pytest.fixture(params=[None, datetime.utcnow()]) +@pytest.fixture(params=[None, datetime.now(timezone.utc)]) def time_dy_sidecar_became_unreachable(request: FixtureRequest) -> Optional[datetime]: return request.param diff --git a/services/invitations/src/simcore_service_invitations/api/_health.py b/services/invitations/src/simcore_service_invitations/api/_health.py index 7d8b180c215..611f200e73e 100644 --- a/services/invitations/src/simcore_service_invitations/api/_health.py +++ b/services/invitations/src/simcore_service_invitations/api/_health.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime +from datetime import datetime, timezone from fastapi import APIRouter from fastapi.responses import PlainTextResponse @@ -15,4 +15,4 @@ @router.get("/", response_class=PlainTextResponse) async def healthcheck(): - return f"{__name__}@{datetime.utcnow().isoformat()}" + return f"{__name__}@{datetime.now(timezone.utc).isoformat()}" diff --git a/services/invitations/src/simcore_service_invitations/api/_invitations.py b/services/invitations/src/simcore_service_invitations/api/_invitations.py index f069513cf5f..c2577cdc16f 100644 --- a/services/invitations/src/simcore_service_invitations/api/_invitations.py +++ b/services/invitations/src/simcore_service_invitations/api/_invitations.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Optional from fastapi import APIRouter, Depends, HTTPException, status @@ -90,7 +90,7 @@ async def create_invitation( ) invitation = _InvitationContentAndLink( invitation_url=invitation_link, - created=datetime.utcnow(), + created=datetime.now(timezone.utc), **invitation_inputs.dict(), ) diff --git a/services/invitations/src/simcore_service_invitations/invitations.py b/services/invitations/src/simcore_service_invitations/invitations.py index 1f7f98a7216..c4f5c5bb065 100644 --- a/services/invitations/src/simcore_service_invitations/invitations.py +++ b/services/invitations/src/simcore_service_invitations/invitations.py @@ -1,7 +1,7 @@ import base64 import binascii import logging -from datetime import datetime +from datetime import datetime, timezone from typing import Optional, cast from urllib import parse @@ -146,7 +146,7 @@ def _create_invitation_code( # builds content content = InvitationContent( **invitation_data.dict(), - created=datetime.utcnow(), + created=datetime.now(timezone.utc), ) content_jsonstr: str = _ContentWithShortNames.serialize(content) diff --git a/services/invitations/tests/unit/test_invitations.py b/services/invitations/tests/unit/test_invitations.py index 2eb59358429..76fc2751448 100644 --- a/services/invitations/tests/unit/test_invitations.py +++ b/services/invitations/tests/unit/test_invitations.py @@ -4,7 +4,7 @@ # pylint: disable=too-many-arguments import binascii -from datetime import datetime +from datetime import datetime, timezone from urllib import parse import cryptography.fernet @@ -39,7 +39,7 @@ def test_import_and_export_invitation_alias_by_alias( ): expected_content = InvitationContent( **invitation_data.dict(), - created=datetime.utcnow(), + created=datetime.now(timezone.utc), ) raw_data = _ContentWithShortNames.serialize(expected_content) @@ -52,7 +52,7 @@ def test_export_by_alias_produces_smaller_strings( 
): content = InvitationContent( **invitation_data.dict(), - created=datetime.utcnow(), + created=datetime.now(timezone.utc), ) raw_data = _ContentWithShortNames.serialize(content) diff --git a/services/storage/src/simcore_service_storage/models.py b/services/storage/src/simcore_service_storage/models.py index 0a71170d6b5..15d702ca62d 100644 --- a/services/storage/src/simcore_service_storage/models.py +++ b/services/storage/src/simcore_service_storage/models.py @@ -1,6 +1,6 @@ -import datetime import urllib.parse from dataclasses import dataclass +from datetime import datetime, timezone from typing import Optional from uuid import UUID @@ -54,14 +54,14 @@ class FileMetaDataAtDB(BaseModel): project_id: Optional[ProjectID] = None node_id: Optional[NodeID] = None user_id: UserID - created_at: datetime.datetime + created_at: datetime file_id: SimcoreS3FileID file_size: ByteSize - last_modified: datetime.datetime + last_modified: datetime entity_tag: Optional[ETag] = None is_soft_link: bool upload_id: Optional[UploadID] = None - upload_expires_at: Optional[datetime.datetime] = None + upload_expires_at: Optional[datetime] = None class Config: orm_mode = True @@ -70,7 +70,7 @@ class Config: class FileMetaData(FileMetaDataGet): upload_id: Optional[UploadID] = None - upload_expires_at: Optional[datetime.datetime] = None + upload_expires_at: Optional[datetime] = None location: LocationName bucket_name: str @@ -92,7 +92,7 @@ def from_simcore_node( ): parts = file_id.split("/") - now = datetime.datetime.utcnow() + now = datetime.now(timezone.utc) fmd_kwargs = { "file_uuid": file_id, "location_id": location_id, diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py index 88b70bce078..b1d416bdcb6 100644 --- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py +++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py @@ -1,4 +1,3 @@ -import datetime import functools import logging import tempfile @@ -6,6 +5,7 @@ from collections import deque from contextlib import suppress from dataclasses import dataclass +from datetime import datetime, timedelta, timezone from pathlib import Path from typing import Any, Awaitable, Callable, Optional, Union @@ -586,7 +586,7 @@ async def _clean_expired_uploads(self): 1. will try to update the entry from S3 backend if exists 2. will delete the entry if nothing exists in S3 backend. 
""" - now = datetime.datetime.utcnow() + now = datetime.now(timezone.utc) async with self.engine.acquire() as conn: list_of_expired_uploads = await db_file_meta_data.list_fmds( conn, expired_after=now @@ -821,8 +821,8 @@ async def _create_fmd_for_upload( file_id: StorageFileID, upload_id: Optional[UploadID], ) -> FileMetaDataAtDB: - now = datetime.datetime.utcnow() - upload_expiration_date = now + datetime.timedelta( + now = datetime.now(timezone.utc) + upload_expiration_date = now + timedelta( seconds=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS ) fmd = FileMetaData.from_simcore_node( diff --git a/services/storage/tests/unit/test_dsm_dsmcleaner.py b/services/storage/tests/unit/test_dsm_dsmcleaner.py index 2a7ce80ac04..ee90143dd2b 100644 --- a/services/storage/tests/unit/test_dsm_dsmcleaner.py +++ b/services/storage/tests/unit/test_dsm_dsmcleaner.py @@ -7,8 +7,8 @@ # pylint: disable=too-many-branches import asyncio -import datetime import urllib.parse +from datetime import datetime, timedelta, timezone from pathlib import Path from typing import Awaitable, Callable, Optional @@ -120,7 +120,7 @@ async def test_clean_expired_uploads_deletes_expired_pending_uploads( await conn.execute( file_meta_data.update() .where(file_meta_data.c.file_id == simcore_file_id) - .values(upload_expires_at=datetime.datetime.utcnow()) + .values(upload_expires_at=datetime.now(timezone.utc)) ) await asyncio.sleep(1) await simcore_s3_dsm.clean_expired_uploads() @@ -195,7 +195,7 @@ async def test_clean_expired_uploads_reverts_to_last_known_version_expired_pendi await conn.execute( file_meta_data.update() .where(file_meta_data.c.file_id == file_id) - .values(upload_expires_at=datetime.datetime.utcnow()) + .values(upload_expires_at=datetime.now(timezone.utc)) ) await asyncio.sleep(1) await simcore_s3_dsm.clean_expired_uploads() @@ -230,7 +230,7 @@ async def test_clean_expired_uploads_does_not_clean_multipart_upload_on_creation ): """This test reproduces what create_file_upload_links in dsm does, but running the cleaner in between to ensure the cleaner does not break the mechanism""" - later_than_now = datetime.datetime.utcnow() + datetime.timedelta(minutes=5) + later_than_now = datetime.now(timezone.utc) + timedelta(minutes=5) fmd = FileMetaData.from_simcore_node( user_id, simcore_file_id, @@ -289,7 +289,7 @@ async def test_clean_expired_uploads_cleans_dangling_multipart_uploads_if_no_cor ): """This test reproduces what create_file_upload_links in dsm does, but running the cleaner in between to ensure the cleaner does not break the mechanism""" - later_than_now = datetime.datetime.utcnow() + datetime.timedelta(minutes=5) + later_than_now = datetime.now(timezone.utc) + timedelta(minutes=5) fmd = FileMetaData.from_simcore_node( user_id, simcore_file_id, diff --git a/services/storage/tests/unit/test_utils.py b/services/storage/tests/unit/test_utils.py index 9f184d707dc..ee0d8753cd9 100644 --- a/services/storage/tests/unit/test_utils.py +++ b/services/storage/tests/unit/test_utils.py @@ -1,5 +1,5 @@ -import datetime import random +from datetime import datetime, timezone from pathlib import Path from typing import Callable, Optional from uuid import uuid4 @@ -54,7 +54,7 @@ async def test_download_files(tmpdir): random.randint(1, 1000000), "some_valid_entity_tag", None, - datetime.datetime.utcnow(), + datetime.now(timezone.utc), False, ), (random.randint(1, 1000000), "some_valid_entity_tag", None, None, True), @@ -64,7 +64,7 @@ def test_file_entry_valid( file_size: ByteSize, entity_tag: Optional[ETag], 
upload_id: Optional[UploadID], - upload_expires_at: Optional[datetime.datetime], + upload_expires_at: Optional[datetime], expected_validity: bool, create_simcore_file_id: Callable[[ProjectID, NodeID, str], SimcoreS3FileID], faker: Faker, diff --git a/services/web/server/src/simcore_service_webserver/exporter/formatters/formatter_v1.py b/services/web/server/src/simcore_service_webserver/exporter/formatters/formatter_v1.py index 5fc157048ee..66ce9fe49f7 100644 --- a/services/web/server/src/simcore_service_webserver/exporter/formatters/formatter_v1.py +++ b/services/web/server/src/simcore_service_webserver/exporter/formatters/formatter_v1.py @@ -1,8 +1,8 @@ -import datetime import json import logging import traceback from collections import deque +from datetime import datetime, timezone from itertools import chain from pathlib import Path from typing import Deque, Optional @@ -363,7 +363,7 @@ async def import_files_and_validate_project( # between the original and new study shuffled_project_file.name = "{} {}".format( shuffled_project_file.name, - datetime.datetime.utcnow().strftime("%Y:%m:%d:%H:%M:%S"), + datetime.now(timezone.utc).replace(tzinfo=None).strftime("%Y:%m:%d:%H:%M:%S"), ) log.debug("Shuffled project data: %s", shuffled_project_file) diff --git a/services/web/server/src/simcore_service_webserver/login/_confirmation.py b/services/web/server/src/simcore_service_webserver/login/_confirmation.py index afa59f1bbe5..07781b689ce 100644 --- a/services/web/server/src/simcore_service_webserver/login/_confirmation.py +++ b/services/web/server/src/simcore_service_webserver/login/_confirmation.py @@ -6,7 +6,7 @@ Codes have expiration date (duration time is configurable) """ import logging -from datetime import datetime +from datetime import datetime, timezone from typing import Optional from aiohttp import web @@ -77,6 +77,6 @@ async def is_confirmation_allowed( def is_confirmation_expired(cfg: LoginOptions, confirmation: ConfirmationTokenDict): - age = datetime.utcnow() - confirmation["created_at"] + age = datetime.now(timezone.utc).replace(tzinfo=None) - confirmation["created_at"] lifetime = cfg.get_confirmation_lifetime(confirmation["action"]) return age > lifetime diff --git a/services/web/server/src/simcore_service_webserver/login/cli.py b/services/web/server/src/simcore_service_webserver/login/cli.py index f72743f6ca0..d2cd0e98c17 100644 --- a/services/web/server/src/simcore_service_webserver/login/cli.py +++ b/services/web/server/src/simcore_service_webserver/login/cli.py @@ -1,5 +1,5 @@ import sys -from datetime import datetime +from datetime import datetime, timezone from typing import Optional import typer @@ -47,7 +47,7 @@ def invitations( fg=typer.colors.BLUE, ) - utcnow = datetime.utcnow() + utcnow = datetime.now(timezone.utc).replace(tzinfo=None) today: datetime = utcnow.today() print("code,user_id,action,data,created_at", file=sys.stdout) for n, code in enumerate(codes, start=1): diff --git a/services/web/server/src/simcore_service_webserver/login/handlers_registration.py b/services/web/server/src/simcore_service_webserver/login/handlers_registration.py index 7d22dc45fc3..dd0a9a75afb 100644 --- a/services/web/server/src/simcore_service_webserver/login/handlers_registration.py +++ b/services/web/server/src/simcore_service_webserver/login/handlers_registration.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from typing import Literal, Optional from aiohttp import web @@ -117,7 +117,9 @@ async def 
register(request: web.Request): app=request.app, ) if invitation.trial_account_days: - expires_at = datetime.utcnow() + timedelta(invitation.trial_account_days) + expires_at = datetime.now(timezone.utc).replace(tzinfo=None) + timedelta( + invitation.trial_account_days + ) username = _get_user_name(registration.email) user: dict = await db.create_user( diff --git a/services/web/server/src/simcore_service_webserver/login/storage.py b/services/web/server/src/simcore_service_webserver/login/storage.py index 09d15f21192..183b92b5dd0 100644 --- a/services/web/server/src/simcore_service_webserver/login/storage.py +++ b/services/web/server/src/simcore_service_webserver/login/storage.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timezone from logging import getLogger from typing import Literal, Optional, TypedDict @@ -51,7 +51,7 @@ async def get_user(self, with_data) -> asyncpg.Record: return data async def create_user(self, data: dict) -> asyncpg.Record: - data.setdefault("created_at", datetime.utcnow()) + data.setdefault("created_at", datetime.now(timezone.utc).replace(tzinfo=None)) async with self.pool.acquire() as conn: data["id"] = await _sql.insert(conn, self.user_tbl, data) new_user = await _sql.find_one(conn, self.user_tbl, {"id": data["id"]}) @@ -82,7 +82,7 @@ async def create_confirmation( "user_id": user_id, "action": action, "data": data, - "created_at": datetime.utcnow(), + "created_at": datetime.now(timezone.utc).replace(tzinfo=None), } c = await _sql.insert( conn, self.confirm_tbl, confirmation, returning="code" diff --git a/services/web/server/src/simcore_service_webserver/projects/projects_api.py b/services/web/server/src/simcore_service_webserver/projects/projects_api.py index 6735ec78514..f6890c21c5c 100644 --- a/services/web/server/src/simcore_service_webserver/projects/projects_api.py +++ b/services/web/server/src/simcore_service_webserver/projects/projects_api.py @@ -13,7 +13,7 @@ import logging from collections import defaultdict from contextlib import suppress -from datetime import datetime +from datetime import datetime, timezone from pprint import pformat from typing import Any, Optional from uuid import UUID, uuid4 @@ -849,7 +849,7 @@ async def is_service_deprecated( ) if deprecation_date := service.get("deprecated"): deprecation_date = parse_obj_as(datetime, deprecation_date) - return datetime.utcnow() > deprecation_date + return datetime.now(timezone.utc).replace(tzinfo=None) > deprecation_date return False diff --git a/services/web/server/src/simcore_service_webserver/utils.py b/services/web/server/src/simcore_service_webserver/utils.py index 94a54a26d84..0aab0755841 100644 --- a/services/web/server/src/simcore_service_webserver/utils.py +++ b/services/web/server/src/simcore_service_webserver/utils.py @@ -9,7 +9,7 @@ import traceback import tracemalloc from collections import OrderedDict -from datetime import datetime +from datetime import datetime, timezone from pathlib import Path from typing import Any @@ -63,7 +63,7 @@ def gravatar_hash(email: str) -> str: def now() -> datetime: - return datetime.utcnow() + return datetime.now(timezone.utc).replace(tzinfo=None) def format_datetime(snapshot: datetime) -> str: diff --git a/services/web/server/src/simcore_service_webserver/utils_rate_limiting.py b/services/web/server/src/simcore_service_webserver/utils_rate_limiting.py index b5d06e606c2..03960bf44e6 100644 --- a/services/web/server/src/simcore_service_webserver/utils_rate_limiting.py +++ 
b/services/web/server/src/simcore_service_webserver/utils_rate_limiting.py @@ -1,6 +1,6 @@ import json from dataclasses import dataclass -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from functools import wraps from math import ceil from typing import NamedTuple @@ -41,7 +41,7 @@ class _Context: @wraps(decorated_function) async def wrapper(*args, **kwargs): - utc_now = datetime.utcnow() + utc_now = datetime.now(timezone.utc).replace(tzinfo=None) utc_now_timestamp = datetime.timestamp(utc_now) # reset counter & first time initialization diff --git a/services/web/server/tests/unit/isolated/test_projects_models.py b/services/web/server/tests/unit/isolated/test_projects_models.py index 2234f77273d..01a9a1e2b1f 100644 --- a/services/web/server/tests/unit/isolated/test_projects_models.py +++ b/services/web/server/tests/unit/isolated/test_projects_models.py @@ -2,7 +2,7 @@ # pylint:disable=unused-argument # pylint:disable=redefined-outer-name -import datetime +from datetime import datetime, timezone import pytest from simcore_service_webserver.projects.projects_db_utils import ( @@ -39,7 +39,7 @@ def test_convert_to_schema_names(fake_db_dict): assert "anEntryThatUsesSnakeCase" in db_entries assert "anotherEntryThatUsesSnakeCase" in db_entries # test date time conversion - date = datetime.datetime.utcnow() + date = datetime.now(timezone.utc).replace(tzinfo=None) fake_db_dict["time_entry"] = date db_entries = convert_to_schema_names(fake_db_dict, fake_email) assert "timeEntry" in db_entries diff --git a/services/web/server/tests/unit/isolated/test_users_models.py b/services/web/server/tests/unit/isolated/test_users_models.py index 04e5dbdb492..24d01bcf66d 100644 --- a/services/web/server/tests/unit/isolated/test_users_models.py +++ b/services/web/server/tests/unit/isolated/test_users_models.py @@ -1,5 +1,5 @@ from copy import deepcopy -from datetime import datetime +from datetime import datetime, timezone from pprint import pformat from typing import Any @@ -50,7 +50,7 @@ def test_user_models_examples( def test_profile_get_expiration_date(faker: Faker): - fake_expiration = datetime.utcnow() + fake_expiration = datetime.now(timezone.utc) profile = ProfileGet( id=1, login=faker.email(), role=UserRole.ADMIN, expiration_date=fake_expiration diff --git a/services/web/server/tests/unit/isolated/test_utils.py b/services/web/server/tests/unit/isolated/test_utils.py index 609cd6cce29..2acd6b92cc9 100644 --- a/services/web/server/tests/unit/isolated/test_utils.py +++ b/services/web/server/tests/unit/isolated/test_utils.py @@ -4,8 +4,7 @@ import timeit import urllib.parse from contextlib import contextmanager -from datetime import datetime -from typing import Dict +from datetime import datetime, timezone from urllib.parse import unquote_plus import pytest @@ -30,7 +29,7 @@ def test_time_utils(): assert now0 < now1 # tests biyective - now_time = datetime.utcnow() + now_time = datetime.now(timezone.utc).replace(tzinfo=None) snapshot = now_time.strftime(DATETIME_FORMAT) assert now_time == datetime.strptime(snapshot, DATETIME_FORMAT) @@ -75,7 +74,7 @@ def test_yarl_url_compose_changed_with_latest_release(): @pytest.mark.skip(reason="DEV-demo") -async def test_compute_sha1_on_small_dataset(fake_project: Dict): +async def test_compute_sha1_on_small_dataset(fake_project: dict): # Based on GitHK review https://github.com/ITISFoundation/osparc-simcore/pull/2556: # From what I know, these having function tend to be a bit CPU intensive, based on the size of the dataset. 
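# (A stdlib-only sketch for the executor idea raised in the next line of this
#  review note; compute_sha1_async and the json canonicalisation are
#  illustrative choices here, not the project's actual helper:
#
#      import asyncio
#      import hashlib
#      import json
#
#      async def compute_sha1_async(project: dict) -> str:
#          blob = json.dumps(project, sort_keys=True).encode()
#          loop = asyncio.get_running_loop()
#          return await loop.run_in_executor(
#              None, lambda: hashlib.sha1(blob).hexdigest()
#          )
#  )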
# Could we maybe have an async version of this function here, run it on an executor? diff --git a/services/web/server/tests/unit/with_dbs/02/test_project_db.py b/services/web/server/tests/unit/with_dbs/02/test_project_db.py index 1ec239a9836..b190d486d7f 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_project_db.py +++ b/services/web/server/tests/unit/with_dbs/02/test_project_db.py @@ -5,10 +5,10 @@ # pylint: disable=unused-variable import asyncio -import datetime import json import re from copy import deepcopy +from datetime import datetime, timezone from itertools import combinations from random import randint from secrets import choice @@ -90,7 +90,7 @@ def test_convert_to_schema_names(fake_project: dict[str, Any]): assert col is not None # test date time conversion - date = datetime.datetime.utcnow() + date = datetime.now(timezone.utc) db_entries["creation_date"] = date schema_entries = convert_to_schema_names(db_entries, fake_project["prjOwner"]) assert "creationDate" in schema_entries diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_handlers__open_close.py b/services/web/server/tests/unit/with_dbs/02/test_projects_handlers__open_close.py index 19d9755f8c5..81862ab27d9 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_handlers__open_close.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_handlers__open_close.py @@ -8,7 +8,7 @@ import json import time from copy import deepcopy -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from typing import Any, Awaitable, Callable, Iterator, Optional, Union from unittest import mock from unittest.mock import call @@ -564,7 +564,7 @@ async def test_open_project_with_deprecated_services_ok_but_does_not_start_dynam mock_catalog_api: dict[str, mock.Mock], ): mock_catalog_api["get_service"].return_value["deprecated"] = ( - datetime.utcnow() - timedelta(days=1) + datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(days=1) ).isoformat() url = client.app.router["open_project"].url_for(project_id=user_project["uuid"]) resp = await client.post(url, json=client_session_id_factory()) diff --git a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py index 9430e3ed921..cd439de7882 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py +++ b/services/web/server/tests/unit/with_dbs/02/test_projects_nodes_handler.py @@ -5,7 +5,7 @@ import asyncio from dataclasses import dataclass, field -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from random import choice from typing import Any, Awaitable, Callable from unittest import mock @@ -436,7 +436,7 @@ async def test_creating_deprecated_node_returns_406_not_acceptable( node_class: str, ): mock_catalog_api["get_service"].return_value["deprecated"] = ( - datetime.utcnow() - timedelta(days=1) + datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(days=1) ).isoformat() assert client.app url = client.app.router["create_node"].url_for(project_id=user_project["uuid"]) diff --git a/services/web/server/tests/unit/with_dbs/03/test_users_db.py b/services/web/server/tests/unit/with_dbs/03/test_users_db.py index d9becbf600a..3e75e2123fe 100644 --- a/services/web/server/tests/unit/with_dbs/03/test_users_db.py +++ b/services/web/server/tests/unit/with_dbs/03/test_users_db.py @@ -2,7 +2,7 @@ # pylint: disable=unused-argument # pylint: 
disable=unused-variable -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from typing import Optional import pytest @@ -17,7 +17,7 @@ from simcore_postgres_database.models.users import UserStatus from simcore_service_webserver.users_db import update_expired_users -_NOW = datetime.utcnow() +_NOW = datetime.now(timezone.utc).replace(tzinfo=None) YESTERDAY = _NOW - timedelta(days=1) TOMORROW = _NOW + timedelta(days=1) From be56e2c383642de2abb7c52e1f212679996b594c Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Thu, 23 Feb 2023 08:53:13 +0100 Subject: [PATCH 02/12] correction of services/api-server datetime UTC timezone --- .../src/simcore_service_api_server/api/routes/files.py | 4 ++-- .../src/simcore_service_api_server/api/routes/health.py | 4 ++-- .../src/simcore_service_api_server/models/schemas/jobs.py | 6 +++--- .../utils/solver_job_models_converters.py | 4 ++-- 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/api/routes/files.py b/services/api-server/src/simcore_service_api_server/api/routes/files.py index fc9343572d7..c35e0195eb0 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/files.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/files.py @@ -2,7 +2,7 @@ import io import logging from collections import deque -from datetime import datetime +from datetime import datetime, timezone from textwrap import dedent from typing import IO, Optional from uuid import UUID @@ -105,7 +105,7 @@ async def upload_file( file_meta: File = await File.create_from_uploaded( file, file_size=file_size, - created_at=datetimedatetime.now(datetime.timezone.utc).isoformat(), + created_at=datetime.now(timezone.utc).replace(tzinfo=None).isoformat(), ) logger.debug( "Assigned id: %s of %s bytes (content-length), real size %s bytes", diff --git a/services/api-server/src/simcore_service_api_server/api/routes/health.py b/services/api-server/src/simcore_service_api_server/api/routes/health.py index b65fbc2389e..2ff81a08635 100644 --- a/services/api-server/src/simcore_service_api_server/api/routes/health.py +++ b/services/api-server/src/simcore_service_api_server/api/routes/health.py @@ -1,5 +1,5 @@ import asyncio -from datetime import datetime +from datetime import datetime, timezone from typing import Callable from fastapi import APIRouter, Depends @@ -19,7 +19,7 @@ @router.get("/", include_in_schema=False, response_class=PlainTextResponse) async def check_service_health(): - return f"{__name__}@{datetimedatetime.now(datetime.timezone.utc).isoformat()}" + return f"{__name__}@{datetime.now(timezone.utc).replace(tzinfo=None).isoformat()}" @router.get("/state", include_in_schema=False) diff --git a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py index e7109b2a7ec..205e2e6df8c 100644 --- a/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py +++ b/services/api-server/src/simcore_service_api_server/models/schemas/jobs.py @@ -1,5 +1,5 @@ import hashlib -from datetime import datetime +from datetime import datetime, timezone from enum import Enum from typing import Optional, Union from uuid import UUID, uuid4 @@ -174,7 +174,7 @@ def create_now( id=global_uuid, runner_name=parent_name, inputs_checksum=inputs_checksum, - created_at=datetimedatetime.now(datetime.timezone.utc), + created_at=datetime.now(timezone.utc).replace(tzinfo=None), url=None, 
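# (A stdlib-only sketch of what the .replace(tzinfo=None) in created_at above
#  buys: keeping the field naive preserves the exact string shape that the old
#  datetime.utcnow() produced, e.g.
#
#      datetime.now(timezone.utc).isoformat()                        # ends in '+00:00'
#      datetime.now(timezone.utc).replace(tzinfo=None).isoformat()   # no offset suffix
#  )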
runner_url=None, outputs_url=None, @@ -254,5 +254,5 @@ class Config(BaseConfig): } def take_snapshot(self, event: str = "submitted"): - setattr(self, f"{event}_at", datetimedatetime.now(datetime.timezone.utc)) + setattr(self, f"{event}_at", datetime.now(timezone.utc).replace(tzinfo=None)) return getattr(self, f"{event}_at") diff --git a/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py b/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py index 7c21c93c41f..fbf1ba0d855 100644 --- a/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py +++ b/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py @@ -4,7 +4,7 @@ """ import urllib.parse import uuid -from datetime import datetime +from datetime import datetime, timezone from functools import lru_cache from typing import Callable, Optional @@ -41,7 +41,7 @@ def format_datetime(snapshot: datetime) -> str: def now_str() -> str: # NOTE: backend MUST use UTC - return format_datetime(datetimedatetime.now(datetime.timezone.utc)) + return format_datetime(datetime.now(timezone.utc).replace(tzinfo=None)) # CONVERTERS -------------- From 05562abd73ab96a8df0b9f3037200067cb77e688 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Thu, 23 Feb 2023 09:53:53 +0100 Subject: [PATCH 03/12] correction of datetime UTC timezone --- .../utils/solver_job_models_converters.py | 2 +- .../unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py b/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py index fbf1ba0d855..2af652f480a 100644 --- a/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py +++ b/services/api-server/src/simcore_service_api_server/utils/solver_job_models_converters.py @@ -248,7 +248,7 @@ def create_jobstatus_from_task(task: ComputationTaskGet) -> JobStatus: job_id=task.id, state=task.state, progress=task.guess_progress(), - submitted_at=datetimedatetime.now(datetime.timezone.utc), + submitted_at=datetime.now(timezone.utc).replace(tzinfo=None), ) # FIXME: timestamp is wrong but at least it will stop run diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index 40ece8d0a64..231a56b8a13 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -335,7 +335,7 @@ def labels_example(request: FixtureRequest) -> SimcoreServiceLabels: return request.param -@pytest.fixture(params=[None, datetime.now(timezone.utc)]) +@pytest.fixture(params=[None, datetime.now(timezone.utc).replace(tzinfo=None)]) def time_dy_sidecar_became_unreachable(request: FixtureRequest) -> Optional[datetime]: return request.param From f8413fcf921ad06e956a6ec74088aa3420fb37fa Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Thu, 23 Feb 2023 10:18:55 +0100 Subject: [PATCH 04/12] correction of datetime UTC timezone --- .../utils/pydantic_models_factory.py | 38 +++++----- .../models-library/tests/test_basic_regex.py | 9 ++- .../postgres-database/tests/test_users.py | 4 +- .../pytest_simcore/helpers/rawdata_fakers.py | 4 +- 
.../service_integration/commands/compose.py | 6 +- .../src/servicelib/exception_utils.py | 6 +- .../servicelib/long_running_tasks/_models.py | 6 +- .../servicelib/long_running_tasks/_task.py | 6 +- .../service-library/tests/fastapi/conftest.py | 7 +- .../src/simcore_service_director/producer.py | 76 ++++++++++--------- services/director/tests/test_utils.py | 4 +- .../exporter/formatters/models.py | 4 +- 12 files changed, 89 insertions(+), 81 deletions(-) diff --git a/packages/models-library/src/models_library/utils/pydantic_models_factory.py b/packages/models-library/src/models_library/utils/pydantic_models_factory.py index 4361364902a..d43e6745e98 100644 --- a/packages/models-library/src/models_library/utils/pydantic_models_factory.py +++ b/packages/models-library/src/models_library/utils/pydantic_models_factory.py @@ -23,7 +23,7 @@ """ import json import warnings -from typing import Dict, Iterable, Optional, Set, Tuple, Type +from typing import Iterable, Optional from pydantic import BaseModel, create_model, validator from pydantic.fields import ModelField, Undefined @@ -38,7 +38,7 @@ ) -def collect_fields_attrs(model_cls: Type[BaseModel]) -> Dict[str, Dict[str, str]]: +def collect_fields_attrs(model_cls: type[BaseModel]) -> dict[str, dict[str, str]]: """ >>> class MyModel(BaseModel): @@ -100,33 +100,31 @@ def _stringify(obj): def _eval_selection( model_fields: Iterable[ModelField], - include: Optional[Set[str]], - exclude: Optional[Set[str]], + include: Optional[set[str]], + exclude: Optional[set[str]], exclude_optionals: bool, -) -> Set[str]: +) -> set[str]: # TODO: use dict for deep include/exclude! SEE https://pydantic-docs.helpmanual.io/usage/exporting_models/ if include is None: - include = set(f.name for f in model_fields) + include = {f.name for f in model_fields} if exclude is None: exclude = set() if exclude_optionals: - exclude = exclude.union( - set(f.name for f in model_fields if f.required == False) - ) + exclude = exclude.union({f.name for f in model_fields if f.required == False}) selection = include - exclude return selection def _extract_field_definitions( - model_cls: Type[BaseModel], + model_cls: type[BaseModel], *, - include: Optional[Set[str]], - exclude: Optional[Set[str]], + include: Optional[set[str]], + exclude: Optional[set[str]], exclude_optionals: bool, set_all_optional: bool, -) -> Dict[str, Tuple]: +) -> dict[str, tuple]: """ Returns field_definitions: fields of the model in the format `=(, )` or `=`, @@ -136,7 +134,7 @@ def _extract_field_definitions( or, for complex use-cases, in the format `=`, e.g. 
- `foo=Field(default_factory=datetime.utcnow, alias='bar')` + `foo=Field(default_factory=datetime.now(timezone.utc).replace(tzinfo=None), alias='bar')` """ field_names = _eval_selection( @@ -160,16 +158,16 @@ def _extract_field_definitions( def copy_model( - reference_cls: Type[BaseModel], + reference_cls: type[BaseModel], *, name: str = None, - include: Optional[Set[str]] = None, - exclude: Optional[Set[str]] = None, + include: Optional[set[str]] = None, + exclude: Optional[set[str]] = None, exclude_optionals: bool = False, as_update_model: bool = False, skip_validators: bool = False, - __config__: Type[BaseConfig] = None, -) -> Type[BaseModel]: + __config__: type[BaseConfig] = None, +) -> type[BaseModel]: """ Creates a clone of `reference_cls` with a different name and a subset of fields @@ -190,7 +188,7 @@ def copy_model( # VALIDATORS - validators_funs: Dict[str, classmethod] = {} + validators_funs: dict[str, classmethod] = {} # A dict of method names and @validator class methods # SEE example in https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation if not skip_validators and reference_cls != BaseModel: diff --git a/packages/models-library/tests/test_basic_regex.py b/packages/models-library/tests/test_basic_regex.py index e0d42f76cb9..f0f99845e36 100644 --- a/packages/models-library/tests/test_basic_regex.py +++ b/packages/models-library/tests/test_basic_regex.py @@ -6,7 +6,7 @@ import keyword import re -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Optional, Pattern, Sequence, Union import pytest @@ -172,7 +172,7 @@ class webserver_timedate_utils: @classmethod def now(cls) -> datetime: - return datetime.utcnow() + return datetime.now(timezone.utc).replace(tzinfo=None) @classmethod def format_datetime(cls, snapshot: datetime) -> str: @@ -195,7 +195,10 @@ def to_datetime(cls, snapshot: str) -> datetime: ("2020-12-30T23:15:00.345Z", ("12", "30", "23", ":00", "00", ".345")), ("2020-12-30 23:15:00", INVALID), (datetime.now().isoformat(), INVALID), # as '2020-11-29T23:09:21.859469' - (datetime.utcnow().isoformat(), INVALID), # as '2020-11-29T22:09:21.859469' + ( + datetime.now(timezone.utc).replace(tzinfo=None).isoformat(), + INVALID, + ), # as '2020-11-29T22:09:21.859469' (webserver_timedate_utils.now_str(), VALID), ( webserver_timedate_utils.format_datetime( diff --git a/packages/postgres-database/tests/test_users.py b/packages/postgres-database/tests/test_users.py index b8fb8e9c2c2..9b5f7f59d36 100644 --- a/packages/postgres-database/tests/test_users.py +++ b/packages/postgres-database/tests/test_users.py @@ -3,7 +3,7 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from typing import Optional import pytest @@ -77,7 +77,7 @@ async def test_trial_accounts(pg_engine: Engine): async with pg_engine.acquire() as conn: # creates trial user - client_now = datetime.utcnow() + client_now = datetime.now(timezone.utc).replace(tzinfo=None) user_id: Optional[int] = await conn.scalar( users.insert() .values( diff --git a/packages/pytest-simcore/src/pytest_simcore/helpers/rawdata_fakers.py b/packages/pytest-simcore/src/pytest_simcore/helpers/rawdata_fakers.py index f6b41e2cda2..12d4e9e866c 100644 --- a/packages/pytest-simcore/src/pytest_simcore/helpers/rawdata_fakers.py +++ b/packages/pytest-simcore/src/pytest_simcore/helpers/rawdata_fakers.py @@ -14,7 +14,7 @@ import itertools import json import random -from datetime 
import datetime, timedelta +from datetime import datetime, timedelta, timezone from typing import Any, Callable, Final from uuid import uuid4 @@ -118,7 +118,7 @@ def fake_task_factory(first_internal_id=1) -> Callable: def fake_task(**overrides) -> dict[str, Any]: - t0 = datetime.utcnow() + t0 = datetime.now(timezone.utc).replace(tzinfo=None) data = dict( project_id=uuid4(), node_id=uuid4(), diff --git a/packages/service-integration/src/service_integration/commands/compose.py b/packages/service-integration/src/service_integration/commands/compose.py index ae68a660df2..45abedcdf56 100644 --- a/packages/service-integration/src/service_integration/commands/compose.py +++ b/packages/service-integration/src/service_integration/commands/compose.py @@ -1,5 +1,5 @@ import subprocess -from datetime import datetime +from datetime import datetime, timezone from pathlib import Path from typing import Optional @@ -107,8 +107,8 @@ def create_docker_compose_image_spec( "No explicit config for OCI/label-schema found (optional), skipping OCI annotations." ) # add required labels - extra_labels[f"{LS_LABEL_PREFIX}.build-date"] = datetime.utcnow().strftime( - "%Y-%m-%dT%H:%M:%SZ" + extra_labels[f"{LS_LABEL_PREFIX}.build-date"] = ( + datetime.now(timezone.utc).replace(tzinfo=None).strftime("%Y-%m-%dT%H:%M:%SZ") ) extra_labels[f"{LS_LABEL_PREFIX}.schema-version"] = "1.0" diff --git a/packages/service-library/src/servicelib/exception_utils.py b/packages/service-library/src/servicelib/exception_utils.py index 0a11ab87040..884596200b0 100644 --- a/packages/service-library/src/servicelib/exception_utils.py +++ b/packages/service-library/src/servicelib/exception_utils.py @@ -1,5 +1,5 @@ import logging -from datetime import datetime +from datetime import datetime, timezone from typing import Optional from pydantic import BaseModel, Field, NonNegativeFloat, PrivateAttr @@ -49,11 +49,11 @@ def try_to_raise(self, exception: BaseException) -> None: # first time the exception was detected if self._first_exception_skip is None: - self._first_exception_skip = datetime.utcnow() + self._first_exception_skip = datetime.now(timezone.utc).replace(tzinfo=None) # raise if subsequent exception is outside of delay window elif ( - datetime.utcnow() - self._first_exception_skip + datetime.now(timezone.utc).replace(tzinfo=None) - self._first_exception_skip ).total_seconds() > self.delay_for: raise exception diff --git a/packages/service-library/src/servicelib/long_running_tasks/_models.py b/packages/service-library/src/servicelib/long_running_tasks/_models.py index d0a035b9e18..e609ff684bb 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_models.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_models.py @@ -1,7 +1,7 @@ import logging import urllib.parse from asyncio import Task -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Awaitable, Callable, Coroutine, Optional from pydantic import ( @@ -76,7 +76,9 @@ class TrackedTask(BaseModel): description="if True then the task will not be auto-cancelled if no one enquires of its status", ) - started: datetime = Field(default_factory=datetime.utcnow) + started: datetime = Field( + default_factory=datetime.now(timezone.utc).replace(tzinfo=None) + ) last_status_check: Optional[datetime] = Field( default=None, description=( diff --git a/packages/service-library/src/servicelib/long_running_tasks/_task.py b/packages/service-library/src/servicelib/long_running_tasks/_task.py index 1610dacb2e4..36a8dd837bd 
100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_task.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_task.py @@ -5,7 +5,7 @@ import urllib.parse from collections import deque from contextlib import suppress -from datetime import datetime +from datetime import datetime, timezone from typing import Any, Optional, Protocol from uuid import uuid4 @@ -96,7 +96,7 @@ async def _stale_tasks_monitor_worker(self) -> None: # will not be the case. while await asyncio.sleep(self.stale_task_check_interval_s, result=True): - utc_now = datetime.utcnow() + utc_now = datetime.now(timezone.utc).replace(tzinfo=None) tasks_to_remove: list[TaskId] = [] for tasks in self._tasks_groups.values(): @@ -200,7 +200,7 @@ def get_task_status( raises TaskNotFoundError if the task cannot be found """ tracked_task: TrackedTask = self._get_tracked_task(task_id, with_task_context) - tracked_task.last_status_check = datetime.utcnow() + tracked_task.last_status_check = datetime.now(timezone.utc).replace(tzinfo=None) task = tracked_task.task done = task.done() diff --git a/packages/service-library/tests/fastapi/conftest.py b/packages/service-library/tests/fastapi/conftest.py index 9fb84bb62ff..d0e10afeb6d 100644 --- a/packages/service-library/tests/fastapi/conftest.py +++ b/packages/service-library/tests/fastapi/conftest.py @@ -3,7 +3,7 @@ # pylint: disable=unused-variable import socket -from datetime import datetime +from datetime import datetime, timezone from typing import AsyncIterable, Callable, cast import pytest @@ -23,7 +23,10 @@ def app() -> FastAPI: @api_router.get("/") def _get_root(): - return {"name": __name__, "timestamp": datetime.utcnow().isoformat()} + return { + "name": __name__, + "timestamp": datetime.now(timezone.utc).replace(tzinfo=None).isoformat(), + } @api_router.get("/data") def _get_data(x: PositiveFloat, y: int = Query(..., gt=3, lt=4)): diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index a34f06508fc..7573b171949 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -2,12 +2,12 @@ import json import logging import re -from datetime import datetime, timedelta +from datetime import datetime, timedelta, timezone from distutils.version import StrictVersion from enum import Enum from http import HTTPStatus from pprint import pformat -from typing import Dict, List, Optional, Tuple +from typing import Optional import aiodocker import aiohttp @@ -50,7 +50,7 @@ class ServiceState(Enum): FAILED = "failed" -async def _create_auth() -> Dict[str, str]: +async def _create_auth() -> dict[str, str]: return {"username": config.REGISTRY_USER, "password": config.REGISTRY_PW} @@ -74,12 +74,12 @@ async def _check_node_uuid_available( log.debug("UUID %s is free", node_uuid) -def _check_setting_correctness(setting: Dict) -> None: +def _check_setting_correctness(setting: dict) -> None: if "name" not in setting or "type" not in setting or "value" not in setting: raise exceptions.DirectorException("Invalid setting in %s" % setting) -def _parse_mount_settings(settings: List[Dict]) -> List[Dict]: +def _parse_mount_settings(settings: list[dict]) -> list[dict]: mounts = [] for s in settings: log.debug("Retrieved mount settings %s", s) @@ -103,7 +103,7 @@ def _parse_mount_settings(settings: List[Dict]) -> List[Dict]: return mounts -def _parse_env_settings(settings: List[str]) -> Dict: +def 
_parse_env_settings(settings: list[str]) -> dict: envs = {} for s in settings: log.debug("Retrieved env settings %s", s) @@ -119,7 +119,7 @@ def _parse_env_settings(settings: List[str]) -> Dict: async def _read_service_settings( app: web.Application, key: str, tag: str, settings_name: str -) -> Dict: +) -> dict: image_labels = await registry_proxy.get_image_labels(app, key, tag) settings = ( json.loads(image_labels[settings_name]) if settings_name in image_labels else {} @@ -141,7 +141,7 @@ async def _create_docker_service_params( project_id: str, node_base_path: str, internal_network_id: Optional[str], -) -> Dict: +) -> dict: # pylint: disable=too-many-statements service_parameters_labels = await _read_service_settings( app, service_key, service_tag, config.SERVICE_RUNTIME_SETTINGS @@ -331,7 +331,7 @@ async def _create_docker_service_params( ) elif param["name"] == "mount": log.debug("Found mount parameter %s", param["value"]) - mount_settings: List[Dict] = _parse_mount_settings(param["value"]) + mount_settings: list[dict] = _parse_mount_settings(param["value"]) if mount_settings: docker_params["task_template"]["ContainerSpec"]["Mounts"].extend( mount_settings @@ -371,7 +371,7 @@ async def _create_docker_service_params( return docker_params -def _get_service_entrypoint(service_boot_parameters_labels: Dict) -> str: +def _get_service_entrypoint(service_boot_parameters_labels: dict) -> str: log.debug("Getting service entrypoint") for param in service_boot_parameters_labels: _check_setting_correctness(param) @@ -381,10 +381,10 @@ def _get_service_entrypoint(service_boot_parameters_labels: Dict) -> str: return "" -async def _get_swarm_network(client: aiodocker.docker.Docker) -> Dict: +async def _get_swarm_network(client: aiodocker.docker.Docker) -> dict: network_name = "_default" if config.SIMCORE_SERVICES_NETWORK_NAME: - network_name = "{}".format(config.SIMCORE_SERVICES_NETWORK_NAME) + network_name = f"{config.SIMCORE_SERVICES_NETWORK_NAME}" # try to find the network name (usually named STACKNAME_default) networks = [ x @@ -403,8 +403,8 @@ async def _get_swarm_network(client: aiodocker.docker.Docker) -> Dict: async def _get_docker_image_port_mapping( - service: Dict, -) -> Tuple[Optional[str], Optional[int]]: + service: dict, +) -> tuple[Optional[str], Optional[int]]: log.debug("getting port published by service: %s", service["Spec"]["Name"]) published_ports = [] @@ -438,7 +438,7 @@ async def _get_docker_image_port_mapping( async def _pass_port_to_service( service_name: str, port: str, - service_boot_parameters_labels: Dict, + service_boot_parameters_labels: dict, session: ClientSession, ) -> None: for param in service_boot_parameters_labels: @@ -525,16 +525,18 @@ async def _remove_overlay_network_of_swarm( async def _get_service_state( - client: aiodocker.docker.Docker, service: Dict -) -> Tuple[ServiceState, str]: + client: aiodocker.docker.Docker, service: dict +) -> tuple[ServiceState, str]: # some times one has to wait until the task info is filled service_name = service["Spec"]["Name"] log.debug("Getting service %s state", service_name) tasks = await client.tasks.list(filters={"service": service_name}) async def _wait_for_tasks(tasks): - task_started_time = datetime.utcnow() - while (datetime.utcnow() - task_started_time) < timedelta(seconds=20): + task_started_time = datetime.now(timezone.utc).replace(tzinfo=None) + while ( + datetime.now(timezone.utc).replace(tzinfo=None) - task_started_time + ) < timedelta(seconds=20): tasks = await client.tasks.list(filters={"service": 
service_name}) # only keep the ones with the right service ID (we're being a bit picky maybe) tasks = [x for x in tasks if x["ServiceID"] == service["ID"]] @@ -579,7 +581,7 @@ async def _wait_for_tasks(tasks): elif task_state in ("ready", "starting"): last_task_state = ServiceState.STARTING elif task_state in ("running"): - now = datetime.utcnow() + now = datetime.now(timezone.utc).replace(tzinfo=None) # NOTE: task_state_update_time is only used to discrimitate between 'starting' and 'running' task_state_update_time = parse_as_datetime( last_task["Status"]["Timestamp"], default=now @@ -601,7 +603,7 @@ async def _wait_for_tasks(tasks): async def _wait_until_service_running_or_failed( - client: aiodocker.docker.Docker, service: Dict, node_uuid: str + client: aiodocker.docker.Docker, service: dict, node_uuid: str ) -> None: # some times one has to wait until the task info is filled service_name = service["Spec"]["Name"] @@ -629,7 +631,7 @@ async def _wait_until_service_running_or_failed( async def _get_repos_from_key( app: web.Application, service_key: str -) -> Dict[str, List[Dict]]: +) -> dict[str, list[dict]]: # get the available image for the main service (syntax is image:tag) list_of_images = { service_key: await registry_proxy.list_image_tags(app, service_key) @@ -649,7 +651,7 @@ async def _get_repos_from_key( async def _get_dependant_repos( app: web.Application, service_key: str, service_tag: str -) -> List[Dict]: +) -> list[dict]: list_of_images = await _get_repos_from_key(app, service_key) tag = await _find_service_tag(list_of_images, service_key, service_tag) # look for dependencies @@ -666,7 +668,7 @@ async def _get_dependant_repos( async def _find_service_tag( - list_of_images: Dict, service_key: str, service_tag: str + list_of_images: dict, service_key: str, service_tag: str ) -> str: if not service_key in list_of_images: raise exceptions.ServiceNotAvailableError( @@ -703,7 +705,7 @@ async def _start_docker_service( node_uuid: str, node_base_path: str, internal_network_id: Optional[str], -) -> Dict: # pylint: disable=R0913 +) -> dict: # pylint: disable=R0913 service_parameters = await _create_docker_service_params( app, client, @@ -728,7 +730,7 @@ async def _start_docker_service( if "ID" not in service: # error while starting service raise exceptions.DirectorException( - "Error while starting service: {}".format(str(service)) + f"Error while starting service: {str(service)}" ) log.debug("Service started now waiting for it to run") @@ -792,10 +794,10 @@ async def _create_node( client: aiodocker.docker.Docker, user_id: str, project_id: str, - list_of_services: List[Dict], + list_of_services: list[dict], node_uuid: str, node_base_path: str, -) -> List[Dict]: # pylint: disable=R0913, R0915 +) -> list[dict]: # pylint: disable=R0913, R0915 log.debug( "Creating %s docker services for node %s and base path %s for user %s", len(list_of_services), @@ -834,8 +836,8 @@ async def _create_node( async def _get_service_key_version_from_docker_service( - service: Dict, -) -> Tuple[str, str]: + service: dict, +) -> tuple[str, str]: service_full_name = str(service["Spec"]["TaskTemplate"]["ContainerSpec"]["Image"]) if not service_full_name.startswith(config.REGISTRY_PATH): raise exceptions.DirectorException( @@ -853,7 +855,7 @@ async def _get_service_key_version_from_docker_service( return service_key, service_tag -async def _get_service_basepath_from_docker_service(service: Dict) -> str: +async def _get_service_basepath_from_docker_service(service: dict) -> str: envs_list = 
service["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"] envs_dict = dict(x.split("=") for x in envs_list) return envs_dict["SIMCORE_NODE_BASEPATH"] @@ -867,7 +869,7 @@ async def start_service( service_tag: str, node_uuid: str, node_base_path: str, -) -> Dict: +) -> dict: # pylint: disable=C0103 log.debug( "starting service %s:%s using uuid %s, basepath %s", @@ -912,8 +914,8 @@ async def start_service( async def _get_node_details( - app: web.Application, client: aiodocker.docker.Docker, service: Dict -) -> Dict: + app: web.Application, client: aiodocker.docker.Docker, service: dict +) -> dict: service_key, service_tag = await _get_service_key_version_from_docker_service( service ) @@ -957,7 +959,7 @@ async def _get_node_details( async def get_services_details( app: web.Application, user_id: Optional[str], study_id: Optional[str] -) -> List[Dict]: +) -> list[dict]: async with docker_utils.docker_client() as client: # pylint: disable=not-async-context-manager try: filters = ["type=main", f"swarm_stack_name={config.SWARM_STACK_NAME}"] @@ -985,7 +987,7 @@ async def get_services_details( ) from err -async def get_service_details(app: web.Application, node_uuid: str) -> Dict: +async def get_service_details(app: web.Application, node_uuid: str) -> dict: async with docker_utils.docker_client() as client: # pylint: disable=not-async-context-manager try: list_running_services_with_uuid = await client.services.list( @@ -1123,7 +1125,7 @@ async def stop_service(app: web.Application, node_uuid: str, save_state: bool) - "Could not save state because %s is unreachable [%s]." "Resuming stop_service.", service_host_name, - err + err, ) # remove the services diff --git a/services/director/tests/test_utils.py b/services/director/tests/test_utils.py index 3141d2f2baa..7eb34535333 100644 --- a/services/director/tests/test_utils.py +++ b/services/director/tests/test_utils.py @@ -1,4 +1,4 @@ -from datetime import datetime +from datetime import datetime, timezone import pytest from simcore_service_director.utils import parse_as_datetime @@ -26,7 +26,7 @@ def test_parse_valid_time_strings(timestr): def test_parse_invalid_timestr(): - now = datetime.utcnow() + now = datetime.now(timezone.utc).replace(tzinfo=None) invalid_timestr = "2020-10-09T12:28" # w/ default, it should NOT raise diff --git a/services/web/server/src/simcore_service_webserver/exporter/formatters/models.py b/services/web/server/src/simcore_service_webserver/exporter/formatters/models.py index 3802f1c192b..ad06593a1f3 100644 --- a/services/web/server/src/simcore_service_webserver/exporter/formatters/models.py +++ b/services/web/server/src/simcore_service_webserver/exporter/formatters/models.py @@ -1,5 +1,5 @@ import uuid -from datetime import datetime +from datetime import datetime, timezone from pathlib import Path from typing import Callable, Optional, Union @@ -90,7 +90,7 @@ class ManifestFile(BaseLoadingModel): creation_date_utc: datetime = Field( description="UTC date and time from when the project was exported", - default_factory=datetime.utcnow, + default_factory=datetime.now(timezone.utc).replace(tzinfo=None), ) attachments: list[str] = Field( From 588f9c3e9b73a1e18f2f90cf6da25d444c16749a Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Thu, 23 Feb 2023 10:28:32 +0100 Subject: [PATCH 05/12] correction of datetime UTC timezone --- .../modules/dynamic_sidecar/docker_api/_volume.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_volume.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_volume.py index faf4bda3f24..d4dccc9d509 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_volume.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/docker_api/_volume.py @@ -152,7 +152,7 @@ async def remove_pending_volume_removal_services( volume_removal_service["Spec"]["Labels"]["service_timeout_s"] ) created_at = to_datetime(volume_removal_services[0]["CreatedAt"]) - time_diff = datetime.now(timezone.utc) - created_at + time_diff = datetime.now(timezone.utc).replace(tzinfo=None) - created_at service_timed_out = time_diff.seconds > (service_timeout_s * 10) if service_timed_out: service_id = volume_removal_service["ID"] From f4cf1ebd26894b52d3058f944dc09ed999c50696 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Thu, 23 Feb 2023 11:08:46 +0100 Subject: [PATCH 06/12] correction of datetime UTC timezone --- .../src/models_library/utils/pydantic_models_factory.py | 2 +- .../src/servicelib/long_running_tasks/_models.py | 2 +- .../src/simcore_service_webserver/exporter/formatters/models.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/models-library/src/models_library/utils/pydantic_models_factory.py b/packages/models-library/src/models_library/utils/pydantic_models_factory.py index d43e6745e98..641398dc541 100644 --- a/packages/models-library/src/models_library/utils/pydantic_models_factory.py +++ b/packages/models-library/src/models_library/utils/pydantic_models_factory.py @@ -134,7 +134,7 @@ def _extract_field_definitions( or, for complex use-cases, in the format `=`, e.g. 
- `foo=Field(default_factory=datetime.now(timezone.utc).replace(tzinfo=None), alias='bar')` + `foo=Field(default_factory=datetime.utcnow, alias='bar')` """ field_names = _eval_selection( diff --git a/packages/service-library/src/servicelib/long_running_tasks/_models.py b/packages/service-library/src/servicelib/long_running_tasks/_models.py index e609ff684bb..bdf223bf4df 100644 --- a/packages/service-library/src/servicelib/long_running_tasks/_models.py +++ b/packages/service-library/src/servicelib/long_running_tasks/_models.py @@ -77,7 +77,7 @@ class TrackedTask(BaseModel): ) started: datetime = Field( - default_factory=datetime.now(timezone.utc).replace(tzinfo=None) + default_factory=lambda: datetime.now(timezone.utc).replace(tzinfo=None) ) last_status_check: Optional[datetime] = Field( default=None, diff --git a/services/web/server/src/simcore_service_webserver/exporter/formatters/models.py b/services/web/server/src/simcore_service_webserver/exporter/formatters/models.py index ad06593a1f3..bcf692bc846 100644 --- a/services/web/server/src/simcore_service_webserver/exporter/formatters/models.py +++ b/services/web/server/src/simcore_service_webserver/exporter/formatters/models.py @@ -90,7 +90,7 @@ class ManifestFile(BaseLoadingModel): creation_date_utc: datetime = Field( description="UTC date and time from when the project was exported", - default_factory=datetime.now(timezone.utc).replace(tzinfo=None), + default_factory=lambda: datetime.now(timezone.utc).replace(tzinfo=None), ) attachments: list[str] = Field( From 47c2fd337f621eda9b5621b8fba92589dfb145eb Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Thu, 23 Feb 2023 14:11:27 +0100 Subject: [PATCH 07/12] correction of datetime UTC timezone --- .../src/simcore_service_director/producer.py | 76 +++++++++---------- services/director/tests/test_utils.py | 4 +- 2 files changed, 39 insertions(+), 41 deletions(-) diff --git a/services/director/src/simcore_service_director/producer.py b/services/director/src/simcore_service_director/producer.py index 7573b171949..a34f06508fc 100644 --- a/services/director/src/simcore_service_director/producer.py +++ b/services/director/src/simcore_service_director/producer.py @@ -2,12 +2,12 @@ import json import logging import re -from datetime import datetime, timedelta, timezone +from datetime import datetime, timedelta from distutils.version import StrictVersion from enum import Enum from http import HTTPStatus from pprint import pformat -from typing import Optional +from typing import Dict, List, Optional, Tuple import aiodocker import aiohttp @@ -50,7 +50,7 @@ class ServiceState(Enum): FAILED = "failed" -async def _create_auth() -> dict[str, str]: +async def _create_auth() -> Dict[str, str]: return {"username": config.REGISTRY_USER, "password": config.REGISTRY_PW} @@ -74,12 +74,12 @@ async def _check_node_uuid_available( log.debug("UUID %s is free", node_uuid) -def _check_setting_correctness(setting: dict) -> None: +def _check_setting_correctness(setting: Dict) -> None: if "name" not in setting or "type" not in setting or "value" not in setting: raise exceptions.DirectorException("Invalid setting in %s" % setting) -def _parse_mount_settings(settings: list[dict]) -> list[dict]: +def _parse_mount_settings(settings: List[Dict]) -> List[Dict]: mounts = [] for s in settings: log.debug("Retrieved mount settings %s", s) @@ -103,7 +103,7 @@ def _parse_mount_settings(settings: list[dict]) -> list[dict]: return mounts -def _parse_env_settings(settings: list[str]) -> dict: +def _parse_env_settings(settings: 
List[str]) -> Dict: envs = {} for s in settings: log.debug("Retrieved env settings %s", s) @@ -119,7 +119,7 @@ def _parse_env_settings(settings: list[str]) -> dict: async def _read_service_settings( app: web.Application, key: str, tag: str, settings_name: str -) -> dict: +) -> Dict: image_labels = await registry_proxy.get_image_labels(app, key, tag) settings = ( json.loads(image_labels[settings_name]) if settings_name in image_labels else {} @@ -141,7 +141,7 @@ async def _create_docker_service_params( project_id: str, node_base_path: str, internal_network_id: Optional[str], -) -> dict: +) -> Dict: # pylint: disable=too-many-statements service_parameters_labels = await _read_service_settings( app, service_key, service_tag, config.SERVICE_RUNTIME_SETTINGS @@ -331,7 +331,7 @@ async def _create_docker_service_params( ) elif param["name"] == "mount": log.debug("Found mount parameter %s", param["value"]) - mount_settings: list[dict] = _parse_mount_settings(param["value"]) + mount_settings: List[Dict] = _parse_mount_settings(param["value"]) if mount_settings: docker_params["task_template"]["ContainerSpec"]["Mounts"].extend( mount_settings @@ -371,7 +371,7 @@ async def _create_docker_service_params( return docker_params -def _get_service_entrypoint(service_boot_parameters_labels: dict) -> str: +def _get_service_entrypoint(service_boot_parameters_labels: Dict) -> str: log.debug("Getting service entrypoint") for param in service_boot_parameters_labels: _check_setting_correctness(param) @@ -381,10 +381,10 @@ def _get_service_entrypoint(service_boot_parameters_labels: dict) -> str: return "" -async def _get_swarm_network(client: aiodocker.docker.Docker) -> dict: +async def _get_swarm_network(client: aiodocker.docker.Docker) -> Dict: network_name = "_default" if config.SIMCORE_SERVICES_NETWORK_NAME: - network_name = f"{config.SIMCORE_SERVICES_NETWORK_NAME}" + network_name = "{}".format(config.SIMCORE_SERVICES_NETWORK_NAME) # try to find the network name (usually named STACKNAME_default) networks = [ x @@ -403,8 +403,8 @@ async def _get_swarm_network(client: aiodocker.docker.Docker) -> dict: async def _get_docker_image_port_mapping( - service: dict, -) -> tuple[Optional[str], Optional[int]]: + service: Dict, +) -> Tuple[Optional[str], Optional[int]]: log.debug("getting port published by service: %s", service["Spec"]["Name"]) published_ports = [] @@ -438,7 +438,7 @@ async def _get_docker_image_port_mapping( async def _pass_port_to_service( service_name: str, port: str, - service_boot_parameters_labels: dict, + service_boot_parameters_labels: Dict, session: ClientSession, ) -> None: for param in service_boot_parameters_labels: @@ -525,18 +525,16 @@ async def _remove_overlay_network_of_swarm( async def _get_service_state( - client: aiodocker.docker.Docker, service: dict -) -> tuple[ServiceState, str]: + client: aiodocker.docker.Docker, service: Dict +) -> Tuple[ServiceState, str]: # some times one has to wait until the task info is filled service_name = service["Spec"]["Name"] log.debug("Getting service %s state", service_name) tasks = await client.tasks.list(filters={"service": service_name}) async def _wait_for_tasks(tasks): - task_started_time = datetime.now(timezone.utc).replace(tzinfo=None) - while ( - datetime.now(timezone.utc).replace(tzinfo=None) - task_started_time - ) < timedelta(seconds=20): + task_started_time = datetime.utcnow() + while (datetime.utcnow() - task_started_time) < timedelta(seconds=20): tasks = await client.tasks.list(filters={"service": service_name}) # only keep the ones with 
the right service ID (we're being a bit picky maybe) tasks = [x for x in tasks if x["ServiceID"] == service["ID"]] @@ -581,7 +579,7 @@ async def _wait_for_tasks(tasks): elif task_state in ("ready", "starting"): last_task_state = ServiceState.STARTING elif task_state in ("running"): - now = datetime.now(timezone.utc).replace(tzinfo=None) + now = datetime.utcnow() # NOTE: task_state_update_time is only used to discrimitate between 'starting' and 'running' task_state_update_time = parse_as_datetime( last_task["Status"]["Timestamp"], default=now @@ -603,7 +601,7 @@ async def _wait_for_tasks(tasks): async def _wait_until_service_running_or_failed( - client: aiodocker.docker.Docker, service: dict, node_uuid: str + client: aiodocker.docker.Docker, service: Dict, node_uuid: str ) -> None: # some times one has to wait until the task info is filled service_name = service["Spec"]["Name"] @@ -631,7 +629,7 @@ async def _wait_until_service_running_or_failed( async def _get_repos_from_key( app: web.Application, service_key: str -) -> dict[str, list[dict]]: +) -> Dict[str, List[Dict]]: # get the available image for the main service (syntax is image:tag) list_of_images = { service_key: await registry_proxy.list_image_tags(app, service_key) @@ -651,7 +649,7 @@ async def _get_repos_from_key( async def _get_dependant_repos( app: web.Application, service_key: str, service_tag: str -) -> list[dict]: +) -> List[Dict]: list_of_images = await _get_repos_from_key(app, service_key) tag = await _find_service_tag(list_of_images, service_key, service_tag) # look for dependencies @@ -668,7 +666,7 @@ async def _get_dependant_repos( async def _find_service_tag( - list_of_images: dict, service_key: str, service_tag: str + list_of_images: Dict, service_key: str, service_tag: str ) -> str: if not service_key in list_of_images: raise exceptions.ServiceNotAvailableError( @@ -705,7 +703,7 @@ async def _start_docker_service( node_uuid: str, node_base_path: str, internal_network_id: Optional[str], -) -> dict: # pylint: disable=R0913 +) -> Dict: # pylint: disable=R0913 service_parameters = await _create_docker_service_params( app, client, @@ -730,7 +728,7 @@ async def _start_docker_service( if "ID" not in service: # error while starting service raise exceptions.DirectorException( - f"Error while starting service: {str(service)}" + "Error while starting service: {}".format(str(service)) ) log.debug("Service started now waiting for it to run") @@ -794,10 +792,10 @@ async def _create_node( client: aiodocker.docker.Docker, user_id: str, project_id: str, - list_of_services: list[dict], + list_of_services: List[Dict], node_uuid: str, node_base_path: str, -) -> list[dict]: # pylint: disable=R0913, R0915 +) -> List[Dict]: # pylint: disable=R0913, R0915 log.debug( "Creating %s docker services for node %s and base path %s for user %s", len(list_of_services), @@ -836,8 +834,8 @@ async def _create_node( async def _get_service_key_version_from_docker_service( - service: dict, -) -> tuple[str, str]: + service: Dict, +) -> Tuple[str, str]: service_full_name = str(service["Spec"]["TaskTemplate"]["ContainerSpec"]["Image"]) if not service_full_name.startswith(config.REGISTRY_PATH): raise exceptions.DirectorException( @@ -855,7 +853,7 @@ async def _get_service_key_version_from_docker_service( return service_key, service_tag -async def _get_service_basepath_from_docker_service(service: dict) -> str: +async def _get_service_basepath_from_docker_service(service: Dict) -> str: envs_list = service["Spec"]["TaskTemplate"]["ContainerSpec"]["Env"] envs_dict 
= dict(x.split("=") for x in envs_list) return envs_dict["SIMCORE_NODE_BASEPATH"] @@ -869,7 +867,7 @@ async def start_service( service_tag: str, node_uuid: str, node_base_path: str, -) -> dict: +) -> Dict: # pylint: disable=C0103 log.debug( "starting service %s:%s using uuid %s, basepath %s", @@ -914,8 +912,8 @@ async def start_service( async def _get_node_details( - app: web.Application, client: aiodocker.docker.Docker, service: dict -) -> dict: + app: web.Application, client: aiodocker.docker.Docker, service: Dict +) -> Dict: service_key, service_tag = await _get_service_key_version_from_docker_service( service ) @@ -959,7 +957,7 @@ async def _get_node_details( async def get_services_details( app: web.Application, user_id: Optional[str], study_id: Optional[str] -) -> list[dict]: +) -> List[Dict]: async with docker_utils.docker_client() as client: # pylint: disable=not-async-context-manager try: filters = ["type=main", f"swarm_stack_name={config.SWARM_STACK_NAME}"] @@ -987,7 +985,7 @@ async def get_services_details( ) from err -async def get_service_details(app: web.Application, node_uuid: str) -> dict: +async def get_service_details(app: web.Application, node_uuid: str) -> Dict: async with docker_utils.docker_client() as client: # pylint: disable=not-async-context-manager try: list_running_services_with_uuid = await client.services.list( @@ -1125,7 +1123,7 @@ async def stop_service(app: web.Application, node_uuid: str, save_state: bool) - "Could not save state because %s is unreachable [%s]." "Resuming stop_service.", service_host_name, - err, + err ) # remove the services diff --git a/services/director/tests/test_utils.py b/services/director/tests/test_utils.py index 7eb34535333..3141d2f2baa 100644 --- a/services/director/tests/test_utils.py +++ b/services/director/tests/test_utils.py @@ -1,4 +1,4 @@ -from datetime import datetime, timezone +from datetime import datetime import pytest from simcore_service_director.utils import parse_as_datetime @@ -26,7 +26,7 @@ def test_parse_valid_time_strings(timestr): def test_parse_invalid_timestr(): - now = datetime.now(timezone.utc).replace(tzinfo=None) + now = datetime.utcnow() invalid_timestr = "2020-10-09T12:28" # w/ default, it should NOT raise From 9035e95e3f5768fb37b6f4af261c5dbe0f6a9b5c Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Fri, 24 Feb 2023 09:55:20 +0100 Subject: [PATCH 08/12] making working with timezone.utc consistent --- .../dynamic_scaling_core.py | 3 ++- .../src/simcore_service_catalog/api/routes/health.py | 2 +- .../api/routes/health.py | 2 +- .../simcore_service_director_v2/api/routes/health.py | 4 +++- .../modules/db/repositories/comp_runs.py | 4 ++-- .../modules/db/repositories/comp_tasks.py | 2 +- services/director-v2/tests/unit/with_dbs/conftest.py | 2 +- .../src/simcore_service_invitations/api/_health.py | 2 +- .../simcore_service_invitations/api/_invitations.py | 2 +- .../src/simcore_service_invitations/invitations.py | 2 +- services/invitations/tests/unit/test_invitations.py | 4 ++-- .../storage/src/simcore_service_storage/models.py | 2 +- .../src/simcore_service_storage/simcore_s3_dsm.py | 4 ++-- services/storage/tests/unit/test_dsm_dsmcleaner.py | 12 ++++++++---- services/storage/tests/unit/test_utils.py | 2 +- .../server/tests/unit/isolated/test_users_models.py | 2 +- .../server/tests/unit/with_dbs/02/test_project_db.py | 2 +- 17 files changed, 30 insertions(+), 23 deletions(-) diff --git a/services/autoscaling/src/simcore_service_autoscaling/dynamic_scaling_core.py 
b/services/autoscaling/src/simcore_service_autoscaling/dynamic_scaling_core.py index e05891bf24a..b3ba987f661 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/dynamic_scaling_core.py +++ b/services/autoscaling/src/simcore_service_autoscaling/dynamic_scaling_core.py @@ -109,7 +109,8 @@ async def _find_terminateable_instances( for instance in cluster.drained_nodes: # NOTE: AWS price is hourly based (e.g. same price for a machine used 2 minutes or 1 hour, so we wait until 55 minutes) elapsed_time_since_launched = ( - datetime.now(timezone.utc) - instance.ec2_instance.launch_time + datetime.now(timezone.utc).replace(tzinfo=None) + - instance.ec2_instance.launch_time ) elapsed_time_since_full_hour = elapsed_time_since_launched % timedelta(hours=1) if ( diff --git a/services/catalog/src/simcore_service_catalog/api/routes/health.py b/services/catalog/src/simcore_service_catalog/api/routes/health.py index 8f636f448ce..b2051dd6c13 100644 --- a/services/catalog/src/simcore_service_catalog/api/routes/health.py +++ b/services/catalog/src/simcore_service_catalog/api/routes/health.py @@ -7,4 +7,4 @@ @router.get("/", include_in_schema=False) async def check_service_health(): - return f"{__name__}@{datetime.now(timezone.utc).isoformat()}" + return f"{__name__}@{datetime.now(timezone.utc).replace(tzinfo=None).isoformat()}" diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py index 24404c1f3de..cfb7cd0a33f 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py @@ -23,7 +23,7 @@ status_code=status.HTTP_200_OK, ) async def get_service_alive(): - return f"{__name__}@{datetime.now(timezone.utc).isoformat()}" + return f"{__name__}@{datetime.now(timezone.utc).replace(tzinfo=None).isoformat()}" @router.get("/ready", status_code=status.HTTP_200_OK, response_model=AppStatusCheck) diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/health.py b/services/director-v2/src/simcore_service_director_v2/api/routes/health.py index 77a63d204eb..3aec6901fbd 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/health.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/health.py @@ -7,4 +7,6 @@ @router.get("/") async def check_service_health() -> dict[str, str]: - return {"timestamp": f"{__name__}@{datetime.now(timezone.utc).isoformat()}"} + return { + "timestamp": f"{__name__}@{datetime.now(timezone.utc).replace(tzinfo=None).isoformat()}" + } diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py index 62c0829a9e7..1f2fad7634b 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py @@ -99,7 +99,7 @@ async def create( cluster_id=cluster_id if cluster_id != DEFAULT_CLUSTER_ID else None, iteration=iteration, result=RUNNING_STATE_TO_DB[RunningState.PUBLISHED], - started=datetime.now(timezone.utc), + started=datetime.now(timezone.utc).replace(tzinfo=None), ) .returning(literal_column("*")) ) @@ -133,7 +133,7 @@ async def set_run_result( ) -> Optional[CompRunsAtDB]: values = {"result": RUNNING_STATE_TO_DB[result_state]} if 
final_state: - values.update({"ended": datetime.now(timezone.utc)}) + values.update({"ended": datetime.now(timezone.utc).replace(tzinfo=None)}) return await self.update( user_id, project_id, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks.py index 5a1cdcaaeab..0558aa7ee53 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks.py @@ -94,7 +94,7 @@ async def _generate_tasks_list_from_project( inputs=node.inputs, outputs=node.outputs, image=image, - submit=datetime.now(timezone.utc), + submit=datetime.now(timezone.utc).replace(tzinfo=None), state=task_state, internal_id=internal_id, node_class=node_class, diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index 7a5648cc910..2f9f5864aae 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -99,7 +99,7 @@ def creator( ), # type: ignore "node_class": to_node_class(node_data.key), "internal_id": internal_id + 1, - "submit": datetime.now(timezone.utc), + "submit": datetime.now(timezone.utc).replace(tzinfo=None), "job_id": generate_dask_job_id( service_key=node_data.key, service_version=node_data.version, diff --git a/services/invitations/src/simcore_service_invitations/api/_health.py b/services/invitations/src/simcore_service_invitations/api/_health.py index 611f200e73e..79b24ceb468 100644 --- a/services/invitations/src/simcore_service_invitations/api/_health.py +++ b/services/invitations/src/simcore_service_invitations/api/_health.py @@ -15,4 +15,4 @@ @router.get("/", response_class=PlainTextResponse) async def healthcheck(): - return f"{__name__}@{datetime.now(timezone.utc).isoformat()}" + return f"{__name__}@{datetime.now(timezone.utc).replace(tzinfo=None).isoformat()}" diff --git a/services/invitations/src/simcore_service_invitations/api/_invitations.py b/services/invitations/src/simcore_service_invitations/api/_invitations.py index c2577cdc16f..5207001d8df 100644 --- a/services/invitations/src/simcore_service_invitations/api/_invitations.py +++ b/services/invitations/src/simcore_service_invitations/api/_invitations.py @@ -90,7 +90,7 @@ async def create_invitation( ) invitation = _InvitationContentAndLink( invitation_url=invitation_link, - created=datetime.now(timezone.utc), + created=datetime.now(timezone.utc).replace(tzinfo=None), **invitation_inputs.dict(), ) diff --git a/services/invitations/src/simcore_service_invitations/invitations.py b/services/invitations/src/simcore_service_invitations/invitations.py index c4f5c5bb065..721b750e473 100644 --- a/services/invitations/src/simcore_service_invitations/invitations.py +++ b/services/invitations/src/simcore_service_invitations/invitations.py @@ -146,7 +146,7 @@ def _create_invitation_code( # builds content content = InvitationContent( **invitation_data.dict(), - created=datetime.now(timezone.utc), + created=datetime.now(timezone.utc).replace(tzinfo=None), ) content_jsonstr: str = _ContentWithShortNames.serialize(content) diff --git a/services/invitations/tests/unit/test_invitations.py b/services/invitations/tests/unit/test_invitations.py index 76fc2751448..9ff79b4aec8 100644 --- a/services/invitations/tests/unit/test_invitations.py +++ 
b/services/invitations/tests/unit/test_invitations.py @@ -39,7 +39,7 @@ def test_import_and_export_invitation_alias_by_alias( ): expected_content = InvitationContent( **invitation_data.dict(), - created=datetime.now(timezone.utc), + created=datetime.now(timezone.utc).replace(tzinfo=None), ) raw_data = _ContentWithShortNames.serialize(expected_content) @@ -52,7 +52,7 @@ def test_export_by_alias_produces_smaller_strings( ): content = InvitationContent( **invitation_data.dict(), - created=datetime.now(timezone.utc), + created=datetime.now(timezone.utc).replace(tzinfo=None), ) raw_data = _ContentWithShortNames.serialize(content) diff --git a/services/storage/src/simcore_service_storage/models.py b/services/storage/src/simcore_service_storage/models.py index eaa1fc618e7..37b0c0c3348 100644 --- a/services/storage/src/simcore_service_storage/models.py +++ b/services/storage/src/simcore_service_storage/models.py @@ -91,7 +91,7 @@ def from_simcore_node( ): parts = file_id.split("/") - now = datetime.now(timezone.utc) + now = datetime.now(timezone.utc).replace(tzinfo=None) fmd_kwargs = { "file_uuid": file_id, "location_id": location_id, diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py index b1d416bdcb6..4a7a7ce4130 100644 --- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py +++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py @@ -586,7 +586,7 @@ async def _clean_expired_uploads(self): 1. will try to update the entry from S3 backend if exists 2. will delete the entry if nothing exists in S3 backend. """ - now = datetime.now(timezone.utc) + now = datetime.now(timezone.utc).replace(tzinfo=None) async with self.engine.acquire() as conn: list_of_expired_uploads = await db_file_meta_data.list_fmds( conn, expired_after=now @@ -821,7 +821,7 @@ async def _create_fmd_for_upload( file_id: StorageFileID, upload_id: Optional[UploadID], ) -> FileMetaDataAtDB: - now = datetime.now(timezone.utc) + now = datetime.now(timezone.utc).replace(tzinfo=None) upload_expiration_date = now + timedelta( seconds=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS ) diff --git a/services/storage/tests/unit/test_dsm_dsmcleaner.py b/services/storage/tests/unit/test_dsm_dsmcleaner.py index ee90143dd2b..0fce1950da7 100644 --- a/services/storage/tests/unit/test_dsm_dsmcleaner.py +++ b/services/storage/tests/unit/test_dsm_dsmcleaner.py @@ -120,7 +120,7 @@ async def test_clean_expired_uploads_deletes_expired_pending_uploads( await conn.execute( file_meta_data.update() .where(file_meta_data.c.file_id == simcore_file_id) - .values(upload_expires_at=datetime.now(timezone.utc)) + .values(upload_expires_at=datetime.now(timezone.utc).replace(tzinfo=None)) ) await asyncio.sleep(1) await simcore_s3_dsm.clean_expired_uploads() @@ -195,7 +195,7 @@ async def test_clean_expired_uploads_reverts_to_last_known_version_expired_pendi await conn.execute( file_meta_data.update() .where(file_meta_data.c.file_id == file_id) - .values(upload_expires_at=datetime.now(timezone.utc)) + .values(upload_expires_at=datetime.now(timezone.utc).replace(tzinfo=None)) ) await asyncio.sleep(1) await simcore_s3_dsm.clean_expired_uploads() @@ -230,7 +230,9 @@ async def test_clean_expired_uploads_does_not_clean_multipart_upload_on_creation ): """This test reproduces what create_file_upload_links in dsm does, but running the cleaner in between to ensure the cleaner does not break the mechanism""" - later_than_now = datetime.now(timezone.utc) 
+ timedelta(minutes=5) + later_than_now = datetime.now(timezone.utc).replace(tzinfo=None) + timedelta( + minutes=5 + ) fmd = FileMetaData.from_simcore_node( user_id, simcore_file_id, @@ -289,7 +291,9 @@ async def test_clean_expired_uploads_cleans_dangling_multipart_uploads_if_no_cor ): """This test reproduces what create_file_upload_links in dsm does, but running the cleaner in between to ensure the cleaner does not break the mechanism""" - later_than_now = datetime.now(timezone.utc) + timedelta(minutes=5) + later_than_now = datetime.now(timezone.utc).replace(tzinfo=None) + timedelta( + minutes=5 + ) fmd = FileMetaData.from_simcore_node( user_id, simcore_file_id, diff --git a/services/storage/tests/unit/test_utils.py b/services/storage/tests/unit/test_utils.py index ee0d8753cd9..a871fd5ca61 100644 --- a/services/storage/tests/unit/test_utils.py +++ b/services/storage/tests/unit/test_utils.py @@ -54,7 +54,7 @@ async def test_download_files(tmpdir): random.randint(1, 1000000), "some_valid_entity_tag", None, - datetime.now(timezone.utc), + datetime.now(timezone.utc).replace(tzinfo=None), False, ), (random.randint(1, 1000000), "some_valid_entity_tag", None, None, True), diff --git a/services/web/server/tests/unit/isolated/test_users_models.py b/services/web/server/tests/unit/isolated/test_users_models.py index 24d01bcf66d..9d48c492184 100644 --- a/services/web/server/tests/unit/isolated/test_users_models.py +++ b/services/web/server/tests/unit/isolated/test_users_models.py @@ -50,7 +50,7 @@ def test_user_models_examples( def test_profile_get_expiration_date(faker: Faker): - fake_expiration = datetime.now(timezone.utc) + fake_expiration = datetime.now(timezone.utc).replace(tzinfo=None) profile = ProfileGet( id=1, login=faker.email(), role=UserRole.ADMIN, expiration_date=fake_expiration diff --git a/services/web/server/tests/unit/with_dbs/02/test_project_db.py b/services/web/server/tests/unit/with_dbs/02/test_project_db.py index b190d486d7f..a9c4fb910bc 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_project_db.py +++ b/services/web/server/tests/unit/with_dbs/02/test_project_db.py @@ -90,7 +90,7 @@ def test_convert_to_schema_names(fake_project: dict[str, Any]): assert col is not None # test date time conversion - date = datetime.now(timezone.utc) + date = datetime.now(timezone.utc).replace(tzinfo=None) db_entries["creation_date"] = date schema_entries = convert_to_schema_names(db_entries, fake_project["prjOwner"]) assert "creationDate" in schema_entries From fddd13793f97a4c65663b752ed5e24470882e342 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Fri, 24 Feb 2023 11:51:06 +0100 Subject: [PATCH 09/12] correction of failing autoscaling unit tests --- .../tests/unit/test_dynamic_scaling_core.py | 44 +++++++++---------- 1 file changed, 21 insertions(+), 23 deletions(-) diff --git a/services/autoscaling/tests/unit/test_dynamic_scaling_core.py b/services/autoscaling/tests/unit/test_dynamic_scaling_core.py index 82dc5012455..13b1dd0db66 100644 --- a/services/autoscaling/tests/unit/test_dynamic_scaling_core.py +++ b/services/autoscaling/tests/unit/test_dynamic_scaling_core.py @@ -8,10 +8,10 @@ import asyncio import base64 import dataclasses -import datetime import pickle import warnings from dataclasses import dataclass +from datetime import datetime, timedelta, timezone from typing import Any, AsyncIterator, Awaitable, Callable, Iterator from unittest import mock @@ -768,7 +768,7 @@ async def test__find_terminateable_nodes_with_drained_host( assert 
app_settings.AUTOSCALING_EC2_INSTANCES assert ( app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - > datetime.timedelta(seconds=10) + > timedelta(seconds=10) ), "this tests relies on the fact that the time before termination is above 10 seconds" # if the instance started just about now, then it should not be terminateable @@ -777,7 +777,7 @@ async def test__find_terminateable_nodes_with_drained_host( AssociatedInstance( drained_host_node, fake_ec2_instance_data( - launch_time=datetime.datetime.now(datetime.timezone.utc) + launch_time=datetime.now(timezone.utc).replace(tzinfo=None) ), ) ], @@ -785,7 +785,7 @@ async def test__find_terminateable_nodes_with_drained_host( AssociatedInstance( drained_host_node, fake_ec2_instance_data( - launch_time=datetime.datetime.now(datetime.timezone.utc) + launch_time=datetime.now(timezone.utc).replace(tzinfo=None) ), ) ], @@ -803,10 +803,10 @@ async def test__find_terminateable_nodes_with_drained_host( AssociatedInstance( drained_host_node, fake_ec2_instance_data( - launch_time=datetime.datetime.now(datetime.timezone.utc) + launch_time=datetime.now(timezone.utc).replace(tzinfo=None) - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - - datetime.timedelta(days=21) - + datetime.timedelta(seconds=10) + - timedelta(days=21) + + timedelta(seconds=10) ), ) ], @@ -814,10 +814,10 @@ async def test__find_terminateable_nodes_with_drained_host( AssociatedInstance( drained_host_node, fake_ec2_instance_data( - launch_time=datetime.datetime.now(datetime.timezone.utc) + launch_time=datetime.now(timezone.utc).replace(tzinfo=None) - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - - datetime.timedelta(days=21) - + datetime.timedelta(seconds=10) + - timedelta(days=21) + + timedelta(seconds=10) ), ) ], @@ -835,10 +835,10 @@ async def test__find_terminateable_nodes_with_drained_host( AssociatedInstance( drained_host_node, fake_ec2_instance_data( - launch_time=datetime.datetime.now(datetime.timezone.utc) + launch_time=datetime.now(timezone.utc).replace(tzinfo=None) - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - - datetime.timedelta(days=21) - - datetime.timedelta(seconds=10), + - timedelta(days=21) + - timedelta(seconds=10), ), ) ], @@ -846,10 +846,10 @@ async def test__find_terminateable_nodes_with_drained_host( AssociatedInstance( drained_host_node, fake_ec2_instance_data( - launch_time=datetime.datetime.now(datetime.timezone.utc) + launch_time=datetime.now(timezone.utc).replace(tzinfo=None) - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - - datetime.timedelta(days=21) - - datetime.timedelta(seconds=10), + - timedelta(days=21) + - timedelta(seconds=10), ), ) ], @@ -873,22 +873,20 @@ def create_associated_instance( assert app_settings.AUTOSCALING_EC2_INSTANCES assert ( app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - > datetime.timedelta(seconds=10) + > timedelta(seconds=10) ), "this tests relies on the fact that the time before termination is above 10 seconds" def _creator(node: Node, terminateable_time: bool) -> AssociatedInstance: assert app_settings.AUTOSCALING_EC2_INSTANCES seconds_delta = ( - -datetime.timedelta(seconds=10) - if terminateable_time - else datetime.timedelta(seconds=10) + -timedelta(seconds=10) if terminateable_time else timedelta(seconds=10) ) return AssociatedInstance( node, fake_ec2_instance_data( - launch_time=datetime.datetime.now(datetime.timezone.utc) + 
launch_time=datetime.now(timezone.utc).replace(tzinfo=None) - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - - datetime.timedelta( + - timedelta( days=faker.pyint(min_value=0, max_value=100), hours=faker.pyint(min_value=0, max_value=100), ) @@ -932,7 +930,7 @@ async def test__try_scale_down_cluster( assert app_settings.AUTOSCALING_EC2_INSTANCES assert ( app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - > datetime.timedelta(seconds=10) + > timedelta(seconds=10) ), "this tests relies on the fact that the time before termination is above 10 seconds" active_cluster = cluster( From c4c05e6d2068f2bfc1369a8af38a20ea1dc2d08d Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Fri, 24 Feb 2023 11:51:06 +0100 Subject: [PATCH 10/12] Revert "correction of failing autoscaling unit tests" This reverts commit fddd13793f97a4c65663b752ed5e24470882e342. --- .../tests/unit/test_dynamic_scaling_core.py | 44 ++++++++++--------- 1 file changed, 23 insertions(+), 21 deletions(-) diff --git a/services/autoscaling/tests/unit/test_dynamic_scaling_core.py b/services/autoscaling/tests/unit/test_dynamic_scaling_core.py index 13b1dd0db66..82dc5012455 100644 --- a/services/autoscaling/tests/unit/test_dynamic_scaling_core.py +++ b/services/autoscaling/tests/unit/test_dynamic_scaling_core.py @@ -8,10 +8,10 @@ import asyncio import base64 import dataclasses +import datetime import pickle import warnings from dataclasses import dataclass -from datetime import datetime, timedelta, timezone from typing import Any, AsyncIterator, Awaitable, Callable, Iterator from unittest import mock @@ -768,7 +768,7 @@ async def test__find_terminateable_nodes_with_drained_host( assert app_settings.AUTOSCALING_EC2_INSTANCES assert ( app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - > timedelta(seconds=10) + > datetime.timedelta(seconds=10) ), "this tests relies on the fact that the time before termination is above 10 seconds" # if the instance started just about now, then it should not be terminateable @@ -777,7 +777,7 @@ async def test__find_terminateable_nodes_with_drained_host( AssociatedInstance( drained_host_node, fake_ec2_instance_data( - launch_time=datetime.now(timezone.utc).replace(tzinfo=None) + launch_time=datetime.datetime.now(datetime.timezone.utc) ), ) ], @@ -785,7 +785,7 @@ async def test__find_terminateable_nodes_with_drained_host( AssociatedInstance( drained_host_node, fake_ec2_instance_data( - launch_time=datetime.now(timezone.utc).replace(tzinfo=None) + launch_time=datetime.datetime.now(datetime.timezone.utc) ), ) ], @@ -803,10 +803,10 @@ async def test__find_terminateable_nodes_with_drained_host( AssociatedInstance( drained_host_node, fake_ec2_instance_data( - launch_time=datetime.now(timezone.utc).replace(tzinfo=None) + launch_time=datetime.datetime.now(datetime.timezone.utc) - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - - timedelta(days=21) - + timedelta(seconds=10) + - datetime.timedelta(days=21) + + datetime.timedelta(seconds=10) ), ) ], @@ -814,10 +814,10 @@ async def test__find_terminateable_nodes_with_drained_host( AssociatedInstance( drained_host_node, fake_ec2_instance_data( - launch_time=datetime.now(timezone.utc).replace(tzinfo=None) + launch_time=datetime.datetime.now(datetime.timezone.utc) - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - - timedelta(days=21) - + timedelta(seconds=10) + - datetime.timedelta(days=21) + + datetime.timedelta(seconds=10) ), ) 
], @@ -835,10 +835,10 @@ async def test__find_terminateable_nodes_with_drained_host( AssociatedInstance( drained_host_node, fake_ec2_instance_data( - launch_time=datetime.now(timezone.utc).replace(tzinfo=None) + launch_time=datetime.datetime.now(datetime.timezone.utc) - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - - timedelta(days=21) - - timedelta(seconds=10), + - datetime.timedelta(days=21) + - datetime.timedelta(seconds=10), ), ) ], @@ -846,10 +846,10 @@ async def test__find_terminateable_nodes_with_drained_host( AssociatedInstance( drained_host_node, fake_ec2_instance_data( - launch_time=datetime.now(timezone.utc).replace(tzinfo=None) + launch_time=datetime.datetime.now(datetime.timezone.utc) - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - - timedelta(days=21) - - timedelta(seconds=10), + - datetime.timedelta(days=21) + - datetime.timedelta(seconds=10), ), ) ], @@ -873,20 +873,22 @@ def create_associated_instance( assert app_settings.AUTOSCALING_EC2_INSTANCES assert ( app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - > timedelta(seconds=10) + > datetime.timedelta(seconds=10) ), "this tests relies on the fact that the time before termination is above 10 seconds" def _creator(node: Node, terminateable_time: bool) -> AssociatedInstance: assert app_settings.AUTOSCALING_EC2_INSTANCES seconds_delta = ( - -timedelta(seconds=10) if terminateable_time else timedelta(seconds=10) + -datetime.timedelta(seconds=10) + if terminateable_time + else datetime.timedelta(seconds=10) ) return AssociatedInstance( node, fake_ec2_instance_data( - launch_time=datetime.now(timezone.utc).replace(tzinfo=None) + launch_time=datetime.datetime.now(datetime.timezone.utc) - app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - - timedelta( + - datetime.timedelta( days=faker.pyint(min_value=0, max_value=100), hours=faker.pyint(min_value=0, max_value=100), ) @@ -930,7 +932,7 @@ async def test__try_scale_down_cluster( assert app_settings.AUTOSCALING_EC2_INSTANCES assert ( app_settings.AUTOSCALING_EC2_INSTANCES.EC2_INSTANCES_TIME_BEFORE_TERMINATION - > timedelta(seconds=10) + > datetime.timedelta(seconds=10) ), "this tests relies on the fact that the time before termination is above 10 seconds" active_cluster = cluster( From a4032a84a01919b73d6d52333bd297185fdffe8d Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Fri, 24 Feb 2023 11:49:43 +0100 Subject: [PATCH 11/12] Revert "Merge branch 'master' into maintenance/propagating-UTC-timezone-everywhere" This reverts commit 9bce1642becacd6e900e2cea498ffd6ed366ae4f. 
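The datetime convention this series propagates, for reference while reviewing
the reverts: every naive `datetime.utcnow()` call is replaced by
`datetime.now(timezone.utc).replace(tzinfo=None)`, which yields the same
naive-UTC value while going through the timezone-aware API. A minimal sketch
of the pattern and its two pitfalls follows; it assumes pydantic v1 (as used
in these packages), and the names `Stamped` and `utcnow_naive` are
illustrative only, not taken from the codebase.

    from datetime import datetime, timezone

    from pydantic import BaseModel, Field

    def utcnow_naive() -> datetime:
        # Same value as the legacy datetime.utcnow(): take an aware "now"
        # in UTC, then strip tzinfo so it remains comparable with the naive
        # timestamps stored in the database.
        return datetime.now(timezone.utc).replace(tzinfo=None)

    class Stamped(BaseModel):
        # Pitfall 1: default_factory needs a zero-argument callable that is
        # evaluated once per instance. Writing
        # default_factory=datetime.now(timezone.utc).replace(tzinfo=None)
        # evaluates once at import time and passes a datetime rather than a
        # callable, which is why the "correction of datetime UTC timezone"
        # commits earlier in this series wrap the expression in a lambda.
        created: datetime = Field(default_factory=utcnow_naive)

    # Pitfall 2: naive and aware datetimes cannot be mixed in arithmetic or
    # comparisons, so the codebase has to pick one convention and stick to it.
    aware = datetime.now(timezone.utc)
    naive = aware.replace(tzinfo=None)
    try:
        _ = aware - naive
    except TypeError:
        pass  # "can't subtract offset-naive and offset-aware datetimes"

    assert Stamped().created.tzinfo is None  # naive, but UTC-based

Pitfall 2 is what the autoscaling code touched by this revert runs into: EC2
instance launch times coming back from botocore are typically timezone-aware,
so subtracting them from naive timestamps raises at runtime unless one side
is converted first.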
--- .../dynamic_scaling_core.py | 9 ++-- .../modules/ec2.py | 53 ++++++++++++++++--- .../utils/dynamic_scaling.py | 10 +--- .../tests/unit/test_dynamic_scaling_core.py | 32 +---------- .../tests/unit/test_modules_ec2.py | 39 +++++++++++--- 5 files changed, 85 insertions(+), 58 deletions(-) diff --git a/services/autoscaling/src/simcore_service_autoscaling/dynamic_scaling_core.py b/services/autoscaling/src/simcore_service_autoscaling/dynamic_scaling_core.py index 4e13692be67..b3ba987f661 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/dynamic_scaling_core.py +++ b/services/autoscaling/src/simcore_service_autoscaling/dynamic_scaling_core.py @@ -3,6 +3,7 @@ import dataclasses import itertools import logging +from collections import defaultdict from datetime import datetime, timedelta, timezone from typing import cast @@ -272,7 +273,7 @@ async def _find_needed_instances( f"{task.Name or 'unknown task name'}:{task.ServiceID or 'unknown service ID'}", ) - num_instances_per_type = collections.defaultdict( + num_instances_per_type = defaultdict( int, collections.Counter(t for t, _ in needed_new_instance_to_tasks) ) @@ -454,15 +455,15 @@ async def _analyze_current_cluster(app: FastAPI) -> Cluster: node_labels=app_settings.AUTOSCALING_NODES_MONITORING.NODES_MONITORING_NODE_LABELS, ) - # get the EC2 instances we have + # get whatever EC2 instances we have existing_ec2_instances = await get_ec2_client(app).get_instances( app_settings.AUTOSCALING_EC2_INSTANCES, - ec2.get_ec2_tags(app_settings), + list(ec2.get_ec2_tags(app_settings).keys()), ) terminated_ec2_instances = await get_ec2_client(app).get_instances( app_settings.AUTOSCALING_EC2_INSTANCES, - ec2.get_ec2_tags(app_settings), + list(ec2.get_ec2_tags(app_settings).keys()), state_names=["terminated"], ) diff --git a/services/autoscaling/src/simcore_service_autoscaling/modules/ec2.py b/services/autoscaling/src/simcore_service_autoscaling/modules/ec2.py index 330bbe9f814..5241df11f30 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/modules/ec2.py +++ b/services/autoscaling/src/simcore_service_autoscaling/modules/ec2.py @@ -15,7 +15,6 @@ from tenacity.wait import wait_random_exponential from types_aiobotocore_ec2 import EC2Client from types_aiobotocore_ec2.literals import InstanceStateNameType, InstanceTypeType -from types_aiobotocore_ec2.type_defs import FilterTypeDef from ..core.errors import ( ConfigurationError, @@ -99,7 +98,9 @@ async def start_aws_instance( msg=f"launching {number_of_instances} AWS instance(s) {instance_type} with {tags=}", ): # first check the max amount is not already reached - current_instances = await self.get_instances(instance_settings, tags) + current_instances = await self.get_instances( + instance_settings, list(tags.keys()) + ) if ( len(current_instances) + number_of_instances > instance_settings.EC2_INSTANCES_MAX_INSTANCES @@ -161,7 +162,7 @@ async def start_aws_instance( async def get_instances( self, instance_settings: EC2InstancesSettings, - tags: dict[str, str], + tag_keys: list[str], *, state_names: Optional[list[InstanceStateNameType]] = None, ) -> list[EC2InstanceData]: @@ -170,16 +171,14 @@ if state_names is None: state_names = ["pending", "running"] - filters: list[FilterTypeDef] = [ + filters = [ { "Name": "key-name", "Values": [instance_settings.EC2_INSTANCES_KEY_NAME], }, {"Name": "instance-state-name", "Values": state_names}, ] - filters.extend( - [{"Name": f"tag:{key}", "Values": [value]} for key, value in tags.items()] - ) +
filters.extend([{"Name": "tag-key", "Values": [t]} for t in tag_keys]) instances = await self.client.describe_instances(Filters=filters) all_instances = [] @@ -204,6 +203,46 @@ async def get_instances( logger.debug("received: %s", f"{all_instances=}") return all_instances + async def get_running_instance( + self, + instance_settings: EC2InstancesSettings, + tag_keys: list[str], + instance_host_name: str, + ) -> EC2InstanceData: + filters = [ + { + "Name": "key-name", + "Values": [instance_settings.EC2_INSTANCES_KEY_NAME], + }, + {"Name": "instance-state-name", "Values": ["running"]}, + { + "Name": "network-interface.private-dns-name", + "Values": [f"{instance_host_name}.ec2.internal"], + }, + ] + filters.extend([{"Name": "tag-key", "Values": [t]} for t in tag_keys]) + instances = await self.client.describe_instances(Filters=filters) + if not instances["Reservations"]: + # NOTE: wrong hostname, or not running, or wrong usage + raise Ec2InstanceNotFoundError() + + # NOTE: since the hostname is unique, there is only one instance here + assert "Instances" in instances["Reservations"][0] # nosec + instance = instances["Reservations"][0]["Instances"][0] + assert "LaunchTime" in instance # nosec + assert "InstanceId" in instance # nosec + assert "PrivateDnsName" in instance # nosec + assert "InstanceType" in instance # nosec + assert "State" in instance # nosec + assert "Name" in instance["State"] # nosec + return EC2InstanceData( + launch_time=instance["LaunchTime"], + id=instance["InstanceId"], + aws_private_dns=instance["PrivateDnsName"], + type=instance["InstanceType"], + state=instance["State"]["Name"], + ) + async def terminate_instances(self, instance_datas: list[EC2InstanceData]) -> None: try: await self.client.terminate_instances( diff --git a/services/autoscaling/src/simcore_service_autoscaling/utils/dynamic_scaling.py b/services/autoscaling/src/simcore_service_autoscaling/utils/dynamic_scaling.py index be84bd7b861..7d9449512b6 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/utils/dynamic_scaling.py +++ b/services/autoscaling/src/simcore_service_autoscaling/utils/dynamic_scaling.py @@ -85,13 +85,6 @@ def try_assigning_task_to_instances( return False -_TIME_FORMAT = "{:02d}:{:02d}" # format for minutes:seconds - - -def _format_delta(delta: datetime.timedelta) -> str: - return _TIME_FORMAT.format(delta.seconds // 60, delta.seconds % 60) - - async def try_assigning_task_to_pending_instances( app: FastAPI, pending_task: Task, @@ -120,11 +113,10 @@ async def try_assigning_task_to_pending_instances( estimated_time_to_completion = ( instance.launch_time + instance_max_time_to_start - now ) - await log_tasks_message( app, [pending_task], - f"adding machines to the cluster (time waiting: {_format_delta(time_since_launch)}, est. remaining time: {_format_delta(estimated_time_to_completion)})...please wait...", + f"adding machines to the cluster (time waiting: {time_since_launch}, est. 
remaining time: {estimated_time_to_completion})...please wait...", ) await progress_tasks_message( app, diff --git a/services/autoscaling/tests/unit/test_dynamic_scaling_core.py b/services/autoscaling/tests/unit/test_dynamic_scaling_core.py index 82dc5012455..f6651891e89 100644 --- a/services/autoscaling/tests/unit/test_dynamic_scaling_core.py +++ b/services/autoscaling/tests/unit/test_dynamic_scaling_core.py @@ -9,8 +9,6 @@ import base64 import dataclasses import datetime -import pickle -import warnings from dataclasses import dataclass from typing import Any, AsyncIterator, Awaitable, Callable, Iterator from unittest import mock @@ -263,35 +261,8 @@ async def test_cluster_scaling_from_labelled_services_with_no_services_does_noth ) -@pytest.fixture -def patch_get_ec2_tags(mocker: MockerFixture, faker: Faker) -> Iterator[mock.Mock]: - # NOTE: this is needed because of a bug in Moto - # https://github.com/getmoto/moto/issues/5966 - warnings.warn( - "patching get_ec2_tags due to issue https://github.com/getmoto/moto/issues/5966 in moto library...", - UserWarning, - ) - - def _json_without_square_brackets(obj) -> str: - return str(pickle.dumps(obj)) - - mocked_terminate_instance = mocker.patch( - "simcore_service_autoscaling.utils.ec2.get_ec2_tags", - autospec=True, - return_value={ - "io.simcore.autoscaling.version": faker.pystr(), - "io.simcore.autoscaling.monitored_nodes_labels": faker.pystr(), - "io.simcore.autoscaling.monitored_services_labels": faker.pystr(), - # NOTE: this one gets special treatment in AWS GUI and is applied to the name of the instance - "Name": faker.pystr(), - }, - ) - yield mocked_terminate_instance - - async def test_cluster_scaling_from_labelled_services_with_no_services_and_machine_buffer_starts_expected_machines( minimal_configuration: None, - patch_get_ec2_tags: mock.MagicMock, mock_machines_buffer: int, app_settings: ApplicationSettings, initialized_app: FastAPI, @@ -450,7 +421,6 @@ async def _assert_ec2_instances( async def test_cluster_scaling_up( minimal_configuration: None, - patch_get_ec2_tags: mock.MagicMock, service_monitored_labels: dict[DockerLabelKey, str], app_settings: ApplicationSettings, initialized_app: FastAPI, @@ -467,6 +437,7 @@ async def test_cluster_scaling_up( mock_set_node_availability: mock.Mock, # mock_cluster_used_resources: mock.Mock, mock_compute_node_used_resources: mock.Mock, + faker: Faker, ): # we have nothing running now all_instances = await ec2_client.describe_instances() @@ -604,6 +575,7 @@ async def test_cluster_scaling_up_starts_multiple_instances( mock_rabbitmq_post_message: mock.Mock, mock_find_node_with_name: mock.Mock, mock_set_node_availability: mock.Mock, + mocker: MockerFixture, ): # we have nothing running now all_instances = await ec2_client.describe_instances() diff --git a/services/autoscaling/tests/unit/test_modules_ec2.py b/services/autoscaling/tests/unit/test_modules_ec2.py index f412e40f36d..398c2788386 100644 --- a/services/autoscaling/tests/unit/test_modules_ec2.py +++ b/services/autoscaling/tests/unit/test_modules_ec2.py @@ -222,7 +222,31 @@ async def test_start_aws_instance_is_limited_in_number_of_instances( ) -async def test_get_instances( +async def test_get_running_instance_raises_if_not_found( + mocked_aws_server_envs: None, + aws_vpc_id: str, + aws_subnet_id: str, + aws_security_group_id: str, + aws_ami_id: str, + ec2_client: EC2Client, + autoscaling_ec2: AutoscalingEC2, + app_settings: ApplicationSettings, + faker: Faker, +): + assert app_settings.AUTOSCALING_EC2_INSTANCES + # we have nothing 
running now in ec2 + all_instances = await ec2_client.describe_instances() + assert not all_instances["Reservations"] + + with pytest.raises(Ec2InstanceNotFoundError): + await autoscaling_ec2.get_running_instance( + app_settings.AUTOSCALING_EC2_INSTANCES, + tag_keys=[], + instance_host_name=faker.pystr(), + ) + + +async def test_get_running_instance( mocked_aws_server_envs: None, aws_vpc_id: str, aws_subnet_id: str, @@ -238,10 +262,6 @@ async def test_get_instances( # we have nothing running now in ec2 all_instances = await ec2_client.describe_instances() assert not all_instances["Reservations"] - assert ( - await autoscaling_ec2.get_instances(app_settings.AUTOSCALING_EC2_INSTANCES, {}) - == [] - ) # create some instance instance_type = faker.pystr() @@ -256,11 +276,14 @@ async def test_get_instances( ) assert len(created_instances) == 1 - instance_received = await autoscaling_ec2.get_instances( + instance_received = await autoscaling_ec2.get_running_instance( app_settings.AUTOSCALING_EC2_INSTANCES, - tags=tags, + tag_keys=list(tags.keys()), + instance_host_name=created_instances[0].aws_private_dns.split(".ec2.internal")[ + 0 + ], ) - assert created_instances == instance_received + assert created_instances[0] == instance_received async def test_terminate_instance( From ae251e64775635b23facd21d463b30d5c52df6a9 Mon Sep 17 00:00:00 2001 From: matusdrobuliak66 Date: Fri, 24 Feb 2023 09:55:20 +0100 Subject: [PATCH 12/12] Revert "making working with timezone.utc consistent" This reverts commit 9035e95e3f5768fb37b6f4af261c5dbe0f6a9b5c. --- .../dynamic_scaling_core.py | 3 +-- .../src/simcore_service_catalog/api/routes/health.py | 2 +- .../api/routes/health.py | 2 +- .../simcore_service_director_v2/api/routes/health.py | 4 +--- .../modules/db/repositories/comp_runs.py | 4 ++-- .../modules/db/repositories/comp_tasks.py | 2 +- services/director-v2/tests/unit/with_dbs/conftest.py | 2 +- .../src/simcore_service_invitations/api/_health.py | 2 +- .../simcore_service_invitations/api/_invitations.py | 2 +- .../src/simcore_service_invitations/invitations.py | 2 +- services/invitations/tests/unit/test_invitations.py | 4 ++-- .../storage/src/simcore_service_storage/models.py | 2 +- .../src/simcore_service_storage/simcore_s3_dsm.py | 4 ++-- services/storage/tests/unit/test_dsm_dsmcleaner.py | 12 ++++-------- services/storage/tests/unit/test_utils.py | 2 +- .../server/tests/unit/isolated/test_users_models.py | 2 +- .../server/tests/unit/with_dbs/02/test_project_db.py | 2 +- 17 files changed, 23 insertions(+), 30 deletions(-) diff --git a/services/autoscaling/src/simcore_service_autoscaling/dynamic_scaling_core.py b/services/autoscaling/src/simcore_service_autoscaling/dynamic_scaling_core.py index b3ba987f661..e05891bf24a 100644 --- a/services/autoscaling/src/simcore_service_autoscaling/dynamic_scaling_core.py +++ b/services/autoscaling/src/simcore_service_autoscaling/dynamic_scaling_core.py @@ -109,8 +109,7 @@ async def _find_terminateable_instances( for instance in cluster.drained_nodes: # NOTE: AWS price is hourly based (e.g. 
same price for a machine used 2 minutes or 1 hour, so we wait until 55 minutes) elapsed_time_since_launched = ( - datetime.now(timezone.utc).replace(tzinfo=None) - - instance.ec2_instance.launch_time + datetime.now(timezone.utc) - instance.ec2_instance.launch_time ) elapsed_time_since_full_hour = elapsed_time_since_launched % timedelta(hours=1) if ( diff --git a/services/catalog/src/simcore_service_catalog/api/routes/health.py b/services/catalog/src/simcore_service_catalog/api/routes/health.py index b2051dd6c13..8f636f448ce 100644 --- a/services/catalog/src/simcore_service_catalog/api/routes/health.py +++ b/services/catalog/src/simcore_service_catalog/api/routes/health.py @@ -7,4 +7,4 @@ @router.get("/", include_in_schema=False) async def check_service_health(): - return f"{__name__}@{datetime.now(timezone.utc).replace(tzinfo=None).isoformat()}" + return f"{__name__}@{datetime.now(timezone.utc).isoformat()}" diff --git a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py index cfb7cd0a33f..24404c1f3de 100644 --- a/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py +++ b/services/datcore-adapter/src/simcore_service_datcore_adapter/api/routes/health.py @@ -23,7 +23,7 @@ status_code=status.HTTP_200_OK, ) async def get_service_alive(): - return f"{__name__}@{datetime.now(timezone.utc).replace(tzinfo=None).isoformat()}" + return f"{__name__}@{datetime.now(timezone.utc).isoformat()}" @router.get("/ready", status_code=status.HTTP_200_OK, response_model=AppStatusCheck) diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/health.py b/services/director-v2/src/simcore_service_director_v2/api/routes/health.py index 3aec6901fbd..77a63d204eb 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/health.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/health.py @@ -7,6 +7,4 @@ @router.get("/") async def check_service_health() -> dict[str, str]: - return { - "timestamp": f"{__name__}@{datetime.now(timezone.utc).replace(tzinfo=None).isoformat()}" - } + return {"timestamp": f"{__name__}@{datetime.now(timezone.utc).isoformat()}"} diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py index 1f2fad7634b..62c0829a9e7 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_runs.py @@ -99,7 +99,7 @@ async def create( cluster_id=cluster_id if cluster_id != DEFAULT_CLUSTER_ID else None, iteration=iteration, result=RUNNING_STATE_TO_DB[RunningState.PUBLISHED], - started=datetime.now(timezone.utc).replace(tzinfo=None), + started=datetime.now(timezone.utc), ) .returning(literal_column("*")) ) @@ -133,7 +133,7 @@ async def set_run_result( ) -> Optional[CompRunsAtDB]: values = {"result": RUNNING_STATE_TO_DB[result_state]} if final_state: - values.update({"ended": datetime.now(timezone.utc).replace(tzinfo=None)}) + values.update({"ended": datetime.now(timezone.utc)}) return await self.update( user_id, project_id, diff --git a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks.py b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks.py index 0558aa7ee53..5a1cdcaaeab 
100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/db/repositories/comp_tasks.py @@ -94,7 +94,7 @@ async def _generate_tasks_list_from_project( inputs=node.inputs, outputs=node.outputs, image=image, - submit=datetime.now(timezone.utc).replace(tzinfo=None), + submit=datetime.now(timezone.utc), state=task_state, internal_id=internal_id, node_class=node_class, diff --git a/services/director-v2/tests/unit/with_dbs/conftest.py b/services/director-v2/tests/unit/with_dbs/conftest.py index 2f9f5864aae..7a5648cc910 100644 --- a/services/director-v2/tests/unit/with_dbs/conftest.py +++ b/services/director-v2/tests/unit/with_dbs/conftest.py @@ -99,7 +99,7 @@ def creator( ), # type: ignore "node_class": to_node_class(node_data.key), "internal_id": internal_id + 1, - "submit": datetime.now(timezone.utc).replace(tzinfo=None), + "submit": datetime.now(timezone.utc), "job_id": generate_dask_job_id( service_key=node_data.key, service_version=node_data.version, diff --git a/services/invitations/src/simcore_service_invitations/api/_health.py b/services/invitations/src/simcore_service_invitations/api/_health.py index 79b24ceb468..611f200e73e 100644 --- a/services/invitations/src/simcore_service_invitations/api/_health.py +++ b/services/invitations/src/simcore_service_invitations/api/_health.py @@ -15,4 +15,4 @@ @router.get("/", response_class=PlainTextResponse) async def healthcheck(): - return f"{__name__}@{datetime.now(timezone.utc).replace(tzinfo=None).isoformat()}" + return f"{__name__}@{datetime.now(timezone.utc).isoformat()}" diff --git a/services/invitations/src/simcore_service_invitations/api/_invitations.py b/services/invitations/src/simcore_service_invitations/api/_invitations.py index 5207001d8df..c2577cdc16f 100644 --- a/services/invitations/src/simcore_service_invitations/api/_invitations.py +++ b/services/invitations/src/simcore_service_invitations/api/_invitations.py @@ -90,7 +90,7 @@ async def create_invitation( ) invitation = _InvitationContentAndLink( invitation_url=invitation_link, - created=datetime.now(timezone.utc).replace(tzinfo=None), + created=datetime.now(timezone.utc), **invitation_inputs.dict(), ) diff --git a/services/invitations/src/simcore_service_invitations/invitations.py b/services/invitations/src/simcore_service_invitations/invitations.py index 721b750e473..c4f5c5bb065 100644 --- a/services/invitations/src/simcore_service_invitations/invitations.py +++ b/services/invitations/src/simcore_service_invitations/invitations.py @@ -146,7 +146,7 @@ def _create_invitation_code( # builds content content = InvitationContent( **invitation_data.dict(), - created=datetime.now(timezone.utc).replace(tzinfo=None), + created=datetime.now(timezone.utc), ) content_jsonstr: str = _ContentWithShortNames.serialize(content) diff --git a/services/invitations/tests/unit/test_invitations.py b/services/invitations/tests/unit/test_invitations.py index 9ff79b4aec8..76fc2751448 100644 --- a/services/invitations/tests/unit/test_invitations.py +++ b/services/invitations/tests/unit/test_invitations.py @@ -39,7 +39,7 @@ def test_import_and_export_invitation_alias_by_alias( ): expected_content = InvitationContent( **invitation_data.dict(), - created=datetime.now(timezone.utc).replace(tzinfo=None), + created=datetime.now(timezone.utc), ) raw_data = _ContentWithShortNames.serialize(expected_content) @@ -52,7 +52,7 @@ def test_export_by_alias_produces_smaller_strings( ): content = 
InvitationContent( **invitation_data.dict(), - created=datetime.now(timezone.utc).replace(tzinfo=None), + created=datetime.now(timezone.utc), ) raw_data = _ContentWithShortNames.serialize(content) diff --git a/services/storage/src/simcore_service_storage/models.py b/services/storage/src/simcore_service_storage/models.py index 37b0c0c3348..eaa1fc618e7 100644 --- a/services/storage/src/simcore_service_storage/models.py +++ b/services/storage/src/simcore_service_storage/models.py @@ -91,7 +91,7 @@ def from_simcore_node( ): parts = file_id.split("/") - now = datetime.now(timezone.utc).replace(tzinfo=None) + now = datetime.now(timezone.utc) fmd_kwargs = { "file_uuid": file_id, "location_id": location_id, diff --git a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py index 4a7a7ce4130..b1d416bdcb6 100644 --- a/services/storage/src/simcore_service_storage/simcore_s3_dsm.py +++ b/services/storage/src/simcore_service_storage/simcore_s3_dsm.py @@ -586,7 +586,7 @@ async def _clean_expired_uploads(self): 1. will try to update the entry from S3 backend if exists 2. will delete the entry if nothing exists in S3 backend. """ - now = datetime.now(timezone.utc).replace(tzinfo=None) + now = datetime.now(timezone.utc) async with self.engine.acquire() as conn: list_of_expired_uploads = await db_file_meta_data.list_fmds( conn, expired_after=now @@ -821,7 +821,7 @@ async def _create_fmd_for_upload( file_id: StorageFileID, upload_id: Optional[UploadID], ) -> FileMetaDataAtDB: - now = datetime.now(timezone.utc).replace(tzinfo=None) + now = datetime.now(timezone.utc) upload_expiration_date = now + timedelta( seconds=self.settings.STORAGE_DEFAULT_PRESIGNED_LINK_EXPIRATION_SECONDS ) diff --git a/services/storage/tests/unit/test_dsm_dsmcleaner.py b/services/storage/tests/unit/test_dsm_dsmcleaner.py index 0fce1950da7..ee90143dd2b 100644 --- a/services/storage/tests/unit/test_dsm_dsmcleaner.py +++ b/services/storage/tests/unit/test_dsm_dsmcleaner.py @@ -120,7 +120,7 @@ async def test_clean_expired_uploads_deletes_expired_pending_uploads( await conn.execute( file_meta_data.update() .where(file_meta_data.c.file_id == simcore_file_id) - .values(upload_expires_at=datetime.now(timezone.utc).replace(tzinfo=None)) + .values(upload_expires_at=datetime.now(timezone.utc)) ) await asyncio.sleep(1) await simcore_s3_dsm.clean_expired_uploads() @@ -195,7 +195,7 @@ async def test_clean_expired_uploads_reverts_to_last_known_version_expired_pendi await conn.execute( file_meta_data.update() .where(file_meta_data.c.file_id == file_id) - .values(upload_expires_at=datetime.now(timezone.utc).replace(tzinfo=None)) + .values(upload_expires_at=datetime.now(timezone.utc)) ) await asyncio.sleep(1) await simcore_s3_dsm.clean_expired_uploads() @@ -230,9 +230,7 @@ async def test_clean_expired_uploads_does_not_clean_multipart_upload_on_creation ): """This test reproduces what create_file_upload_links in dsm does, but running the cleaner in between to ensure the cleaner does not break the mechanism""" - later_than_now = datetime.now(timezone.utc).replace(tzinfo=None) + timedelta( - minutes=5 - ) + later_than_now = datetime.now(timezone.utc) + timedelta(minutes=5) fmd = FileMetaData.from_simcore_node( user_id, simcore_file_id, @@ -291,9 +289,7 @@ async def test_clean_expired_uploads_cleans_dangling_multipart_uploads_if_no_cor ): """This test reproduces what create_file_upload_links in dsm does, but running the cleaner in between to ensure the cleaner does not break the 
mechanism""" - later_than_now = datetime.now(timezone.utc).replace(tzinfo=None) + timedelta( - minutes=5 - ) + later_than_now = datetime.now(timezone.utc) + timedelta(minutes=5) fmd = FileMetaData.from_simcore_node( user_id, simcore_file_id, diff --git a/services/storage/tests/unit/test_utils.py b/services/storage/tests/unit/test_utils.py index a871fd5ca61..ee0d8753cd9 100644 --- a/services/storage/tests/unit/test_utils.py +++ b/services/storage/tests/unit/test_utils.py @@ -54,7 +54,7 @@ async def test_download_files(tmpdir): random.randint(1, 1000000), "some_valid_entity_tag", None, - datetime.now(timezone.utc).replace(tzinfo=None), + datetime.now(timezone.utc), False, ), (random.randint(1, 1000000), "some_valid_entity_tag", None, None, True), diff --git a/services/web/server/tests/unit/isolated/test_users_models.py b/services/web/server/tests/unit/isolated/test_users_models.py index 9d48c492184..24d01bcf66d 100644 --- a/services/web/server/tests/unit/isolated/test_users_models.py +++ b/services/web/server/tests/unit/isolated/test_users_models.py @@ -50,7 +50,7 @@ def test_user_models_examples( def test_profile_get_expiration_date(faker: Faker): - fake_expiration = datetime.now(timezone.utc).replace(tzinfo=None) + fake_expiration = datetime.now(timezone.utc) profile = ProfileGet( id=1, login=faker.email(), role=UserRole.ADMIN, expiration_date=fake_expiration diff --git a/services/web/server/tests/unit/with_dbs/02/test_project_db.py b/services/web/server/tests/unit/with_dbs/02/test_project_db.py index a9c4fb910bc..b190d486d7f 100644 --- a/services/web/server/tests/unit/with_dbs/02/test_project_db.py +++ b/services/web/server/tests/unit/with_dbs/02/test_project_db.py @@ -90,7 +90,7 @@ def test_convert_to_schema_names(fake_project: dict[str, Any]): assert col is not None # test date time conversion - date = datetime.now(timezone.utc).replace(tzinfo=None) + date = datetime.now(timezone.utc) db_entries["creation_date"] = date schema_entries = convert_to_schema_names(db_entries, fake_project["prjOwner"]) assert "creationDate" in schema_entries
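The reverts in this series keep toggling between the same few datetime idioms. A short, standard-library-only sketch of how they differ (variable names are illustrative):

from datetime import datetime, timezone

aware = datetime.now(timezone.utc)   # timezone-aware UTC timestamp
naive = aware.replace(tzinfo=None)   # same wall-clock reading, tzinfo stripped
legacy = datetime.utcnow()           # naive UTC, the idiom the series moves away from

assert aware.tzinfo is timezone.utc
assert naive.tzinfo is None and legacy.tzinfo is None

# mixing the two flavours fails loudly, which is why each DB column and API
# has to commit to one convention:
try:
    aware < legacy
except TypeError as err:
    print(err)  # "can't compare offset-naive and offset-aware datetimes"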