Skip to content

Commit

Permalink
Merge branch 'master' into Publishing-as-Tutorials
Browse files Browse the repository at this point in the history
  • Loading branch information
jsaq007 authored Dec 6, 2023
2 parents d3947d0 + 2b4bebc commit 0e88fa4
Show file tree
Hide file tree
Showing 100 changed files with 1,654 additions and 1,164 deletions.
4 changes: 2 additions & 2 deletions .env-devel
Original file line number Diff line number Diff line change
Expand Up @@ -80,8 +80,8 @@ PAYMENTS_AUTORECHARGE_MIN_BALANCE_IN_CREDITS=100
PAYMENTS_AUTORECHARGE_ENABLED=1
PAYMENTS_FAKE_COMPLETION_DELAY_SEC=10
PAYMENTS_FAKE_COMPLETION=0
PAYMENTS_GATEWAY_API_SECRET=replace-with-api-secret
PAYMENTS_GATEWAY_URL=https://fake-payment-gateway.com
PAYMENTS_GATEWAY_API_SECRET=adminadmin
PAYMENTS_GATEWAY_URL=http://127.0.0.1:32769
PAYMENTS_HOST=payments
PAYMENTS_LOGLEVEL=INFO
PAYMENTS_PASSWORD=adminadmin
Expand Down
3 changes: 0 additions & 3 deletions .github/dependabot.yml
Original file line number Diff line number Diff line change
Expand Up @@ -29,9 +29,6 @@ updates:
- dependency-name: httpx
versions:
- 0.17.0
- dependency-name: minio
versions:
- 7.0.0
- package-ecosystem: pip
directory: "/packages/service-library"
schedule:
Expand Down
10 changes: 0 additions & 10 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -269,23 +269,15 @@ CPU_COUNT = $(shell cat /proc/cpuinfo | grep processor | wc -l )


.stack-ops.yml: .env $(docker-compose-configs)
# Compiling config file for filestash
$(eval TMP_PATH_TO_FILESTASH_CONFIG=$(shell set -o allexport && \
source $(CURDIR)/.env && \
set +o allexport && \
python3 scripts/filestash/create_config.py))
# Creating config for ops stack to $@
# -> filestash config at $(TMP_PATH_TO_FILESTASH_CONFIG)
ifdef ops_ci
@$(shell \
export TMP_PATH_TO_FILESTASH_CONFIG="${TMP_PATH_TO_FILESTASH_CONFIG}" && \
scripts/docker/docker-compose-config.bash -e .env \
services/docker-compose-ops-ci.yml \
> $@ \
)
else
@$(shell \
export TMP_PATH_TO_FILESTASH_CONFIG="${TMP_PATH_TO_FILESTASH_CONFIG}" && \
scripts/docker/docker-compose-config.bash -e .env \
services/docker-compose-ops.yml \
> $@ \
Expand All @@ -299,7 +291,6 @@ endif
.deploy-ops: .stack-ops.yml
# Deploy stack 'ops'
ifndef ops_disabled
# -> filestash config at $(TMP_PATH_TO_FILESTASH_CONFIG)
docker stack deploy --with-registry-auth -c $< ops
else
@echo "Explicitly disabled with ops_disabled flag in CLI"
Expand Down Expand Up @@ -328,7 +319,6 @@ printf "$$rows" "Invitations" "http://$(get_my_ip).nip.io:8008/dev/doc" $${INVIT
printf "$$rows" "Payments" "http://$(get_my_ip).nip.io:8011/dev/doc" $${PAYMENTS_USERNAME} $${PAYMENTS_PASSWORD};\
printf "$$rows" "Rabbit Dashboard" "http://$(get_my_ip).nip.io:15672" admin adminadmin;\
printf "$$rows" "Traefik Dashboard" "http://$(get_my_ip).nip.io:8080/dashboard/";\
printf "$$rows" "Storage S3 Filestash" "http://$(get_my_ip).nip.io:9002" 12345678 12345678;\
printf "$$rows" "Storage S3 Minio" "http://$(get_my_ip).nip.io:9001" 12345678 12345678;\

printf "\n%s\n" "⚠️ if a DNS is not used (as displayed above), the interactive services started via dynamic-sidecar";\
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
from typing import Any, TypeAlias

from .constants import DASK_TASK_EC2_RESOURCE_RESTRICTION_KEY

DaskTaskResources: TypeAlias = dict[str, Any]


def create_ec2_resource_constraint_key(ec2_instance_type: str) -> str:
    """Build the dask resource name that restricts a task to *ec2_instance_type*."""
    return ":".join((DASK_TASK_EC2_RESOURCE_RESTRICTION_KEY, ec2_instance_type))


def get_ec2_instance_type_from_resources(
    task_resources: DaskTaskResources,
) -> str | None:
    """Extract the EC2 instance type encoded in *task_resources*, if any.

    Returns the suffix (after the last ':') of the first resource name that
    starts with DASK_TASK_EC2_RESOURCE_RESTRICTION_KEY, or None when no such
    restriction is present.
    """
    restriction_names = (
        name
        for name in task_resources
        if name.startswith(DASK_TASK_EC2_RESOURCE_RESTRICTION_KEY)
    )
    restriction = next(restriction_names, None)
    return restriction.split(":")[-1] if restriction is not None else None
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
from dask_task_models_library.constants import DASK_TASK_EC2_RESOURCE_RESTRICTION_KEY
from dask_task_models_library.resource_constraints import (
create_ec2_resource_constraint_key,
get_ec2_instance_type_from_resources,
)
from faker import Faker


def test_create_ec2_resource_constraint_key(faker: Faker):
    # the key is always "<restriction-key>:<instance-type>", even for an
    # empty instance type
    for instance_type in (faker.pystr(), ""):
        expected_key = f"{DASK_TASK_EC2_RESOURCE_RESTRICTION_KEY}:{instance_type}"
        assert create_ec2_resource_constraint_key(instance_type) == expected_key


def test_get_ec2_instance_type_from_resources(faker: Faker):
    # no resources at all -> no instance type
    assert get_ec2_instance_type_from_resources({}) is None
    # resources without the EC2 restriction key -> no instance type
    assert get_ec2_instance_type_from_resources({"blahblah": 1}) is None

    # a key built with the helper round-trips back to the instance type
    instance_type = faker.pystr()
    resources_with_constraint = {create_ec2_resource_constraint_key(instance_type): 1}
    assert (
        get_ec2_instance_type_from_resources(resources_with_constraint)
        == instance_type
    )
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from typing import Final

SOCKET_IO_PAYMENT_COMPLETED_EVENT: Final[str] = "paymentCompleted"
SOCKET_IO_PAYMENT_METHOD_ACKED_EVENT: Final[str] = "paymentMethodAcknoledged"
SOCKET_IO_PAYMENT_METHOD_ACKED_EVENT: Final[str] = "paymentMethodAcknowledged"
7 changes: 5 additions & 2 deletions packages/models-library/src/models_library/invitations.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
from datetime import datetime, timezone
from typing import Final

from models_library.products import ProductName
from pydantic import BaseModel, EmailStr, Field, PositiveInt, validator

_MAX_LEN: Final = 40


class InvitationInputs(BaseModel):
"""Input data necessary to create an invitation"""
Expand All @@ -11,7 +14,7 @@ class InvitationInputs(BaseModel):
...,
description="Identifies who issued the invitation. E.g. an email, a service name etc. NOTE: it will be trimmed if exceeds maximum",
min_length=1,
max_length=30,
max_length=_MAX_LEN,
)
guest: EmailStr = Field(
...,
Expand All @@ -35,7 +38,7 @@ class InvitationInputs(BaseModel):
@classmethod
def trim_long_issuers_to_max_length(cls, v):
if v and isinstance(v, str):
return v[:29]
return v[: _MAX_LEN - 1]
return v


Expand Down
102 changes: 102 additions & 0 deletions packages/pytest-simcore/src/pytest_simcore/aws_s3_service.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
# pylint: disable=redefined-outer-name
# pylint: disable=unused-argument
# pylint: disable=unused-import


import contextlib
import typing

import aioboto3
import pytest
from aiobotocore.session import ClientCreatorContext
from botocore.client import Config
from settings_library.s3 import S3Settings
from types_aiobotocore_s3 import S3Client


@pytest.fixture
def s3_settings() -> S3Settings:
    """S3 settings resolved from the current environment variables."""
    settings = S3Settings.create_from_envs()
    return settings


@pytest.fixture
async def s3_client(s3_settings: S3Settings) -> typing.AsyncIterator[S3Client]:
    """Yields an aioboto3 S3 client configured from *s3_settings*.

    The client is closed automatically when the fixture is torn down.
    """
    client_context = aioboto3.Session().client(
        "s3",
        endpoint_url=s3_settings.S3_ENDPOINT,
        aws_access_key_id=s3_settings.S3_ACCESS_KEY,
        aws_secret_access_key=s3_settings.S3_SECRET_KEY,
        aws_session_token=s3_settings.S3_ACCESS_TOKEN,
        region_name=s3_settings.S3_REGION,
        config=Config(signature_version="s3v4"),
    )
    assert isinstance(client_context, ClientCreatorContext)
    # the async context manager replaces the manual AsyncExitStack: enter on
    # setup, exit (i.e. close the client) on teardown
    async with client_context as client:
        yield typing.cast(S3Client, client)


async def _empty_bucket(s3_client: S3Client, bucket_name: str) -> None:
    """Delete every object version, delete marker and object in *bucket_name*.

    Uses paginators so buckets holding more than 1000 entries (the per-call
    listing limit of the S3 list APIs) are fully emptied as well — a single
    list_object_versions/list_objects call only returns the first page.
    """
    # Delete all object versions and delete markers (relevant for versioned buckets)
    async for page in s3_client.get_paginator("list_object_versions").paginate(
        Bucket=bucket_name
    ):
        for version in page.get("Versions", []):
            assert "Key" in version
            assert "VersionId" in version
            await s3_client.delete_object(
                Bucket=bucket_name, Key=version["Key"], VersionId=version["VersionId"]
            )

        for marker in page.get("DeleteMarkers", []):
            assert "Key" in marker
            assert "VersionId" in marker
            await s3_client.delete_object(
                Bucket=bucket_name, Key=marker["Key"], VersionId=marker["VersionId"]
            )

    # Delete any remaining (non-versioned) objects in the bucket
    async for page in s3_client.get_paginator("list_objects_v2").paginate(
        Bucket=bucket_name
    ):
        for obj in page.get("Contents", []):
            assert "Key" in obj
            await s3_client.delete_object(Bucket=bucket_name, Key=obj["Key"])


@pytest.fixture
async def s3_bucket(
    s3_settings: S3Settings, s3_client: S3Client
) -> typing.AsyncIterator[str]:
    """Yields an empty bucket named after S3_BUCKET_NAME; empties it again on teardown."""
    bucket_name = s3_settings.S3_BUCKET_NAME

    listing = await s3_client.list_buckets()
    existing_names = [bucket.get("Name") for bucket in listing["Buckets"]]

    if bucket_name in existing_names:
        # reuse the existing bucket, but start from a clean slate
        await _empty_bucket(s3_client, bucket_name)
    else:
        await s3_client.create_bucket(Bucket=bucket_name)
        listing = await s3_client.list_buckets()
        assert listing["Buckets"]
        assert bucket_name in [
            bucket.get("Name") for bucket in listing["Buckets"]
        ], f"failed creating {bucket_name}"

    yield bucket_name

    await _empty_bucket(s3_client, bucket_name)


@pytest.fixture
async def with_bucket_versioning_enabled(s3_client: S3Client, s3_bucket: str) -> str:
    """Enables object versioning on *s3_bucket* and returns the bucket name."""
    versioning_configuration = {"MFADelete": "Disabled", "Status": "Enabled"}
    await s3_client.put_bucket_versioning(
        Bucket=s3_bucket,
        VersioningConfiguration=versioning_configuration,
    )
    return s3_bucket
28 changes: 18 additions & 10 deletions packages/pytest-simcore/src/pytest_simcore/aws_server.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,10 @@
import pytest
import requests
from aiohttp.test_utils import unused_port
from faker import Faker
from moto.server import ThreadedMotoServer
from settings_library.ec2 import EC2Settings
from settings_library.s3 import S3Settings

from .helpers.utils_envs import EnvVarsDict, setenvs_from_dict
from .helpers.utils_host import get_localhost_ip
Expand Down Expand Up @@ -71,16 +73,22 @@ def mocked_ec2_server_envs(


@pytest.fixture
async def mocked_s3_server_envs(
mocked_aws_server: ThreadedMotoServer,
reset_aws_server_state: None,
def mocked_s3_server_settings(
mocked_aws_server: ThreadedMotoServer, reset_aws_server_state: None, faker: Faker
) -> S3Settings:
return S3Settings(
S3_ACCESS_KEY="xxx",
S3_ENDPOINT=f"http://{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # pylint: disable=protected-access # noqa: SLF001
S3_SECRET_KEY="xxx", # noqa: S106
S3_BUCKET_NAME=f"pytest{faker.pystr().lower()}",
S3_SECURE=False,
)


@pytest.fixture
def mocked_s3_server_envs(
mocked_s3_server_settings: S3Settings,
monkeypatch: pytest.MonkeyPatch,
) -> EnvVarsDict:
changed_envs = {
"S3_SECURE": "false",
"S3_ENDPOINT": f"{mocked_aws_server._ip_address}:{mocked_aws_server._port}", # pylint: disable=protected-access # noqa: SLF001
"S3_ACCESS_KEY": "xxx",
"S3_SECRET_KEY": "xxx",
"S3_BUCKET_NAME": "pytestbucket",
}
changed_envs: EnvVarsDict = mocked_s3_server_settings.dict(exclude_unset=True)
return setenvs_from_dict(monkeypatch, changed_envs)
34 changes: 4 additions & 30 deletions packages/pytest-simcore/src/pytest_simcore/docker_compose.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
import os
import re
import shutil
import subprocess
import sys
from collections.abc import Iterator
from copy import deepcopy
Expand Down Expand Up @@ -82,6 +81,9 @@ def testing_environ_vars(env_devel_file: Path) -> EnvVarsDict:
if "DOCKER_IMAGE_TAG" not in os.environ:
env_devel["DOCKER_IMAGE_TAG"] = "production"

# ensure we do not use the bucket of simcore or so
env_devel["S3_BUCKET_NAME"] = "pytestbucket"

return {key: value for key, value in env_devel.items() if value is not None}


Expand Down Expand Up @@ -166,39 +168,12 @@ def simcore_docker_compose(
return compose_specs


@pytest.fixture(scope="module")
def inject_filestash_config_path_env(
osparc_simcore_scripts_dir: Path,
env_file_for_testing: Path,
) -> EnvVarsDict:
create_filestash_config_py = (
osparc_simcore_scripts_dir / "filestash" / "create_config.py"
)

# ensures .env at git_root_dir, which will be used as current directory
assert env_file_for_testing.exists()
env_values = dotenv_values(env_file_for_testing)

process = subprocess.run(
["python3", f"{create_filestash_config_py}"],
shell=False,
check=True,
stdout=subprocess.PIPE,
env=env_values,
)
filestash_config_json_path = Path(process.stdout.decode("utf-8").strip())
assert filestash_config_json_path.exists()

return {"TMP_PATH_TO_FILESTASH_CONFIG": f"{filestash_config_json_path}"}


@pytest.fixture(scope="module")
def ops_docker_compose(
osparc_simcore_root_dir: Path,
osparc_simcore_scripts_dir: Path,
env_file_for_testing: Path,
temp_folder: Path,
inject_filestash_config_path_env: dict[str, str],
) -> dict[str, Any]:
"""Filters only services in docker-compose-ops.yml and returns yaml data
Expand All @@ -219,7 +194,6 @@ def ops_docker_compose(
docker_compose_paths=docker_compose_path,
env_file_path=env_file_for_testing,
destination_path=temp_folder / "ops_docker_compose.yml",
additional_envs=inject_filestash_config_path_env,
)
# NOTE: do not add indent. Copy&Paste log into editor instead
print(
Expand Down Expand Up @@ -277,7 +251,7 @@ def ops_docker_compose_file(
docker_compose_path = Path(temp_folder / "ops_docker_compose.filtered.yml")

# these services are useless when running in the CI
ops_view_only_services = ["adminer", "redis-commander", "portainer", "filestash"]
ops_view_only_services = ["adminer", "redis-commander", "portainer"]
if "CI" in os.environ:
print(
f"WARNING: Services such as {ops_view_only_services!r} are removed from the stack when running in the CI"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
from faker import Faker
from simcore_postgres_database.models.api_keys import api_keys
from simcore_postgres_database.models.comp_pipeline import StateType
from simcore_postgres_database.models.groups import groups
from simcore_postgres_database.models.payments_methods import InitPromptAckFlowState
from simcore_postgres_database.models.payments_transactions import (
PaymentTransactionState,
Expand Down Expand Up @@ -101,11 +102,15 @@ def random_project(**overrides) -> dict[str, Any]:


def random_group(**overrides) -> dict[str, Any]:
    """Returns fake data for a row of the groups table; *overrides* replace generated values."""
    data: dict[str, Any] = {
        "name": FAKE.company(),
        "description": FAKE.text(),
        "type": GroupType.STANDARD.name,
    }

    # sanity check: every generated key must be an actual column of the groups table
    valid_columns = {column.name for column in groups.columns}
    assert set(data).issubset(valid_columns)  # nosec

    return {**data, **overrides}

Expand Down
Loading

0 comments on commit 0e88fa4

Please sign in to comment.