From 47b53580b6fa0f9559ed92d385af74a824b5b69b Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Thu, 25 Jun 2020 19:18:12 +0200 Subject: [PATCH 01/20] Upgraded docker config to slim --- services/storage/Dockerfile | 169 +++++++++++++++----------- services/storage/docker/boot.sh | 71 +++++------ services/storage/docker/entrypoint.sh | 93 ++++++++------ 3 files changed, 186 insertions(+), 147 deletions(-) diff --git a/services/storage/Dockerfile b/services/storage/Dockerfile index 99dfd140d6c..33fa6144763 100644 --- a/services/storage/Dockerfile +++ b/services/storage/Dockerfile @@ -1,5 +1,5 @@ -#FROM python:3.6-alpine as base -FROM itisfoundation/python-with-pandas:3.6-alpine as base +ARG PYTHON_VERSION="3.6.10" +FROM python:${PYTHON_VERSION}-slim-buster as base # # USAGE: # cd sercices/storage @@ -8,26 +8,50 @@ FROM itisfoundation/python-with-pandas:3.6-alpine as base # # REQUIRED: context expected at ``osparc-simcore/`` folder because we need access to osparc-simcore/packages - LABEL maintainer=mguidon +RUN set -eux; \ + apt-get update; \ + apt-get install -y gosu; \ + rm -rf /var/lib/apt/lists/*; \ + # verify that the binary works + gosu nobody true + + # simcore-user uid=8004(scu) gid=8004(scu) groups=8004(scu) -RUN adduser -D -u 8004 -s /bin/sh -h /home/scu scu +ENV SC_USER_ID=8004 \ + SC_USER_NAME=scu \ + SC_BUILD_TARGET=base \ + SC_BOOT_MODE=default + +RUN adduser \ + --uid ${SC_USER_ID} \ + --disabled-password \ + --gecos "" \ + --shell /bin/sh \ + --home /home/${SC_USER_NAME} \ + ${SC_USER_NAME} + + +# Sets utf-8 encoding for Python et al +ENV LANG=C.UTF-8 -RUN apk add --no-cache \ - su-exec +# Turns off writing .pyc files; superfluous on an ephemeral container. +ENV PYTHONDONTWRITEBYTECODE=1 \ + VIRTUAL_ENV=/home/scu/.venv +# Ensures that the python and pip executables used +# in the image will be those from our virtualenv. 
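+#
+# Prepending the venv's bin/ to PATH (next line) is the layer-persistent
+# equivalent of `source ${VIRTUAL_ENV}/bin/activate`: every later RUN, the
+# ENTRYPOINT and the CMD resolve python/pip inside /home/scu/.venv.
+# A quick sanity check (the image tag below is only illustrative):
+#
+#   docker run --rm local/storage:production sh -c 'command -v python'
+#   # expected: /home/scu/.venv/bin/python
+#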
+ENV PATH="${VIRTUAL_ENV}/bin:$PATH" -ENV PATH "/home/scu/.local/bin:$PATH" -ENV HOME /home/scu -# All SC_ variables are customized -ENV SC_PIP pip3 --no-cache-dir -ENV SC_PIP2 pip2 --no-cache-dir ENV SC_BUILD_TARGET base ENV SC_BOOT_MODE default EXPOSE 8080 +# -------------------------- ------------------------------- + + # -------------------------- Build stage ------------------- # Installs build/package management tools and third party dependencies @@ -37,57 +61,30 @@ EXPOSE 8080 FROM base as build -# Installing client libraries and any other package you need -# -# libpq: client library for PostgreSQL https://www.postgresql.org/docs/9.5/libpq.html -# libstdc++: needed in ujson https://github.com/kohlschutter/junixsocket/issues/33 -# -RUN apk update && \ - apk add --no-cache \ - libpq \ - libstdc++ - -RUN apk add --no-cache \ - postgresql-dev \ - gcc \ - g++ \ - libc-dev \ - libffi-dev \ - linux-headers - -RUN $SC_PIP install --upgrade \ - pip~=20.1.1 \ - wheel \ - setuptools +ENV SC_BUILD_TARGET build -WORKDIR /build +RUN apt-get update &&\ + apt-get install -y --no-install-recommends \ + build-essential -# install base 3rd party dependencies -COPY --chown=scu:scu services/storage/requirements/*.txt \ - tmp/storage/requirements/ -RUN $SC_PIP install \ - -r tmp/storage/requirements/_base.txt +# NOTE: python virtualenv is used here such that installed +# packages may be moved to production image easily by copying the venv +RUN python -m venv ${VIRTUAL_ENV} -# --------------------------Development stage ------------------- -# Source code accessible in host but runs in container -# Runs as scu with same gid/uid as host -# -# + /devel WORKDIR -# + packages (mounted volume) -# + services (mounted volume) -# -FROM build as development +RUN pip --no-cache-dir install --upgrade \ + pip~=20.1.1 \ + wheel \ + setuptools + +WORKDIR /build -ENV SC_BUILD_TARGET development -ENV SC_BOOT_MODE debug-ptvsd +# install only base 3rd party dependencies +COPY --chown=scu:scu services/storage/requirements/_base.txt requirements_base.txt +RUN pip --no-cache-dir --quiet install -r requirements_base.txt +# -------------------------- ------------------------------- -WORKDIR /devel -VOLUME /devel/packages -VOLUME /devel/services/storage/ -ENTRYPOINT [ "/bin/sh", "services/storage/docker/entrypoint.sh" ] -CMD ["/bin/sh", "services/storage/docker/boot.sh"] # --------------------------Cache stage ------------------- @@ -100,36 +97,72 @@ FROM build as cache ENV SC_BUILD_TARGET cache +# 2nd party packages COPY --chown=scu:scu packages /build/packages COPY --chown=scu:scu services/storage /build/services/storage WORKDIR /build/services/storage -RUN $SC_PIP install -r requirements/prod.txt &&\ - $SC_PIP list -v +RUN pip --no-cache-dir --quiet install -r requirements/prod.txt +# -------------------------- ------------------------------- + + # --------------------------Production stage ------------------- # Final cleanup up to reduce image size and startup setup # -FROM cache as production +FROM base as production + +ENV SC_BUILD_TARGET=production \ + SC_BOOT_MODE=production \ + SC_HEALTHCHECK_INTERVAL=30 \ + SC_HEALTHCHECK_RETRY=3 -ENV SC_BUILD_TARGET production ENV PYTHONOPTIMIZE=TRUE WORKDIR /home/scu -RUN mkdir -p services/storage && chown scu:scu services/storage && \ - mv /build/services/storage/docker services/storage/docker && \ - rm -rf /build +# bring installed package without build tools +COPY --from=cache --chown=scu:scu ${VIRTUAL_ENV} ${VIRTUAL_ENV} -RUN apk del --no-cache\ - gcc +# copy docker entrypoint 
and boot scripts +COPY --chown=scu:scu services/storage/docker services/storage/docker +RUN chmod +x services/storage/docker/*.sh HEALTHCHECK --interval=30s \ - --timeout=120s \ - --start-period=30s \ - --retries=3 \ - CMD ["python3", "/home/scu/services/storage/docker/healthcheck.py", "http://localhost:8080/v0/"] + --timeout=120s \ + --start-period=30s \ + --retries=3 \ + CMD ["python3", "/home/scu/services/storage/docker/healthcheck.py", "http://localhost:8080/v0/"] + + +ENTRYPOINT [ "/bin/sh", "services/storage/docker/entrypoint.sh" ] +CMD ["/bin/sh", "services/storage/docker/boot.sh"] +# -------------------------- ------------------------------- + + + + +# --------------------------Development stage ------------------- +# Source code accessible in host but runs in container +# Runs as scu with same gid/uid as host +# +# + /devel WORKDIR +# + packages (mounted volume) +# + services (mounted volume) +# +FROM build as development + +ENV SC_BUILD_TARGET=development \ + SC_DEVEL_MOUNT=/devel/services/storage/ + + +RUN chown -R scu:scu ${VIRTUAL_ENV} + +# NOTE: declaring VOLUMEs here makes troubles mounting +# the client's output folder to /devel/services/web/client. +# The latter ls no files ENTRYPOINT [ "/bin/sh", "services/storage/docker/entrypoint.sh" ] CMD ["/bin/sh", "services/storage/docker/boot.sh"] +# -------------------------- ------------------------------- diff --git a/services/storage/docker/boot.sh b/services/storage/docker/boot.sh index 96bd6ce3a8e..e8c144f6c62 100755 --- a/services/storage/docker/boot.sh +++ b/services/storage/docker/boot.sh @@ -1,57 +1,46 @@ #!/bin/sh -# -INFO="INFO: [`basename "$0"`] " -ERROR="ERROR: [`basename "$0"`] " +set -o errexit +set -o nounset -# BOOTING application --------------------------------------------- -echo $INFO "Booting in ${SC_BOOT_MODE} mode ..." - - -if [[ ${SC_BUILD_TARGET} == "development" ]] -then - echo $INFO "User :`id $(whoami)`" - echo $INFO "Workdir :`pwd`" - echo $INFO "Environment :" - printenv | sed 's/=/: /' | sed 's/^/ /' | sort - #-------------------- - - APP_CONFIG=docker-dev-config.yaml +IFS=$(printf '\n\t') - cd services/storage - $SC_PIP install --user -r requirements/dev.txt - cd /devel +INFO="INFO: [$(basename "$0")] " - #-------------------- - echo $INFO "Python :" +# BOOTING application --------------------------------------------- +echo "$INFO" "Booting in ${SC_BOOT_MODE} mode ..." +echo "$INFO" "User :$(id "$(whoami)")" +echo "$INFO" "Workdir : $(pwd)" + +if [ "${SC_BUILD_TARGET}" = "development" ]; then + echo "$INFO" "Environment :" + printenv | sed 's/=/: /' | sed 's/^/ /' | sort + echo "$INFO" "Python :" python --version | sed 's/^/ /' - which python | sed 's/^/ /' - echo $INFO "PIP :" - $SC_PIP list | sed 's/^/ /' + command -v python | sed 's/^/ /' + + cd services/storage || exit 1 + pip --quiet --no-cache-dir install -r requirements/dev.txt + cd - || exit 1 + echo "$INFO" "PIP :" + pip list | sed 's/^/ /' - #------------ - echo " setting entrypoint to use watchmedo autorestart..." + APP_CONFIG=docker-dev-config.yaml + echo "$INFO" "Setting entrypoint to use watchmedo autorestart..." 
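+ # watchmedo comes with the 'watchdog' package (expected from the dev
+ # requirements installed above); wrapping the server as
+ # `watchmedo auto-restart --recursive --pattern="*.py" -- <server-cmd>`
+ # restarts <server-cmd> whenever a *.py file changes, which gives
+ # hot-reload on the sources bind-mounted from the host.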
entrypoint='watchmedo auto-restart --recursive --pattern="*.py" --' -elif [[ ${SC_BUILD_TARGET} == "production" ]] -then +elif [ "${SC_BUILD_TARGET}" = "production" ]; then APP_CONFIG=docker-prod-config.yaml - entrypoint='' + entrypoint="" fi - # RUNNING application ---------------------------------------- -if [[ ${SC_BOOT_MODE} == "debug-pdb" ]] -then - # NOTE: needs stdin_open: true and tty: true - echo $INFO "Debugger attached: https://docs.python.org/3.6/library/pdb.html#debugger-commands ..." - echo $INFO "Running: import pdb, simcore_service_storage.cli; pdb.run('simcore_service_storage.cli.main([\'-c\',\'${APP_CONFIG}\'])')" - eval $INFO "$entrypoint" python -c "import pdb, simcore_service_storage.cli; \ - pdb.run('simcore_service_storage.cli.main([\'-c\',\'${APP_CONFIG}\'])')" -elif [[ ${SC_BOOT_MODE} == "debug-ptvsd" ]] -then - echo $INFO "PTVSD Debugger initializing in port 3003 with ${APP_CONFIG}" +echo "$INFO" "Selected config $APP_CONFIG" + +if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]; then + # NOTE: needs ptvsd installed + echo "$INFO" "PTVSD Debugger initializing in port 3000 with ${APP_CONFIG}" eval "$entrypoint" python3 -m ptvsd --host 0.0.0.0 --port 3000 -m \ - simcore_service_storage --config $APP_CONFIG + simcore_service_storage --config $APP_CONFIG else exec simcore-service-storage --config $APP_CONFIG fi diff --git a/services/storage/docker/entrypoint.sh b/services/storage/docker/entrypoint.sh index b042885f16b..6a05352f51c 100755 --- a/services/storage/docker/entrypoint.sh +++ b/services/storage/docker/entrypoint.sh @@ -1,7 +1,12 @@ #!/bin/sh -# -INFO="INFO: [`basename "$0"`] " -ERROR="ERROR: [`basename "$0"`] " +set -o errexit +set -o nounset + +IFS=$(printf '\n\t') + +INFO="INFO: [$(basename "$0")] " +WARNING="WARNING: [$(basename "$0")] " +ERROR="ERROR: [$(basename "$0")] " # This entrypoint script: # @@ -9,49 +14,61 @@ ERROR="ERROR: [`basename "$0"`] " # - Notice that the container *starts* as --user [default root] but # *runs* as non-root user [scu] # -echo $INFO "Entrypoint for stage ${SC_BUILD_TARGET} ..." -echo " User :`id $(whoami)`" -echo " Workdir :`pwd`" - -echo $INFO "updating certificates..." -update-ca-certificates -echo $INFO "certificates updated" +echo "$INFO" "Entrypoint for stage ${SC_BUILD_TARGET} ..." +echo "$INFO" "User :$(id "$(whoami)")" +echo "$INFO" "Workdir : $(pwd)" +echo "$INFO" "User : $(id scu)" +echo "$INFO" "python : $(command -v python)" +echo "$INFO" "pip : $(command -v pip)" -if [[ ${SC_BUILD_TARGET} == "development" ]] -then - # NOTE: expects docker run ... -v $(pwd):/devel/services/storage - DEVEL_MOUNT=/devel/services/storage - stat $DEVEL_MOUNT &> /dev/null || \ - (echo $ERROR "You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) # FIXME: exit does not stop script +if [ "${SC_BUILD_TARGET}" = "development" ]; then + echo "$INFO" "development mode detected..." + # NOTE: expects docker run ... -v $(pwd):$DEVEL_MOUNT + DEVEL_MOUNT="${SC_DEVEL_MOUNT}" - USERID=$(stat -c %u $DEVEL_MOUNT) - GROUPID=$(stat -c %g $DEVEL_MOUNT) - GROUPNAME=$(getent group ${GROUPID} | cut -d: -f1) + stat "$DEVEL_MOUNT" >/dev/null 2>&1 || + (echo "$ERROR" "You must mount '$DEVEL_MOUNT' to deduce user and group ids" && exit 1) - if [[ $USERID -eq 0 ]] - then - addgroup scu root + echo "$INFO" "setting correct user id/group id..." 
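+ # The owner uid/gid of the bind-mounted folder identify the host user:
+ # e.g. with `docker run -v $(pwd):/devel/services/storage ...` and a host
+ # checkout owned by 1000:1000, the stat calls below return 1000 and 1000,
+ # and scu is then remapped to those ids so that files written from inside
+ # the container remain editable on the host.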
+ HOST_USERID=$(stat --format=%u "${DEVEL_MOUNT}")
+ HOST_GROUPID=$(stat --format=%g "${DEVEL_MOUNT}")
+ CONT_GROUPNAME=$(getent group "${HOST_GROUPID}" | cut --delimiter=: --fields=1)
+ if [ "$HOST_USERID" -eq 0 ]; then
+ echo "$WARNING" "Folder mounted owned by root user... adding $SC_USER_NAME to root..."
+ adduser "$SC_USER_NAME" root
+ else
+ echo "$INFO" "Folder mounted owned by user $HOST_USERID:$HOST_GROUPID-'$CONT_GROUPNAME'..."
+ # take host's credentials in $SC_USER_NAME
+ if [ -z "$CONT_GROUPNAME" ]; then
+ echo "$WARNING" "Creating new group grp$SC_USER_NAME"
+ CONT_GROUPNAME=grp$SC_USER_NAME
+ addgroup --gid "$HOST_GROUPID" "$CONT_GROUPNAME"
 else
- # take host's credentials in scu
- if [[ -z "$GROUPNAME" ]]
- then
- GROUPNAME=host_group
- addgroup -g $GROUPID $GROUPNAME
- else
- addgroup scu $GROUPNAME
- fi
-
- deluser scu &> /dev/null
- adduser -u $USERID -G $GROUPNAME -D -s /bin/sh scu
+ echo "$INFO" "group already exists"
 fi
+ echo "$INFO" "Adding $SC_USER_NAME to group $CONT_GROUPNAME..."
+ adduser "$SC_USER_NAME" "$CONT_GROUPNAME"
+
+ echo "$WARNING" "Changing ownership [this could take some time]"
+ echo "$INFO" "Changing $SC_USER_NAME:$SC_USER_NAME ($SC_USER_ID:$SC_USER_ID) to $SC_USER_NAME:$CONT_GROUPNAME ($HOST_USERID:$HOST_GROUPID)"
+ usermod --uid "$HOST_USERID" --gid "$HOST_GROUPID" "$SC_USER_NAME"
+
+ echo "$INFO" "Changing group of files in group $SC_USER_ID to group $CONT_GROUPNAME"
+ find / -path /proc -prune -o -group "$SC_USER_ID" -exec chgrp --no-dereference "$CONT_GROUPNAME" {} \;
+ # change ownership of files already present
+ echo "$INFO" "Changing owner of files owned by user $SC_USER_ID to user $SC_USER_NAME"
+ find / -path /proc -prune -o -user "$SC_USER_ID" -exec chown --no-dereference "$SC_USER_NAME" {} \;
+ fi
 fi

-if [[ ${SC_BOOT_MODE} == "debug-ptvsd" ]]
-then
+if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]; then
 # NOTE: production does NOT pre-installs ptvsd
- python3 -m pip install ptvsd
+ pip install --no-cache-dir ptvsd
 fi

-echo $INFO "Starting boot ..."
-exec su-exec scu "$@"
+echo "$INFO Starting $* ..."
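+# NOTE: `exec gosu` below replaces this shell with "$@" running as
+# $SC_USER_NAME: the service keeps PID 1 and receives stop signals
+# directly, and unlike `su`/`sudo` no privileged parent process lingers.
+# With the default CMD this amounts to:
+#   exec gosu scu /bin/sh services/storage/docker/boot.sh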
+echo " $SC_USER_NAME rights : $(id "$SC_USER_NAME")" +echo " local dir : $(ls -al)" + +exec gosu "$SC_USER_NAME" "$@" From 917dbeef96d5e0ffc80644c9cd52d0b11198569d Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Thu, 25 Jun 2020 21:22:50 +0200 Subject: [PATCH 02/20] Upgrade requirements --- services/storage/requirements/_base.in | 35 ++++--- services/storage/requirements/_base.txt | 86 ++++++++--------- services/storage/requirements/_test.in | 11 +-- services/storage/requirements/_test.txt | 96 +++++++++---------- .../python-with-pandas_89f709.txt | 8 -- 5 files changed, 106 insertions(+), 130 deletions(-) delete mode 100644 services/storage/requirements/python-with-pandas_89f709.txt diff --git a/services/storage/requirements/_base.in b/services/storage/requirements/_base.in index 3a9e32c0435..6e251342255 100644 --- a/services/storage/requirements/_base.in +++ b/services/storage/requirements/_base.in @@ -1,25 +1,30 @@ -# -# Specifies third-party dependencies for 'storage' -# --r python-with-pandas_89f709.txt # TODO: add reference to git@github.com:ITISFoundation/dockerfiles.git/requirements - -r ../../../packages/postgres-database/requirements/_base.in -r ../../../packages/service-library/requirements/_base.in -psutil>=5.6.6 # Vulnerability https://github.com/advisories/GHSA-qfc5-mcwq-26q8 -urllib3>=1.25.8 # Vulnerability -psycopg2-binary~=2.8.4 # See http://initd.org/psycopg/docs/install.html#binary-install-from-pypi -sqlalchemy~=1.3.3 # https://nvd.nist.gov/vuln/detail/CVE-2019-7164 -boto3==1.9.252 # do not use lastest version, this would require botocore<1.13.0,>=1.12.179 but aiobotocore[boto3]==0.10.2 hardcodes boto3==1.9.91 that requires botocore<1.12.92,>=1.12.91 -aioboto3==6.4.1 -# boto3==1.9.252 (from aiobotocore[boto3]==0.10.4->aioboto3==6.4.1->-r _base.in (line 15)) -# boto3>=1.4 (from blackfynn==3.1.0->-r _base.in (line 19)) + +# These packages introduce incompatible dependencies: +blackfynn +aiobotocore +# since +# botocore<1.18.0,>=1.17.10 (from boto3==1.14.10->blackfynn==3.6.3->-r requirements/_base.in (line 8)) +# botocore<1.15.33,>=1.15.32 (from aiobotocore==1.0.7->-r requirements/_base.in (line 4)) +# +# The limiting constraint is coming from boto3, +# which blackfynn requires as bot3>1.4 (SEE https://github.com/Blackfynn/blackfynn-python/blob/master/requirements.txt#L2) +# +# So we are lowering boto3 until we fit botocore<1.15.33,>=1.15.32 +# ... 
+# botocore<1.16.0,>=1.15.39 (from boto3==1.12.39->-r requirements/_base.in (line 9)) +# botocore<1.16.0,>=1.15.33 (from boto3==1.12.33->-r requirements/_base.in (line 11)) +# +# We introduce the following extra constraint +boto3==1.12.32 +# --- + aiohttp aiofiles aiopg[sa] -blackfynn==2.11.1 -execnet marshmallow trafaret trafaret_config diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt index c9d177c9733..0ebc516e05d 100644 --- a/services/storage/requirements/_base.txt +++ b/services/storage/requirements/_base.txt @@ -4,68 +4,62 @@ # # pip-compile --output-file=requirements/_base.txt requirements/_base.in # -aioboto3==6.4.1 # via -r requirements/_base.in -aiobotocore[boto3]==0.10.4 # via aioboto3 +aiobotocore==1.0.7 # via -r requirements/_base.in aiodebug==1.1.2 # via -r requirements/../../../packages/service-library/requirements/_base.in -aiofiles==0.4.0 # via -r requirements/_base.in +aiofiles==0.5.0 # via -r requirements/_base.in aiohttp==3.6.2 # via -r requirements/../../../packages/service-library/requirements/_base.in, -r requirements/_base.in, aiobotocore, aiozipkin +aioitertools==0.7.0 # via aiobotocore aiopg[sa]==1.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in, -r requirements/_base.in aiozipkin==0.6.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -apipkg==1.5 # via execnet -async-generator==1.10 # via aiobotocore async-timeout==3.0.1 # via aiohttp -attrs==19.1.0 # via -r requirements/../../../packages/service-library/requirements/_base.in, aiohttp, jsonschema, openapi-core -blackfynn==2.11.1 # via -r requirements/_base.in -boto3==1.9.252 # via -r requirements/_base.in, aiobotocore, blackfynn -botocore==1.12.252 # via aiobotocore, boto3, s3transfer -certifi==2019.3.9 # via requests +attrs==19.3.0 # via -r requirements/../../../packages/service-library/requirements/_base.in, aiohttp, jsonschema, openapi-core +blackfynn==3.6.3 # via -r requirements/_base.in +boto3==1.12.32 # via -r requirements/_base.in, blackfynn +botocore==1.15.32 # via aiobotocore, boto3, s3transfer +certifi==2020.6.20 # via requests chardet==3.0.4 # via aiohttp, requests -configparser==3.7.4 # via blackfynn -deprecated==1.2.5 # via blackfynn +configparser==5.0.0 # via blackfynn +deprecated==1.2.10 # via blackfynn docopt==0.6.2 # via blackfynn docutils==0.15.2 # via botocore -execnet==1.6.0 # via -r requirements/_base.in -future==0.17.1 # via blackfynn +future==0.18.2 # via blackfynn idna-ssl==1.1.0 # via aiohttp -idna==2.8 # via idna-ssl, requests, yarl -importlib-metadata==1.3.0 # via jsonschema +idna==2.9 # via idna-ssl, requests, yarl +importlib-metadata==1.6.1 # via jsonschema isodate==0.6.0 # via openapi-core -jmespath==0.9.4 # via boto3, botocore +jmespath==0.10.0 # via boto3, botocore jsonschema==3.2.0 # via -r requirements/../../../packages/service-library/requirements/_base.in, openapi-spec-validator -lazy-object-proxy==1.4.3 # via openapi-core -marshmallow==2.19.2 # via -r requirements/_base.in -more-itertools==8.0.2 # via zipp -multidict==4.5.2 # via aiohttp, yarl -numpy==1.16.3 # via -r requirements/python-with-pandas_89f709.txt, blackfynn, pandas +lazy-object-proxy==1.4.3 # via -r requirements/../../../packages/service-library/requirements/_base.in, openapi-core +marshmallow==3.6.1 # via -r requirements/_base.in +multidict==4.7.6 # via aiohttp, yarl openapi-core==0.12.0 # via -r requirements/../../../packages/service-library/requirements/_base.in openapi-spec-validator==0.2.8 # via 
openapi-core -pandas==0.24.2 # via -r requirements/python-with-pandas_89f709.txt, blackfynn -prometheus-client==0.7.1 # via -r requirements/../../../packages/service-library/requirements/_base.in -protobuf==3.2.0 # via blackfynn -psutil==5.7.0 # via -r requirements/_base.in, blackfynn -psycopg2-binary==2.8.4 # via -r requirements/../../../packages/service-library/requirements/_base.in, -r requirements/_base.in, aiopg, sqlalchemy -pyrsistent==0.15.6 # via jsonschema -python-dateutil==2.8.0 # via -r requirements/python-with-pandas_89f709.txt, botocore, pandas -pytz==2019.1 # via -r requirements/python-with-pandas_89f709.txt, blackfynn, pandas -pyyaml==5.3 # via -r requirements/../../../packages/service-library/requirements/_base.in, openapi-spec-validator, trafaret-config -requests==2.22.0 # via blackfynn -s3transfer==0.2.1 # via boto3 -semantic-version==2.6.0 # via -r requirements/_base.in -semver==2.8.1 # via blackfynn -six==1.12.0 # via -r requirements/python-with-pandas_89f709.txt, isodate, jsonschema, openapi-core, openapi-spec-validator, protobuf, pyrsistent, python-dateutil, tenacity, websocket-client -sqlalchemy[postgresql_psycopg2binary]==1.3.3 # via -r requirements/../../../packages/postgres-database/requirements/_base.in, -r requirements/../../../packages/service-library/requirements/_base.in, -r requirements/_base.in, aiopg +prometheus-client==0.8.0 # via -r requirements/../../../packages/service-library/requirements/_base.in +protobuf==3.12.2 # via blackfynn +psutil==5.7.0 # via blackfynn +psycopg2-binary==2.8.5 # via -r requirements/../../../packages/service-library/requirements/_base.in, aiopg, sqlalchemy +pyrsistent==0.16.0 # via jsonschema +python-dateutil==2.8.1 # via blackfynn, botocore +pytz==2020.1 # via blackfynn +pyyaml==5.3.1 # via -r requirements/../../../packages/service-library/requirements/_base.in, openapi-spec-validator, trafaret-config +requests==2.24.0 # via blackfynn +s3transfer==0.3.3 # via boto3 +semantic-version==2.8.5 # via -r requirements/_base.in +semver==2.10.2 # via blackfynn +six==1.15.0 # via isodate, jsonschema, openapi-core, openapi-spec-validator, protobuf, pyrsistent, python-dateutil, tenacity, websocket-client +sqlalchemy[postgresql_psycopg2binary]==1.3.18 # via -r requirements/../../../packages/postgres-database/requirements/_base.in, -r requirements/../../../packages/service-library/requirements/_base.in, aiopg strict-rfc3339==0.7 # via openapi-core -tenacity==6.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in, -r requirements/_base.in +tenacity==6.2.0 # via -r requirements/../../../packages/service-library/requirements/_base.in, -r requirements/_base.in trafaret-config==2.0.2 # via -r requirements/_base.in trafaret==2.0.2 # via -r requirements/../../../packages/service-library/requirements/_base.in, -r requirements/_base.in, trafaret-config -typing-extensions==3.7.2 # via aiohttp -ujson==1.35 # via -r requirements/../../../packages/service-library/requirements/_base.in -urllib3==1.25.8 # via -r requirements/_base.in, botocore, requests -websocket-client==0.56.0 # via blackfynn -werkzeug==0.16.0 # via -r requirements/../../../packages/service-library/requirements/_base.in -wrapt==1.11.2 # via aiobotocore, deprecated -yarl==1.3.0 # via -r requirements/../../../packages/postgres-database/requirements/_base.in, aiohttp -zipp==0.6.0 # via importlib-metadata +typing-extensions==3.7.4.2 # via aiohttp, aioitertools +ujson==3.0.0 # via -r requirements/../../../packages/service-library/requirements/_base.in 
+urllib3==1.25.9 # via botocore, requests +websocket-client==0.57.0 # via blackfynn +werkzeug==1.0.1 # via -r requirements/../../../packages/service-library/requirements/_base.in +wrapt==1.12.1 # via aiobotocore, deprecated +yarl==1.4.2 # via -r requirements/../../../packages/postgres-database/requirements/_base.in, aiohttp +zipp==3.1.0 # via importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/storage/requirements/_test.in b/services/storage/requirements/_test.in index 6568ee93ec1..aa9d22b7ea8 100644 --- a/services/storage/requirements/_test.in +++ b/services/storage/requirements/_test.in @@ -6,8 +6,6 @@ -r _base.txt # testing -coverage==4.5.1 # TODO: Downgraded because of a bug https://github.com/nedbat/coveragepy/issues/716 - pytest pytest-aiohttp # incompatible with pytest-asyncio. See https://github.com/pytest-dev/pytest-asyncio/issues/76 pytest-cov @@ -16,13 +14,10 @@ pytest-instafail pytest-mock pytest-runner pytest-sugar - -# fixtures -virtualenv - - -# tools for CI pylint +# test coverage +coverage coveralls codecov +# remote debugging ptvsd diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt index ea3bf2e757f..14c4d4b5ae6 100644 --- a/services/storage/requirements/_test.txt +++ b/services/storage/requirements/_test.txt @@ -4,97 +4,87 @@ # # pip-compile --output-file=requirements/_test.txt requirements/_test.in # -aioboto3==6.4.1 # via -r requirements/_base.txt -aiobotocore[boto3]==0.10.4 # via -r requirements/_base.txt, aioboto3 +aiobotocore==1.0.7 # via -r requirements/_base.txt aiodebug==1.1.2 # via -r requirements/_base.txt -aiofiles==0.4.0 # via -r requirements/_base.txt +aiofiles==0.5.0 # via -r requirements/_base.txt aiohttp==3.6.2 # via -r requirements/_base.txt, aiobotocore, aiozipkin, pytest-aiohttp +aioitertools==0.7.0 # via -r requirements/_base.txt, aiobotocore aiopg[sa]==1.0.0 # via -r requirements/_base.txt aiozipkin==0.6.0 # via -r requirements/_base.txt -apipkg==1.5 # via -r requirements/_base.txt, execnet -appdirs==1.4.4 # via virtualenv astroid==2.4.2 # via pylint -async-generator==1.10 # via -r requirements/_base.txt, aiobotocore async-timeout==3.0.1 # via -r requirements/_base.txt, aiohttp -attrs==19.1.0 # via -r requirements/_base.txt, aiohttp, jsonschema, openapi-core, pytest, pytest-docker -blackfynn==2.11.1 # via -r requirements/_base.txt -boto3==1.9.252 # via -r requirements/_base.txt, aiobotocore, blackfynn -botocore==1.12.252 # via -r requirements/_base.txt, aiobotocore, boto3, s3transfer -certifi==2019.3.9 # via -r requirements/_base.txt, requests +attrs==19.3.0 # via -r requirements/_base.txt, aiohttp, jsonschema, openapi-core, pytest, pytest-docker +blackfynn==3.6.3 # via -r requirements/_base.txt +boto3==1.12.32 # via -r requirements/_base.txt, blackfynn +botocore==1.15.32 # via -r requirements/_base.txt, aiobotocore, boto3, s3transfer +certifi==2020.6.20 # via -r requirements/_base.txt, requests chardet==3.0.4 # via -r requirements/_base.txt, aiohttp, requests codecov==2.1.7 # via -r requirements/_test.in -configparser==3.7.4 # via -r requirements/_base.txt, blackfynn -coverage==4.5.1 # via -r requirements/_test.in, codecov, coveralls, pytest-cov -coveralls==2.1.1 # via -r requirements/_test.in -deprecated==1.2.5 # via -r requirements/_base.txt, blackfynn -distlib==0.3.1 # via virtualenv +configparser==5.0.0 # via -r requirements/_base.txt, blackfynn +coverage==5.1 # via -r requirements/_test.in, codecov, coveralls, 
pytest-cov +coveralls==2.0.0 # via -r requirements/_test.in +deprecated==1.2.10 # via -r requirements/_base.txt, blackfynn docopt==0.6.2 # via -r requirements/_base.txt, blackfynn, coveralls docutils==0.15.2 # via -r requirements/_base.txt, botocore -execnet==1.6.0 # via -r requirements/_base.txt -filelock==3.0.12 # via virtualenv -future==0.17.1 # via -r requirements/_base.txt, blackfynn +future==0.18.2 # via -r requirements/_base.txt, blackfynn idna-ssl==1.1.0 # via -r requirements/_base.txt, aiohttp -idna==2.8 # via -r requirements/_base.txt, idna-ssl, requests, yarl -importlib-metadata==1.3.0 # via -r requirements/_base.txt, jsonschema, pluggy, pytest, virtualenv -importlib-resources==3.0.0 # via virtualenv +idna==2.9 # via -r requirements/_base.txt, idna-ssl, requests, yarl +importlib-metadata==1.6.1 # via -r requirements/_base.txt, jsonschema, pluggy, pytest isodate==0.6.0 # via -r requirements/_base.txt, openapi-core isort==4.3.21 # via pylint -jmespath==0.9.4 # via -r requirements/_base.txt, boto3, botocore +jmespath==0.10.0 # via -r requirements/_base.txt, boto3, botocore jsonschema==3.2.0 # via -r requirements/_base.txt, openapi-spec-validator lazy-object-proxy==1.4.3 # via -r requirements/_base.txt, astroid, openapi-core -marshmallow==2.19.2 # via -r requirements/_base.txt +marshmallow==3.6.1 # via -r requirements/_base.txt mccabe==0.6.1 # via pylint -more-itertools==8.0.2 # via -r requirements/_base.txt, pytest, zipp -multidict==4.5.2 # via -r requirements/_base.txt, aiohttp, yarl -numpy==1.16.3 # via -r requirements/_base.txt, blackfynn, pandas +more-itertools==8.4.0 # via pytest +multidict==4.7.6 # via -r requirements/_base.txt, aiohttp, yarl openapi-core==0.12.0 # via -r requirements/_base.txt openapi-spec-validator==0.2.8 # via -r requirements/_base.txt, openapi-core packaging==20.4 # via pytest, pytest-sugar -pandas==0.24.2 # via -r requirements/_base.txt, blackfynn pluggy==0.13.1 # via pytest -prometheus-client==0.7.1 # via -r requirements/_base.txt -protobuf==3.2.0 # via -r requirements/_base.txt, blackfynn +prometheus-client==0.8.0 # via -r requirements/_base.txt +protobuf==3.12.2 # via -r requirements/_base.txt, blackfynn psutil==5.7.0 # via -r requirements/_base.txt, blackfynn -psycopg2-binary==2.8.4 # via -r requirements/_base.txt, aiopg, sqlalchemy +psycopg2-binary==2.8.5 # via -r requirements/_base.txt, aiopg, sqlalchemy ptvsd==4.3.2 # via -r requirements/_test.in py==1.9.0 # via pytest pylint==2.5.3 # via -r requirements/_test.in pyparsing==2.4.7 # via packaging -pyrsistent==0.15.6 # via -r requirements/_base.txt, jsonschema +pyrsistent==0.16.0 # via -r requirements/_base.txt, jsonschema pytest-aiohttp==0.3.0 # via -r requirements/_test.in pytest-cov==2.10.0 # via -r requirements/_test.in pytest-docker==0.7.2 # via -r requirements/_test.in pytest-instafail==0.4.2 # via -r requirements/_test.in -pytest-mock==3.2.0 # via -r requirements/_test.in +pytest-mock==3.1.1 # via -r requirements/_test.in pytest-runner==5.2 # via -r requirements/_test.in -pytest-sugar==0.9.4 # via -r requirements/_test.in +pytest-sugar==0.9.3 # via -r requirements/_test.in pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar -python-dateutil==2.8.0 # via -r requirements/_base.txt, botocore, pandas -pytz==2019.1 # via -r requirements/_base.txt, blackfynn, pandas -pyyaml==5.3 # via -r requirements/_base.txt, openapi-spec-validator, trafaret-config -requests==2.22.0 # via -r requirements/_base.txt, blackfynn, codecov, coveralls 
-s3transfer==0.2.1 # via -r requirements/_base.txt, boto3 -semantic-version==2.6.0 # via -r requirements/_base.txt -semver==2.8.1 # via -r requirements/_base.txt, blackfynn -six==1.12.0 # via -r requirements/_base.txt, astroid, isodate, jsonschema, openapi-core, openapi-spec-validator, packaging, protobuf, pyrsistent, python-dateutil, tenacity, virtualenv, websocket-client -sqlalchemy[postgresql_psycopg2binary]==1.3.3 # via -r requirements/_base.txt, aiopg +python-dateutil==2.8.1 # via -r requirements/_base.txt, blackfynn, botocore +pytz==2020.1 # via -r requirements/_base.txt, blackfynn +pyyaml==5.3.1 # via -r requirements/_base.txt, openapi-spec-validator, trafaret-config +requests==2.24.0 # via -r requirements/_base.txt, blackfynn, codecov, coveralls +s3transfer==0.3.3 # via -r requirements/_base.txt, boto3 +semantic-version==2.8.5 # via -r requirements/_base.txt +semver==2.10.2 # via -r requirements/_base.txt, blackfynn +six==1.15.0 # via -r requirements/_base.txt, astroid, isodate, jsonschema, openapi-core, openapi-spec-validator, packaging, protobuf, pyrsistent, python-dateutil, tenacity, websocket-client +sqlalchemy[postgresql_psycopg2binary]==1.3.18 # via -r requirements/_base.txt, aiopg strict-rfc3339==0.7 # via -r requirements/_base.txt, openapi-core -tenacity==6.0.0 # via -r requirements/_base.txt +tenacity==6.2.0 # via -r requirements/_base.txt termcolor==1.1.0 # via pytest-sugar toml==0.10.1 # via pylint trafaret-config==2.0.2 # via -r requirements/_base.txt trafaret==2.0.2 # via -r requirements/_base.txt, trafaret-config typed-ast==1.4.1 # via astroid -typing-extensions==3.7.2 # via -r requirements/_base.txt, aiohttp -ujson==1.35 # via -r requirements/_base.txt -urllib3==1.25.8 # via -r requirements/_base.txt, botocore, requests -virtualenv==20.0.26 # via -r requirements/_test.in +typing-extensions==3.7.4.2 # via -r requirements/_base.txt, aiohttp, aioitertools +ujson==3.0.0 # via -r requirements/_base.txt +urllib3==1.25.9 # via -r requirements/_base.txt, botocore, requests wcwidth==0.2.5 # via pytest -websocket-client==0.56.0 # via -r requirements/_base.txt, blackfynn -werkzeug==0.16.0 # via -r requirements/_base.txt -wrapt==1.11.2 # via -r requirements/_base.txt, aiobotocore, astroid, deprecated -yarl==1.3.0 # via -r requirements/_base.txt, aiohttp -zipp==0.6.0 # via -r requirements/_base.txt, importlib-metadata, importlib-resources +websocket-client==0.57.0 # via -r requirements/_base.txt, blackfynn +werkzeug==1.0.1 # via -r requirements/_base.txt +wrapt==1.12.1 # via -r requirements/_base.txt, aiobotocore, astroid, deprecated +yarl==1.4.2 # via -r requirements/_base.txt, aiohttp +zipp==3.1.0 # via -r requirements/_base.txt, importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/services/storage/requirements/python-with-pandas_89f709.txt b/services/storage/requirements/python-with-pandas_89f709.txt deleted file mode 100644 index 9adaccff774..00000000000 --- a/services/storage/requirements/python-with-pandas_89f709.txt +++ /dev/null @@ -1,8 +0,0 @@ -# -# Taken from https://github.com/ITISFoundation/dockerfiles/blob/89f70938b66c0e4c1966c9a62da7a6dc89778f45/python-with-pandas/3.6-alpine/requirements.txt -# -numpy==1.16.3 -pandas==0.24.2 -python-dateutil==2.8.0 # via pandas -pytz==2019.1 # via pandas -six==1.12.0 # via python-dateutil From 68996fc692de4a407c9c253fc83de4de3ffe0049 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Thu, 25 Jun 2020 21:23:17 +0200 
Subject: [PATCH 03/20] Minor fixes to pass tests w/o errors or warnings --- services/storage/Makefile | 6 +++--- services/storage/src/simcore_service_storage/dsm.py | 13 +++++-------- services/storage/tests/test_configs.py | 8 ++++---- 3 files changed, 12 insertions(+), 15 deletions(-) diff --git a/services/storage/Makefile b/services/storage/Makefile index 5c312b658e9..e7306162cf7 100644 --- a/services/storage/Makefile +++ b/services/storage/Makefile @@ -3,7 +3,7 @@ # include ../../scripts/common.Makefile -APP_NAME := $(notdir $(CURDIR)) +APP_NAME := $(notdir $(CURDIR)) .PHONY: openapi-specs openapi-specs: ## updates and validates openapi specifications @@ -22,6 +22,6 @@ tests: ## runs unit tests @pytest -vv --exitfirst --failed-first --durations=10 --pdb $(CURDIR)/tests -.PHONY: build -build: openapi-specs ## builds docker image (using main services/docker-compose-build.yml) +.PHONY: build build-devel +build build-devel: openapi-specs ## builds docker image (using main services/docker-compose-build.yml) @$(MAKE) -C ${REPO_BASE_DIR} $@ target=${APP_NAME} diff --git a/services/storage/src/simcore_service_storage/dsm.py b/services/storage/src/simcore_service_storage/dsm.py index 668724589ca..a479b9b3c97 100644 --- a/services/storage/src/simcore_service_storage/dsm.py +++ b/services/storage/src/simcore_service_storage/dsm.py @@ -279,9 +279,8 @@ async def list_files( # same as above, make sure file is physically present on s3 clean_data = [] - # MaG: This is inefficient: Do this automatically when file is modified - _loop = asyncio.get_event_loop() - session = aiobotocore.get_session(loop=_loop) + # TODO: MaG: This is inefficient: Do this automatically when file is modified + session = aiobotocore.get_session() async with session.create_client( "s3", endpoint_url=self.s3_client.endpoint_url, @@ -636,8 +635,7 @@ async def deep_copy_project_simcore_s3( uuid_name_dict[new_node_id] = src_node["label"] # Step 1: List all objects for this project replace them with the destination object name and do a copy at the same time collect some names - _loop = asyncio.get_event_loop() - session = aiobotocore.get_session(loop=_loop) + session = aiobotocore.get_session() async with session.create_client( "s3", endpoint_url=self.s3_client.endpoint_url, @@ -701,7 +699,7 @@ async def deep_copy_project_simcore_s3( output["path"] = dest # step 3: list files first to create fmds - session = aiobotocore.get_session(loop=_loop) + session = aiobotocore.get_session() fmds = [] async with session.create_client( "s3", @@ -762,8 +760,7 @@ async def delete_project_simcore_s3( delete_me = delete_me.where(file_meta_data.c.node_id == node_id) await conn.execute(delete_me) - _loop = asyncio.get_event_loop() - session = aiobotocore.get_session(loop=_loop) + session = aiobotocore.get_session() async with session.create_client( "s3", endpoint_url=self.s3_client.endpoint_url, diff --git a/services/storage/tests/test_configs.py b/services/storage/tests/test_configs.py index 052271fc912..8a640b5d629 100644 --- a/services/storage/tests/test_configs.py +++ b/services/storage/tests/test_configs.py @@ -16,21 +16,21 @@ CONFIG_DIR = "data" -@pytest.fixture("session") +@pytest.fixture(scope="session") def env_devel_file(osparc_simcore_root_dir): env_devel_fpath = osparc_simcore_root_dir / ".env-devel" assert env_devel_fpath.exists() return env_devel_fpath -@pytest.fixture("session") +@pytest.fixture(scope="session") def services_docker_compose_file(osparc_simcore_root_dir): dcpath = osparc_simcore_root_dir / "services" / 
"docker-compose.yml" assert dcpath.exists() return dcpath -@pytest.fixture("session") +@pytest.fixture(scope="session") def devel_environ(env_devel_file): PATTERN_ENVIRON_EQUAL = re.compile(r"^(\w+)=(.*)$") env_devel = {} @@ -69,7 +69,7 @@ def test_variable_expansions(sample, expected_match): assert not match -@pytest.fixture("session") +@pytest.fixture(scope="session") def container_environ( services_docker_compose_file, devel_environ, osparc_simcore_root_dir ): From 85d9daa61459b6a090d1c261bec8d7abb86f0375 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Thu, 25 Jun 2020 21:38:15 +0200 Subject: [PATCH 04/20] Added more filters to autoformat/isort --- scripts/common.Makefile | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/scripts/common.Makefile b/scripts/common.Makefile index eb8b6b5b7c1..0bebf0b83eb 100644 --- a/scripts/common.Makefile +++ b/scripts/common.Makefile @@ -94,7 +94,12 @@ info: ## displays basic info .PHONY: autoformat autoformat: ## runs black python formatter on this service's code. Use AFTER make install-* # sort imports - @python3 -m isort --atomic -rc $(CURDIR) + @python3 -m isort --verbose \ + --atomic \ + --recursive \ + --skip-glob */client-sdk/* \ + --skip-glob */migration/* \ + $(CURDIR) # auto formatting with black @python3 -m black --verbose \ --exclude "/(\.eggs|\.git|\.hg|\.mypy_cache|\.nox|\.tox|\.venv|\.svn|_build|buck-out|build|dist|migration|client-sdk|generated_code)/" \ From b748fbee3e5271f7593c0752316c36208e34e900 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Thu, 25 Jun 2020 21:38:34 +0200 Subject: [PATCH 05/20] Formatted --- services/storage/docker/healthcheck.py | 1 - .../src/simcore_service_storage/__version__.py | 1 - .../storage/src/simcore_service_storage/datcore.py | 3 ++- .../src/simcore_service_storage/datcore_wrapper.py | 4 +--- services/storage/src/simcore_service_storage/db.py | 3 ++- services/storage/src/simcore_service_storage/dsm.py | 3 +-- .../storage/src/simcore_service_storage/models.py | 4 ++-- services/storage/src/simcore_service_storage/rest.py | 1 - .../src/simcore_service_storage/rest_config.py | 1 + services/storage/src/simcore_service_storage/s3.py | 3 ++- services/storage/tests/test_package.py | 11 ++++------- services/storage/tests/utils.py | 4 ++-- 12 files changed, 17 insertions(+), 22 deletions(-) diff --git a/services/storage/docker/healthcheck.py b/services/storage/docker/healthcheck.py index 5a5edba5230..3faa018b291 100644 --- a/services/storage/docker/healthcheck.py +++ b/services/storage/docker/healthcheck.py @@ -18,7 +18,6 @@ import os import sys - from urllib.request import urlopen SUCCESS, UNHEALTHY = 0, 1 diff --git a/services/storage/src/simcore_service_storage/__version__.py b/services/storage/src/simcore_service_storage/__version__.py index c839c64ff0c..d6ecd01f9df 100644 --- a/services/storage/src/simcore_service_storage/__version__.py +++ b/services/storage/src/simcore_service_storage/__version__.py @@ -2,7 +2,6 @@ """ import pkg_resources - from semantic_version import Version __version__: str = pkg_resources.get_distribution("simcore_service_storage").version diff --git a/services/storage/src/simcore_service_storage/datcore.py b/services/storage/src/simcore_service_storage/datcore.py index ddc72bc8cd0..f8bd5efdfca 100644 --- a/services/storage/src/simcore_service_storage/datcore.py +++ b/services/storage/src/simcore_service_storage/datcore.py @@ -14,6 +14,7 @@ from blackfynn import Blackfynn 
from blackfynn.models import BaseCollection, Collection, DataPackage + from simcore_service_storage.models import DatasetMetaData, FileMetaData, FileMetaDataEx from simcore_service_storage.settings import DATCORE_ID, DATCORE_STR @@ -345,7 +346,7 @@ def download_file(self, source, filename, destination_path): url = self.download_link(source, filename) if url: - _file = urllib.URLopener() # nosec + _file = urllib.URLopener() # nosec _file.retrieve(url, destination_path) return True return False diff --git a/services/storage/src/simcore_service_storage/datcore_wrapper.py b/services/storage/src/simcore_service_storage/datcore_wrapper.py index 46c756b29b6..400fd410cfe 100644 --- a/services/storage/src/simcore_service_storage/datcore_wrapper.py +++ b/services/storage/src/simcore_service_storage/datcore_wrapper.py @@ -74,9 +74,7 @@ def __init__( ) except Exception: self.d_client = None # Disabled: any call will raise AttributeError - logger.warning( - "Failed to setup datcore. Disabling client.", exc_info=True - ) + logger.warning("Failed to setup datcore. Disabling client.", exc_info=True) @property def is_communication_enabled(self) -> bool: diff --git a/services/storage/src/simcore_service_storage/db.py b/services/storage/src/simcore_service_storage/db.py index 59119b3d657..623d266f273 100644 --- a/services/storage/src/simcore_service_storage/db.py +++ b/services/storage/src/simcore_service_storage/db.py @@ -1,6 +1,8 @@ import logging from aiohttp import web +from tenacity import Retrying + from servicelib.aiopg_utils import ( DataSourceName, PostgresRetryPolicyUponInitialization, @@ -9,7 +11,6 @@ is_pg_responsive, raise_if_not_responsive, ) -from tenacity import Retrying from .models import metadata from .settings import APP_CONFIG_KEY, APP_DB_ENGINE_KEY diff --git a/services/storage/src/simcore_service_storage/dsm.py b/services/storage/src/simcore_service_storage/dsm.py index a479b9b3c97..abd345e8d0a 100644 --- a/services/storage/src/simcore_service_storage/dsm.py +++ b/services/storage/src/simcore_service_storage/dsm.py @@ -6,7 +6,7 @@ import tempfile from concurrent.futures import ThreadPoolExecutor from pathlib import Path -from typing import Dict, List, Tuple, Optional +from typing import Dict, List, Optional, Tuple import aiobotocore import aiofiles @@ -44,7 +44,6 @@ SIMCORE_S3_STR, ) - # pylint: disable=no-value-for-parameter # FIXME: E1120:No value for argument 'dml' in method call diff --git a/services/storage/src/simcore_service_storage/models.py b/services/storage/src/simcore_service_storage/models.py index 2954b9397a5..d3d6959fb0d 100644 --- a/services/storage/src/simcore_service_storage/models.py +++ b/services/storage/src/simcore_service_storage/models.py @@ -9,12 +9,12 @@ from simcore_postgres_database.storage_models import ( file_meta_data, + groups, metadata, projects, tokens, - users, - groups, user_to_groups, + users, ) from simcore_service_storage.settings import DATCORE_STR, SIMCORE_S3_ID, SIMCORE_S3_STR diff --git a/services/storage/src/simcore_service_storage/rest.py b/services/storage/src/simcore_service_storage/rest.py index 05e3a989b05..6238f2cc5e4 100644 --- a/services/storage/src/simcore_service_storage/rest.py +++ b/services/storage/src/simcore_service_storage/rest.py @@ -14,7 +14,6 @@ from .resources import resources from .settings import APP_OPENAPI_SPECS_KEY - log = logging.getLogger(__name__) diff --git a/services/storage/src/simcore_service_storage/rest_config.py b/services/storage/src/simcore_service_storage/rest_config.py index c05e30c90ea..75f9a315298 
100644 --- a/services/storage/src/simcore_service_storage/rest_config.py +++ b/services/storage/src/simcore_service_storage/rest_config.py @@ -4,6 +4,7 @@ - settings """ import trafaret as T + from servicelib.config_schema_utils import minimal_addon_schema from .settings import APP_OPENAPI_SPECS_KEY diff --git a/services/storage/src/simcore_service_storage/s3.py b/services/storage/src/simcore_service_storage/s3.py index f54bc7d5a81..918c0300a70 100644 --- a/services/storage/src/simcore_service_storage/s3.py +++ b/services/storage/src/simcore_service_storage/s3.py @@ -6,9 +6,10 @@ from typing import Dict from aiohttp import web -from s3wrapper.s3_client import S3Client from tenacity import before_sleep_log, retry, stop_after_attempt, wait_fixed +from s3wrapper.s3_client import S3Client + from .settings import APP_CONFIG_KEY, APP_S3_KEY from .utils import RETRY_COUNT, RETRY_WAIT_SECS diff --git a/services/storage/tests/test_package.py b/services/storage/tests/test_package.py index 634ae56c65d..921ab481c38 100644 --- a/services/storage/tests/test_package.py +++ b/services/storage/tests/test_package.py @@ -1,14 +1,11 @@ -# TODO: W0611:Unused import ... -# pylint: disable=W0611 -# TODO: W0613:Unused argument ... -# pylint: disable=W0613 -# W0621: Redefining name ... from outer scope -# pylint: disable=W0621 +# pylint:disable=unused-variable +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name -from pytest_simcore.helpers.utils_pylint import assert_pylint_is_passing import pytest +from pytest_simcore.helpers.utils_pylint import assert_pylint_is_passing from simcore_service_storage.cli import main diff --git a/services/storage/tests/utils.py b/services/storage/tests/utils.py index 45086dba81b..e53b9f8b86b 100644 --- a/services/storage/tests/utils.py +++ b/services/storage/tests/utils.py @@ -11,10 +11,10 @@ from simcore_service_storage.models import ( FileMetaData, file_meta_data, - projects, - users, groups, + projects, user_to_groups, + users, ) log = logging.getLogger(__name__) From b389e1d735b999f4895c6884769013f6b5e5d0c1 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Thu, 25 Jun 2020 21:55:59 +0200 Subject: [PATCH 06/20] Adds pandas in test reqs --- services/storage/requirements/_test.in | 5 +++++ services/storage/requirements/_test.txt | 6 ++++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/services/storage/requirements/_test.in b/services/storage/requirements/_test.in index aa9d22b7ea8..04fd8d28583 100644 --- a/services/storage/requirements/_test.in +++ b/services/storage/requirements/_test.in @@ -15,9 +15,14 @@ pytest-mock pytest-runner pytest-sugar pylint + # test coverage coverage coveralls codecov + # remote debugging ptvsd + +# utils +pandas diff --git a/services/storage/requirements/_test.txt b/services/storage/requirements/_test.txt index 14c4d4b5ae6..1fc6f0da219 100644 --- a/services/storage/requirements/_test.txt +++ b/services/storage/requirements/_test.txt @@ -39,9 +39,11 @@ marshmallow==3.6.1 # via -r requirements/_base.txt mccabe==0.6.1 # via pylint more-itertools==8.4.0 # via pytest multidict==4.7.6 # via -r requirements/_base.txt, aiohttp, yarl +numpy==1.19.0 # via pandas openapi-core==0.12.0 # via -r requirements/_base.txt openapi-spec-validator==0.2.8 # via -r requirements/_base.txt, openapi-core packaging==20.4 # via pytest, pytest-sugar +pandas==1.0.5 # via -r requirements/_test.in pluggy==0.13.1 # via pytest prometheus-client==0.8.0 # via -r requirements/_base.txt protobuf==3.12.2 # via 
-r requirements/_base.txt, blackfynn @@ -60,8 +62,8 @@ pytest-mock==3.1.1 # via -r requirements/_test.in pytest-runner==5.2 # via -r requirements/_test.in pytest-sugar==0.9.3 # via -r requirements/_test.in pytest==5.4.3 # via -r requirements/_test.in, pytest-aiohttp, pytest-cov, pytest-instafail, pytest-mock, pytest-sugar -python-dateutil==2.8.1 # via -r requirements/_base.txt, blackfynn, botocore -pytz==2020.1 # via -r requirements/_base.txt, blackfynn +python-dateutil==2.8.1 # via -r requirements/_base.txt, blackfynn, botocore, pandas +pytz==2020.1 # via -r requirements/_base.txt, blackfynn, pandas pyyaml==5.3.1 # via -r requirements/_base.txt, openapi-spec-validator, trafaret-config requests==2.24.0 # via -r requirements/_base.txt, blackfynn, codecov, coveralls s3transfer==0.3.3 # via -r requirements/_base.txt, boto3 From 2f6054a40af9562479fba64e122fcd3fd4f262b3 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Thu, 25 Jun 2020 23:00:51 +0200 Subject: [PATCH 07/20] Fixes for codelimate --- .../src/simcore_service_storage/dsm.py | 45 +++++++++---------- 1 file changed, 22 insertions(+), 23 deletions(-) diff --git a/services/storage/src/simcore_service_storage/dsm.py b/services/storage/src/simcore_service_storage/dsm.py index abd345e8d0a..8175d2a2808 100644 --- a/services/storage/src/simcore_service_storage/dsm.py +++ b/services/storage/src/simcore_service_storage/dsm.py @@ -673,29 +673,28 @@ async def deep_copy_project_simcore_s3( # Step 2: List all references in outputs that point to datcore and copy over for node_id, node in destination_project["workbench"].items(): - outputs = node.get("outputs") - if outputs is not None: - for _output_key, output in outputs.items(): - if "store" in output and output["store"] == DATCORE_ID: - src = output["path"] - dest = str(Path(dest_folder) / node_id) - logger.info("Need to copy %s to %s", src, dest) - dest = await self.copy_file_datcore_s3( - user_id=user_id, - dest_uuid=dest, - source_uuid=src, - filename_missing=True, - ) - # and change the dest project accordingly - output["store"] = SIMCORE_S3_ID - output["path"] = dest - elif "store" in output and output["store"] == SIMCORE_S3_ID: - source = output["path"] - dest = dest = str( - Path(dest_folder) / node_id / Path(source).name - ) - output["store"] = SIMCORE_S3_ID - output["path"] = dest + outputs = node.get("outputs", []) + for _output_key, output in outputs.items(): + if "store" in output and output["store"] == DATCORE_ID: + src = output["path"] + dest = str(Path(dest_folder) / node_id) + logger.info("Need to copy %s to %s", src, dest) + dest = await self.copy_file_datcore_s3( + user_id=user_id, + dest_uuid=dest, + source_uuid=src, + filename_missing=True, + ) + # and change the dest project accordingly + output["store"] = SIMCORE_S3_ID + output["path"] = dest + elif "store" in output and output["store"] == SIMCORE_S3_ID: + source = output["path"] + dest = dest = str( + Path(dest_folder) / node_id / Path(source).name + ) + output["store"] = SIMCORE_S3_ID + output["path"] = dest # step 3: list files first to create fmds session = aiobotocore.get_session() From 37ac1545cf818d75b850127883ae1310df42190c Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Thu, 25 Jun 2020 23:01:04 +0200 Subject: [PATCH 08/20] Add code-analysis in makefiles --- .gitignore | 4 ++++ scripts/common.Makefile | 12 ++++++++++++ 2 files changed, 16 insertions(+) diff --git a/.gitignore b/.gitignore index 37d25e34ac0..961056331f0 
100644 --- a/.gitignore +++ b/.gitignore @@ -151,3 +151,7 @@ prof/ # outputs from make .stack-*.yml + + +# Copies +services/**/.codeclimate.yml diff --git a/scripts/common.Makefile b/scripts/common.Makefile index 0bebf0b83eb..2553142dc4b 100644 --- a/scripts/common.Makefile +++ b/scripts/common.Makefile @@ -109,6 +109,18 @@ autoformat: ## runs black python formatter on this service's code. Use AFTER mak mypy: $(REPO_BASE_DIR)/scripts/mypy.bash $(REPO_BASE_DIR)/mypy.ini ## runs mypy python static type checker on this services's code. Use AFTER make install-* @$(REPO_BASE_DIR)/scripts/mypy.bash src +.PHONY: code-analysis +code-analysis: $(REPO_BASE_DIR)/.codeclimate.yml ## runs code-climate analysis + # Copying config + cp $(REPO_BASE_DIR)/.codeclimate.yml $(CURDIR)/.codeclimate.yml + # Validates $< at ${PWD} + $(REPO_BASE_DIR)/scripts/code-climate.bash validate-config + # Running analysis + $(REPO_BASE_DIR)/scripts/code-climate.bash analyze + # Removing tmp config + @-rm $(CURDIR)/.codeclimate.yml + + .PHONY: version-patch version-minor version-major version-patch: ## commits version with bug fixes not affecting the cookiecuter config $(_bumpversion) From f5ee986b7da701d57ff64f1f1408861515e00f22 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 26 Jun 2020 01:21:11 +0200 Subject: [PATCH 09/20] Minor update codegen.sh --- services/storage/client-sdk/codegen.sh | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/services/storage/client-sdk/codegen.sh b/services/storage/client-sdk/codegen.sh index c822f095bf9..c09f847389f 100755 --- a/services/storage/client-sdk/codegen.sh +++ b/services/storage/client-sdk/codegen.sh @@ -1,9 +1,9 @@ -#/bin/bash +#! /bin/bash ../../../scripts/openapi/openapi_codegen.sh \ - -i ../src/simcore_service_storage/oas3/v0/openapi.yaml \ - -o . \ - -g python \ - -c ./codegen_config.json + -i ../src/simcore_service_storage/api/v0/openapi.yaml \ + -o . \ + -g python \ + -c ./codegen_config.json -# rm -f output.yaml \ No newline at end of file +# rm -f output.yaml From 9ac21458f2f0983d94ab99d00926f7de6fa9aad3 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Fri, 26 Jun 2020 01:24:06 +0200 Subject: [PATCH 10/20] Added alternative call to generator --- services/storage/client-sdk/Makefile | 72 ++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) create mode 100644 services/storage/client-sdk/Makefile diff --git a/services/storage/client-sdk/Makefile b/services/storage/client-sdk/Makefile new file mode 100644 index 00000000000..8745ea99d39 --- /dev/null +++ b/services/storage/client-sdk/Makefile @@ -0,0 +1,72 @@ +# AUTO-GENERATION of python client SDK +# +# +# SEE https://openapi-generator.tech/docs/usage#generate +# SEE https://openapi-generator.tech/docs/generators/python +# + +# +# WARNING: DO NOT USE until codegen.sh is deprecated! +# TODO: Current generator will NOT produce the same client-sdk code since +# bundling openapi.yaml did not preserve e.g. the same schema models !! 
+#
+
+.DEFAULT_GOAL := generator-help
+
+
+PACKAGE_NAME := simcore_service_storage_sdk
+
+GIT_USER_ID := ITISFoundation
+GIT_REPO_ID := osparc-simcore
+
+GENERATOR_NAME := python
+
+REPO_BASE_DIR := $(shell git rev-parse --show-toplevel)
+SCRIPTS_DIR := $(abspath $(REPO_BASE_DIR)/scripts)
+OAS_PATH := $(abspath $(CURDIR)/../src/simcore_service_storage/api/v0/openapi.yaml)
+REL_CURDIR := $(subst $(REPO_BASE_DIR)/,,$(CURDIR))
+FRAGMENT := \#egg=$(PACKAGE_NAME)&subdirectory=$(REL_CURDIR)
+
+ADDITIONAL_PROPS := \
+	generateSourceCodeOnly=false\
+	hideGenerationTimestamp=true\
+	library=asyncio\
+	packageName=$(PACKAGE_NAME)\
+	packageUrl="https://github.com/$(GIT_USER_ID)/${GIT_REPO_ID}.git$(FRAGMENT)"\
+	packageVersion=$(APP_VERSION)\
+	projectName=simcore-service-storage-sdk\
+	projectDescription="Data storage manager service client's SDK"\
+
+ADDITIONAL_PROPS := $(foreach prop,$(ADDITIONAL_PROPS),$(strip $(prop)))
+
+null :=
+space := $(null) #
+comma := ,
+
+
+.PHONY: python-client generator-help
+
+openapi.yaml: $(OAS_PATH)
+	cp $< $@
+
+python-client: openapi.yaml ## runs python client generator
+	cd $(CURDIR); \
+	$(SCRIPTS_DIR)/openapi-generator-cli.bash generate \
+		--generator-name=$(GENERATOR_NAME) \
+		--git-user-id=$(GIT_USER_ID)\
+		--git-repo-id=$(GIT_REPO_ID)\
+		--http-user-agent="$(PACKAGE_NAME)/{packageVersion}/{language}"\
+		--input-spec=/local/$< \
+		--output=/local/$@ \
+		--additional-properties=$(subst $(space),$(comma),$(strip $(ADDITIONAL_PROPS)))\
+		--package-name=$(PACKAGE_NAME)\
+		--release-note="Updated to $(APP_VERSION)"
+	# deleting $<
+	-@rm $<
+
+
+generator-help: ## help on client-api generator
+	# generate help
+	@$(SCRIPTS_DIR)/openapi-generator-cli.bash help generate
+	# generator config help
+	@$(SCRIPTS_DIR)/openapi-generator-cli.bash config-help -g $(GENERATOR_NAME)

From 1fa50b1d6ea9d0aabd42f998d6db4d317524790e Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Fri, 26 Jun 2020 19:52:37 +0200
Subject: [PATCH 11/20] Fixes mock in test_datcore

---
 services/storage/tests/test_datcore.py | 15 ++++++++-------
 services/storage/tests/utils.py        |  4 ++--
 2 files changed, 10 insertions(+), 9 deletions(-)

diff --git a/services/storage/tests/test_datcore.py b/services/storage/tests/test_datcore.py
index 0bc6244b98a..1ff881db95b 100644
--- a/services/storage/tests/test_datcore.py
+++ b/services/storage/tests/test_datcore.py
@@ -2,7 +2,6 @@
 # pylint:disable=unused-argument
 # pylint:disable=redefined-outer-name

-import importlib
 import os
 from concurrent.futures import ThreadPoolExecutor
 from pathlib import Path
@@ -10,18 +9,20 @@
 import pytest
 import utils
-from simcore_service_storage import datcore
 from simcore_service_storage.datcore_wrapper import DatcoreWrapper


 @pytest.fixture()
 def mocked_blackfynn_unavailable(mocker):
     def raise_error(*args, **kargs):
-        raise RuntimeError("mocked_blackfynn_unavailable")
-
-    mock = mocker.patch("blackfynn.Blackfynn", raise_error)
-    importlib.reload(datcore)
-    return mock
+        raise RuntimeError(
+            "Emulating unavailable services."
+            "Error raised upon construction of Blackfynn. "
+            "SEE pytest.fixture mocked_blackfynn_unavailable"
+        )
+
+    mock = mocker.patch("simcore_service_storage.datcore.Blackfynn", raise_error)
+    yield mock


 async def test_datcore_unavailable(loop, mocked_blackfynn_unavailable):
diff --git a/services/storage/tests/utils.py b/services/storage/tests/utils.py
index e53b9f8b86b..a31da01aeea 100644
--- a/services/storage/tests/utils.py
+++ b/services/storage/tests/utils.py
@@ -40,8 +40,8 @@ def data_dir():


 def has_datcore_tokens() -> bool:
-    token = os.environ.get("BF_API_KEY", "none")
-    if token == "none":
+    token = os.environ.get("BF_API_KEY")
+    if not token:
         pytest.skip("Datcore access tokens not available, skipping test")
         return False
     return True

From 8919db8df4bad9d14d996ad920a7f160c99aa1bd Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Fri, 26 Jun 2020 19:52:50 +0200
Subject: [PATCH 12/20] Minor

---
 services/storage/Dockerfile                             | 1 +
 services/storage/src/simcore_service_storage/datcore.py | 3 ++-
 services/storage/tests/utils.py                         | 9 ++++++---
 3 files changed, 9 insertions(+), 4 deletions(-)

diff --git a/services/storage/Dockerfile b/services/storage/Dockerfile
index 33fa6144763..9c39bc2fb70 100644
--- a/services/storage/Dockerfile
+++ b/services/storage/Dockerfile
@@ -156,6 +156,7 @@ FROM build as development

 ENV SC_BUILD_TARGET=development \
     SC_DEVEL_MOUNT=/devel/services/storage/

+WORKDIR /devel

 RUN chown -R scu:scu ${VIRTUAL_ENV}

diff --git a/services/storage/src/simcore_service_storage/datcore.py b/services/storage/src/simcore_service_storage/datcore.py
index f8bd5efdfca..08c721fe51e 100644
--- a/services/storage/src/simcore_service_storage/datcore.py
+++ b/services/storage/src/simcore_service_storage/datcore.py
@@ -46,7 +46,8 @@ def _get_collection_id(

 class DatcoreClient(object):
     def __init__(self, api_token=None, api_secret=None, host=None, streaming_host=None):
-        # WARNING: contruction raise exception if service is not available. Use datacore_wrapper for safe calls
+        # WARNING: construction raises an exception if the service is not available.
+        # Use datacore_wrapper for safe calls
         self.client = Blackfynn(
             profile=None,
             api_token=api_token,

diff --git a/services/storage/tests/utils.py b/services/storage/tests/utils.py
index a31da01aeea..4844a326ab7 100644
--- a/services/storage/tests/utils.py
+++ b/services/storage/tests/utils.py
@@ -40,9 +40,12 @@ def data_dir():


 def has_datcore_tokens() -> bool:
-    token = os.environ.get("BF_API_KEY")
-    if not token:
-        pytest.skip("Datcore access tokens not available, skipping test")
+    # TODO: activate tests against BF services in the CI.
+    #
+    # CI shall add BF_API_KEY, BF_API_SECRET environs as secrets
+    #
+    if not os.environ.get("BF_API_KEY") or not os.environ.get("BF_API_SECRET"):
+        pytest.skip("Datcore access API tokens not available, skipping test")
         return False
     return True

From 81a341114c33fc8ad63f78cc83fe093e9c077191 Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Fri, 26 Jun 2020 20:28:20 +0200
Subject: [PATCH 13/20] Minor changes

---
 services/storage/tests/test_dsm.py | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/services/storage/tests/test_dsm.py b/services/storage/tests/test_dsm.py
index 036247e3548..ec037bf46d4 100644
--- a/services/storage/tests/test_dsm.py
+++ b/services/storage/tests/test_dsm.py
@@ -1,9 +1,10 @@
-# pylint:disable=unused-variable
-# pylint:disable=unused-argument
-# pylint:disable=redefined-outer-name
+# pylint: disable=unused-variable
+# pylint: disable=unused-argument
+# pylint: disable=redefined-outer-name
 # pylint: disable=too-many-arguments
-# pylint:disable=no-name-in-module
-# pylint:disable=no-member
+# pylint: disable=no-name-in-module
+# pylint: disable=no-member
+# pylint: disable=too-many-branches

 import copy
 import datetime
@@ -28,8 +29,6 @@ def test_mockup(dsm_mockup_db):
     assert len(dsm_mockup_db) == 100


-# Too many branches (13/12) (too-many-branches)
-# pylint: disable=R0912
 async def test_dsm_s3(dsm_mockup_db, dsm_fixture):
     id_name_map = {}
     id_file_count = {}

From 5ba562dc678b2dd6eeb2b9bc4df243c6e2e65bc7 Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Fri, 26 Jun 2020 20:36:37 +0200
Subject: [PATCH 14/20] Enabling testing against BF service

---
 .github/workflows/ci-testing-deploy.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml
index 38413da95fe..a23d6c8dc14 100644
--- a/.github/workflows/ci-testing-deploy.yml
+++ b/.github/workflows/ci-testing-deploy.yml
@@ -16,6 +16,8 @@ env:
   DOCKER_REGISTRY: ${{ secrets.DOCKER_REGISTRY }}
   DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
   DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
+  BF_API_KEY: ${{ secrets.BF_API_KEY }}
+  BF_API_SECRET: ${{ secrets.BF_API_SECRET }}
   CC_TEST_REPORTER_ID: 21a72eb30476c870140b1576258873a41be6692f71bd9aebe812174b7d8f4b4e
   #enable buildkit
   DOCKER_BUILDKIT: 1

From 986499b832227d77d7d70792b505c56d2e7f4eb5 Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Mon, 29 Jun 2020 16:47:51 +0200
Subject: [PATCH 15/20] Fixes lack of agent to upload files

---
 .../src/simcore_service_storage/datcore.py | 70 ++++++++++---------
 .../datcore_wrapper.py                     | 59 ++++++++--------
 services/storage/tests/conftest.py         | 21 ++++--
 services/storage/tests/test_dsm.py         |  6 +-
 services/storage/tests/utils.py            |  2 +-
 5 files changed, 86 insertions(+), 72 deletions(-)

diff --git a/services/storage/src/simcore_service_storage/datcore.py b/services/storage/src/simcore_service_storage/datcore.py
index 08c721fe51e..cfac9a2fb0d 100644
--- a/services/storage/src/simcore_service_storage/datcore.py
+++ b/services/storage/src/simcore_service_storage/datcore.py
@@ -3,6 +3,7 @@
 requires Blackfynn, check Makefile env2
 """

+# FIXME: refactor!!!
 # pylint: skip-file

 import logging
@@ -48,7 +49,8 @@ class DatcoreClient(object):
     def __init__(self, api_token=None, api_secret=None, host=None, streaming_host=None):
         # WARNING: construction raises an exception if the service is not available.
# Use datacore_wrapper for safe calls - self.client = Blackfynn( + # TODO: can use https://developer.blackfynn.io/python/latest/configuration.html#environment-variables + self._bf = Blackfynn( profile=None, api_token=api_token, api_secret=api_secret, @@ -60,7 +62,7 @@ def profile(self): """ Returns profile of current User """ - return self.client.profile + return self._bf.profile def _collection_from_destination(self, destination: str): destination_path = Path(destination) @@ -79,21 +81,21 @@ def _collection_from_destination(self, destination: str): collections = list(object_path.parts) collection_id = "" collection_id = _get_collection_id(dataset, collections, collection_id) - collection = self.client.get(collection_id) + collection = self._bf.get(collection_id) return collection, collection_id def _destination_from_id(self, destination_id: str): - destination = self.client.get(destination_id) + destination = self._bf.get(destination_id) if destination is None: - destination = self.client.get_dataset(destination_id) + destination = self._bf.get_dataset(destination_id) return destination def list_files_recursively(self, dataset_filter: str = ""): files = [] - for dataset in self.client.datasets(): + for dataset in self._bf.datasets(): if not dataset_filter or dataset_filter in dataset.name: self.list_dataset_files_recursively(files, dataset, Path(dataset.name)) @@ -106,9 +108,9 @@ def list_files_raw_dataset(self, dataset_id: str) -> List[FileMetaDataEx]: cursor = "" page_size = 1000 - api = self.client._api.datasets + api = self._bf._api.datasets - dataset = self.client.get_dataset(dataset_id) + dataset = self._bf.get_dataset(dataset_id) if dataset is not None: while True: resp = api._get( @@ -174,7 +176,7 @@ def list_files_raw_dataset(self, dataset_id: str) -> List[FileMetaDataEx]: def list_files_raw(self, dataset_filter: str = "") -> List[FileMetaDataEx]: _files = [] - for dataset in self.client.datasets(): + for dataset in self._bf.datasets(): _files = _files + self.list_files_raw_dataset(dataset.id) return _files @@ -221,7 +223,7 @@ def list_dataset_files_recursively( ) files.append(fmd) - def create_dataset(self, ds_name, force_delete=False): + def create_dataset(self, ds_name,*, force_delete=False): """ Creates a new dataset for the current user and returns it. Returns existing one if there is already a dataset with the given name. 
@@ -230,16 +232,15 @@ def create_dataset(self, ds_name, force_delete=False): ds_name (str): Name for the dataset (_,-,' ' and capitalization are ignored) force_delete (bool, optional): Delete first if dataset already exists """ - ds = None with suppress(Exception): - ds = self.client.get_dataset(ds_name) + ds = self._bf.get_dataset(ds_name) if force_delete: ds.delete() ds = None if ds is None: - ds = self.client.create_dataset(ds_name) + ds = self._bf.create_dataset(ds_name) return ds @@ -254,10 +255,10 @@ def get_dataset(self, ds_name, create_if_not_exists=False): ds = None with suppress(Exception): - ds = self.client.get_dataset(ds_name) + ds = self._bf.get_dataset(ds_name) if ds is None and create_if_not_exists: - ds = self.client.create_dataset(ds_name) + ds = self._bf.create_dataset(ds_name) return ds @@ -272,7 +273,7 @@ def delete_dataset(self, ds_name): # this is not supported ds = self.get_dataset(ds_name) if ds is not None: - self.client.delete(ds.id) + self._bf.delete(ds.id) def exists_dataset(self, ds_name): """ @@ -285,7 +286,7 @@ def exists_dataset(self, ds_name): ds = self.get_dataset(ds_name) return ds is not None - def upload_file(self, destination: str, filepath: str, meta_data=None): + def upload_file(self, destination: str, filepath: str, meta_data=None) -> bool: """ Uploads a file to a given dataset/collection given its filepath on the host. Optionally adds some meta data @@ -308,7 +309,7 @@ def upload_file(self, destination: str, filepath: str, meta_data=None): files = [ filepath, ] - self.client._api.io.upload_files(collection, files, display_progress=True) + self._bf._api.io.upload_files(collection, files, display_progress=True, use_agent=False) collection.update() if meta_data is not None: @@ -361,8 +362,8 @@ def download_link(self, destination, filename): for item in collection: if isinstance(item, DataPackage): if Path(item.files[0].as_dict()["content"]["s3key"]).name == filename: - file_desc = self.client._api.packages.get_sources(item.id)[0] - url = self.client._api.packages.get_presigned_url_for_file( + file_desc = self._bf._api.packages.get_sources(item.id)[0] + url = self._bf._api.packages.get_presigned_url_for_file( item.id, file_desc.id ) return url @@ -375,12 +376,12 @@ def download_link_by_id(self, file_id): """ url = "" filename = "" - package = self.client.get(file_id) + package = self._bf.get(file_id) if package is not None: filename = Path(package.files[0].as_dict()["content"]["s3key"]).name - file_desc = self.client._api.packages.get_sources(file_id)[0] - url = self.client._api.packages.get_presigned_url_for_file( + file_desc = self._bf._api.packages.get_sources(file_id)[0] + url = self._bf._api.packages.get_presigned_url_for_file( file_id, file_desc.id ) @@ -419,20 +420,21 @@ def delete_file(self, destination, filename): for item in collection: if isinstance(item, DataPackage): if Path(item.files[0].as_dict()["content"]["s3key"]).name == filename: - self.client.delete(item) + self._bf.delete(item) return True return False - def delete_file_by_id(self, id: str): + def delete_file_by_id(self, id: str) -> bool: """ Deletes file by id Args: datcore id for the file """ - package = self.client.get(id) + package: DataPackage = self._bf.get(id) package.delete() + return not package.exists def delete_files(self, destination): """ @@ -449,7 +451,7 @@ def delete_files(self, destination): collection.update() for item in collection: - self.client.delete(item) + self._bf.delete(item) def update_meta_data(self, dataset, filename, meta_data): """ @@ -514,7 +516,7 
@@ def search(self, what, max_count): what (str): query max_count (int): Max number of results to return """ - return self.client.search(what, max_count) + return self._bf.search(what, max_count) def upload_file_to_id(self, destination_id: str, filepath: str): """ @@ -534,16 +536,18 @@ def upload_file_to_id(self, destination_id: str, filepath: str): if destination is None: return _id - files = [filepath] + files = [filepath, ] try: - result = self.client._api.io.upload_files( - destination, files, display_progress=True + # TODO: PC->MAG: should protected API + # TODO: add new agent SEE https://developer.blackfynn.io/python/latest/CHANGELOG.html#id31 + result = self._bf._api.io.upload_files( + destination, files, display_progress=True, use_agent=False ) if result and result[0] and "package" in result[0][0]: _id = result[0][0]["package"]["content"]["id"] - except Exception: + except Exception: # pylint: disable= logger.exception("Error uploading file to datcore") return _id @@ -571,7 +575,7 @@ def create_collection(self, destination_id: str, collection_name: str): def list_datasets(self) -> DatasetMetaDataVec: data = [] - for dataset in self.client.datasets(): + for dataset in self._bf.datasets(): dmd = DatasetMetaData(dataset_id=dataset.id, display_name=dataset.name) data.append(dmd) diff --git a/services/storage/src/simcore_service_storage/datcore_wrapper.py b/services/storage/src/simcore_service_storage/datcore_wrapper.py index 400fd410cfe..36c4604a6bc 100644 --- a/services/storage/src/simcore_service_storage/datcore_wrapper.py +++ b/services/storage/src/simcore_service_storage/datcore_wrapper.py @@ -5,7 +5,7 @@ from contextlib import contextmanager from functools import wraps from pathlib import Path -from typing import List +from typing import List, Optional, Tuple import attr @@ -116,19 +116,22 @@ def list_files_raw_dataset( return files @make_async - def delete_file(self, destination: str, filename: str): + def delete_file(self, destination: str, filename: str) -> bool: # the object can be found in dataset/filename <-> bucket_name/object_name + ok = False with safe_call(error_msg="Error deleting datcore file"): - self.d_client.delete_file(destination, filename) + ok = self.d_client.delete_file(destination, filename) + return ok @make_async - def delete_file_by_id(self, file_id: str): - + def delete_file_by_id(self, file_id: str) -> bool: + ok = False with safe_call(error_msg="Error deleting datcore file"): - self.d_client.delete_file_by_id(file_id) + ok = self.d_client.delete_file_by_id(file_id) + return ok @make_async - def download_link(self, destination: str, filename: str): + def download_link(self, destination: str, filename: str) -> str: url = "" with safe_call(error_msg="Error getting datcore download link"): url = self.d_client.download_link(destination, filename) @@ -136,7 +139,7 @@ def download_link(self, destination: str, filename: str): return url @make_async - def download_link_by_id(self, file_id: str): + def download_link_by_id(self, file_id: str) -> Tuple[str, str]: url = "" filename = "" with safe_call(error_msg="Error getting datcore download link"): @@ -145,20 +148,18 @@ def download_link_by_id(self, file_id: str): return url, filename @make_async - def create_test_dataset(self, dataset): - + def create_test_dataset(self, dataset_name: str) -> Optional[str]: with safe_call(error_msg="Error creating test dataset"): - ds = self.d_client.get_dataset(dataset) + ds = self.d_client.get_dataset(dataset_name) if ds is not None: - self.d_client.delete_files(dataset) + 
self.d_client.delete_files(dataset_name) else: - ds = self.d_client.create_dataset(dataset) + ds = self.d_client.create_dataset(dataset_name) return ds.id - return "" + return None @make_async - def delete_test_dataset(self, dataset): - + def delete_test_dataset(self, dataset) -> None: with safe_call(error_msg="Error deleting test dataset"): ds = self.d_client.get_dataset(dataset) if ds is not None: @@ -167,43 +168,43 @@ def delete_test_dataset(self, dataset): @make_async def upload_file( self, destination: str, local_path: str, meta_data: FileMetaData = None - ): - result = False + ) -> bool: + ok = False str_meta = json.dumps(attr.asdict(meta_data)) if meta_data else "" with safe_call(error_msg="Error uploading file to datcore"): if str_meta: meta_data = json.loads(str_meta) - result = self.d_client.upload_file(destination, local_path, meta_data) + ok = self.d_client.upload_file(destination, local_path, meta_data) else: - result = self.d_client.upload_file(destination, local_path) - return result + ok = self.d_client.upload_file(destination, local_path) + return ok @make_async - def upload_file_to_id(self, destination_id: str, local_path: str): - _id = "" - + def upload_file_to_id(self, destination_id: str, local_path: str) -> Optional[str]: + _id = None with safe_call(error_msg="Error uploading file to datcore"): _id = self.d_client.upload_file_to_id(destination_id, local_path) - return _id @make_async - def create_collection(self, destination_id: str, collection_name: str): - _id = "" + def create_collection( + self, destination_id: str, collection_name: str + ) -> Optional[str]: + _id = None with safe_call(error_msg="Error creating collection in datcore"): _id = self.d_client.create_collection(destination_id, collection_name) return _id @make_async - def list_datasets(self): + def list_datasets(self) -> List: data = [] with safe_call(error_msg="Error creating collection in datcore"): data = self.d_client.list_datasets() return data @make_async - def ping(self): + def ping(self) -> bool: ok = False with safe_call(skip_logs=True): profile = self.d_client.profile() diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py index 84dc5d0f92e..fa52de277a8 100644 --- a/services/storage/tests/conftest.py +++ b/services/storage/tests/conftest.py @@ -350,21 +350,28 @@ async def datcore_structured_testbucket(loop, mock_files_factory): dcw = DatcoreWrapper(api_token, api_secret, loop, pool) dataset_id = await dcw.create_test_dataset(BUCKET_NAME) + assert dataset_id, f"Could not create dataset {BUCKET_NAME}" + tmp_files = mock_files_factory(3) + # first file to the root - file_id1 = await dcw.upload_file_to_id(dataset_id, os.path.normpath(tmp_files[0])) + filename1 = os.path.normpath(tmp_files[0]) + file_id1 = await dcw.upload_file_to_id(dataset_id, filename1) + assert file_id1, f"Could not upload {filename1} to the root of {BUCKET_NAME}" + # create first level folder collection_id1 = await dcw.create_collection(dataset_id, "level1") + # upload second file - file_id2 = await dcw.upload_file_to_id( - collection_id1, os.path.normpath(tmp_files[1]) - ) + filename2 = os.path.normpath(tmp_files[1]) + file_id2 = await dcw.upload_file_to_id(collection_id1, filename2) + assert file_id2, f"Could not upload {filename2} to the {BUCKET_NAME}/level1" # create 3rd level folder + filename3 = os.path.normpath(tmp_files[2]) collection_id2 = await dcw.create_collection(collection_id1, "level2") - file_id3 = await dcw.upload_file_to_id( - collection_id2, os.path.normpath(tmp_files[2]) - ) 
+ file_id3 = await dcw.upload_file_to_id(collection_id2, filename3) + assert file_id3, f"Could not upload {filename3} to the {BUCKET_NAME}/level1/level2" yield { "dataset_id": dataset_id, diff --git a/services/storage/tests/test_dsm.py b/services/storage/tests/test_dsm.py index ec037bf46d4..3f1f46fe1a7 100644 --- a/services/storage/tests/test_dsm.py +++ b/services/storage/tests/test_dsm.py @@ -230,18 +230,20 @@ async def test_dsm_datcore( return utils.create_tables(url=postgres_service_url) + dsm = dsm_fixture user_id = "0" data = await dsm.list_files( user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME ) - # the fixture creates two files + # the fixture creates 3 files assert len(data) == 3 # delete the first one fmd_to_delete = data[0].fmd print("Deleting", fmd_to_delete.bucket_name, fmd_to_delete.object_name) - await dsm.delete_file(user_id, DATCORE_STR, fmd_to_delete.file_id) + is_deleted = await dsm.delete_file(user_id, DATCORE_STR, fmd_to_delete.file_id) + assert is_deleted data = await dsm.list_files( user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME diff --git a/services/storage/tests/utils.py b/services/storage/tests/utils.py index 4844a326ab7..a5d55ea283e 100644 --- a/services/storage/tests/utils.py +++ b/services/storage/tests/utils.py @@ -27,7 +27,7 @@ ACCESS_KEY = "12345678" SECRET_KEY = "12345678" -BUCKET_NAME = "simcore-testing" +BUCKET_NAME = "simcore-testing-bucket" USER_ID = "0" From 536f3ed69c88a0a69487a5c180f0d3d7448f0292 Mon Sep 17 00:00:00 2001 From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com> Date: Mon, 29 Jun 2020 18:18:22 +0200 Subject: [PATCH 16/20] Fixes argument --- .../src/simcore_service_storage/datcore.py | 28 +++++++++++-------- .../src/simcore_service_storage/dsm.py | 5 ++-- 2 files changed, 20 insertions(+), 13 deletions(-) diff --git a/services/storage/src/simcore_service_storage/datcore.py b/services/storage/src/simcore_service_storage/datcore.py index cfac9a2fb0d..e7d548b35d8 100644 --- a/services/storage/src/simcore_service_storage/datcore.py +++ b/services/storage/src/simcore_service_storage/datcore.py @@ -11,10 +11,10 @@ import urllib from contextlib import suppress from pathlib import Path -from typing import List +from typing import List, Union from blackfynn import Blackfynn -from blackfynn.models import BaseCollection, Collection, DataPackage +from blackfynn.models import BaseCollection, Collection, DataPackage, Dataset from simcore_service_storage.models import DatasetMetaData, FileMetaData, FileMetaDataEx from simcore_service_storage.settings import DATCORE_ID, DATCORE_STR @@ -86,9 +86,13 @@ def _collection_from_destination(self, destination: str): return collection, collection_id def _destination_from_id(self, destination_id: str): - destination = self._bf.get(destination_id) + # NOTE: .get(*) logs + # INFO:blackfynn.client.Blackfynn:Unable to retrieve object + # if destination_id refers to a Dataset + + destination: Union[DataPackage, Collection] = self._bf.get(destination_id) if destination is None: - destination = self._bf.get_dataset(destination_id) + destination: Dataset = self._bf.get_dataset(destination_id) return destination @@ -223,7 +227,7 @@ def list_dataset_files_recursively( ) files.append(fmd) - def create_dataset(self, ds_name,*, force_delete=False): + def create_dataset(self, ds_name, *, force_delete=False): """ Creates a new dataset for the current user and returns it. Returns existing one if there is already a dataset with the given name. 
@@ -309,7 +313,9 @@ def upload_file(self, destination: str, filepath: str, meta_data=None) -> bool:
         files = [
             filepath,
         ]
-        self._bf._api.io.upload_files(collection, files, display_progress=True, use_agent=False)
+        self._bf._api.io.upload_files(
+            collection, files, display_progress=True, use_agent=False
+        )
         collection.update()

         if meta_data is not None:
@@ -381,9 +387,7 @@ def download_link_by_id(self, file_id):
             filename = Path(package.files[0].as_dict()["content"]["s3key"]).name

         file_desc = self._bf._api.packages.get_sources(file_id)[0]
-        url = self._bf._api.packages.get_presigned_url_for_file(
-            file_id, file_desc.id
-        )
+        url = self._bf._api.packages.get_presigned_url_for_file(file_id, file_desc.id)

         return url, filename

@@ -536,7 +540,9 @@ def upload_file_to_id(self, destination_id: str, filepath: str):
         if destination is None:
             return _id

-        files = [filepath, ]
+        files = [
+            filepath,
+        ]

         try:
             # TODO: PC->MAG: should protected API
@@ -547,7 +553,7 @@ def upload_file_to_id(self, destination_id: str, filepath: str):
             if result and result[0] and "package" in result[0][0]:
                 _id = result[0][0]["package"]["content"]["id"]

-        except Exception:  # pylint: disable=
+        except Exception:
             logger.exception("Error uploading file to datcore")

         return _id

diff --git a/services/storage/src/simcore_service_storage/dsm.py b/services/storage/src/simcore_service_storage/dsm.py
index 8175d2a2808..7b517e5bdb3 100644
--- a/services/storage/src/simcore_service_storage/dsm.py
+++ b/services/storage/src/simcore_service_storage/dsm.py
@@ -278,7 +278,7 @@ async def list_files(

         # same as above, make sure file is physically present on s3
         clean_data = []
-        # TODO: MaG: This is inefficient: Do this automatically when file is modified
+        # FIXME: MaG: This is inefficient: Do this automatically when file is modified
         session = aiobotocore.get_session()
         async with session.create_client(
             "s3",
@@ -551,6 +551,7 @@ async def copy_file_datcore_s3(

         s3_upload_link = await self.upload_link(user_id, dest_uuid)

+        # FIXME: user of mkdtemp is RESPONSIBLE for deleting it https://docs.python.org/3/library/tempfile.html#tempfile.mkdtemp
         tmp_dirpath = tempfile.mkdtemp()
         local_file_path = os.path.join(tmp_dirpath, filename)
         session = get_client_session(self.app)
@@ -741,7 +742,7 @@ async def deep_copy_project_simcore_s3(
             await conn.execute(ins)

     async def delete_project_simcore_s3(
-        self, user_id: str, project_id: str, node_id: Optional[str]
+        self, user_id: str, project_id: str, node_id: Optional[str] = None
     ) -> web.Response:
         """ Deletes all files from a given node in a project in simcore.s3 and updated db accordingly.
             If node_id is not given, then all the project files db entries are deleted.

From fab40b0262bdac8211651537a3e7fadd706eae5b Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Mon, 29 Jun 2020 18:25:34 +0200
Subject: [PATCH 17/20] WARNING: added some sleep in tests!

---
 services/storage/tests/test_dsm.py | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/services/storage/tests/test_dsm.py b/services/storage/tests/test_dsm.py
index 3f1f46fe1a7..cefd557b144 100644
--- a/services/storage/tests/test_dsm.py
+++ b/services/storage/tests/test_dsm.py
@@ -245,6 +245,8 @@ async def test_dsm_datcore(
     is_deleted = await dsm.delete_file(user_id, DATCORE_STR, fmd_to_delete.file_id)
     assert is_deleted

+    import time; time.sleep(1)  # FIXME: takes some time to delete!!
+
     data = await dsm.list_files(
         user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME
     )
@@ -293,6 +295,9 @@ async def test_dsm_s3_to_datcore(
         destination_id=datcore_structured_testbucket["coll2_id"],
     )

+    # FIXME: upload takes some time
+    import time; time.sleep(1)
+
     data = await dsm.list_files(
         user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME
     )

From fa983e3d4bcfe7a7f82eb40f02a17178d18ee7d9 Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Mon, 29 Jun 2020 18:25:55 +0200
Subject: [PATCH 18/20] Added application so that it could use aiohttp client

---
 services/storage/tests/conftest.py | 27 +++++++++++++++++++++++----
 1 file changed, 23 insertions(+), 4 deletions(-)

diff --git a/services/storage/tests/conftest.py b/services/storage/tests/conftest.py
index fa52de277a8..e7b2d2b1498 100644
--- a/services/storage/tests/conftest.py
+++ b/services/storage/tests/conftest.py
@@ -16,15 +16,18 @@
 from typing import Tuple

 import pytest
+from aiohttp import web
 from aiopg.sa import create_engine

 import simcore_service_storage
 import utils
+from servicelib.application import create_safe_application
 from simcore_service_storage.datcore_wrapper import DatcoreWrapper
 from simcore_service_storage.dsm import DataStorageManager, DatCoreApiToken
 from simcore_service_storage.models import FileMetaData
 from simcore_service_storage.settings import SIMCORE_S3_STR
-from utils import ACCESS_KEY, BUCKET_NAME, DATABASE, PASS, SECRET_KEY, USER, USER_ID
+from utils import (ACCESS_KEY, BUCKET_NAME, DATABASE, PASS, SECRET_KEY, USER,
+                   USER_ID)

 current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent
 sys.path.append(str(current_dir / "helpers"))
@@ -324,17 +327,33 @@


 @pytest.fixture(scope="function")
-def dsm_fixture(s3_client, postgres_engine, loop):
+def moduleless_app(loop, aiohttp_server) -> web.Application:
+    app: web.Application = create_safe_application()
+    # creates a dummy server
+    server = loop.run_until_complete(aiohttp_server(app))
+    # server is destroyed on exit https://docs.aiohttp.org/en/stable/testing.html#pytest_aiohttp.aiohttp_server
+    return app
+
+
+@pytest.fixture(scope="function")
+def dsm_fixture(s3_client, postgres_engine, loop, moduleless_app):
     pool = ThreadPoolExecutor(3)
+
     dsm_fixture = DataStorageManager(
-        s3_client, postgres_engine, loop, pool, BUCKET_NAME, False
+        s3_client=s3_client,
+        engine=postgres_engine,
+        loop=loop,
+        pool=pool,
+        simcore_bucket_name=BUCKET_NAME,
+        has_project_db=False,
+        app=moduleless_app,
     )

     api_token = os.environ.get("BF_API_KEY", "none")
     api_secret = os.environ.get("BF_API_SECRET", "none")
     dsm_fixture.datcore_tokens[USER_ID] = DatCoreApiToken(api_token, api_secret)
-    return dsm_fixture
+
+    yield dsm_fixture


 @pytest.fixture(scope="function")

From 509d3e4fff119b7acd18e849c0de28ee9e388f05 Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Tue, 30 Jun 2020 10:38:54 +0200
Subject: [PATCH 19/20] Removed BF API tokens from CI

---
 .github/workflows/ci-testing-deploy.yml | 2 --
 1 file changed, 2 deletions(-)

diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml
index a23d6c8dc14..38413da95fe 100644
--- a/.github/workflows/ci-testing-deploy.yml
+++ b/.github/workflows/ci-testing-deploy.yml
@@ -16,8 +16,6 @@ env:
   DOCKER_REGISTRY: ${{ secrets.DOCKER_REGISTRY }}
   DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
   DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
-  BF_API_KEY: ${{ secrets.BF_API_KEY }}
-  BF_API_SECRET: ${{ secrets.BF_API_SECRET }}
   CC_TEST_REPORTER_ID: 21a72eb30476c870140b1576258873a41be6692f71bd9aebe812174b7d8f4b4e
   #enable buildkit
   DOCKER_BUILDKIT: 1

From bc14f2f2cd462a8bf05b3d054349855957a964f3 Mon Sep 17 00:00:00 2001
From: Pedro Crespo <32402063+pcrespov@users.noreply.github.com>
Date: Thu, 16 Jul 2020 21:27:47 +0200
Subject: [PATCH 20/20] Fixes bucket name in fixture

---
 services/storage/tests/data/file_meta_data.csv | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/services/storage/tests/data/file_meta_data.csv b/services/storage/tests/data/file_meta_data.csv
index e9a8fb3f582..fa473ea55a4 100644
--- a/services/storage/tests/data/file_meta_data.csv
+++ b/services/storage/tests/data/file_meta_data.csv
@@ -1,3 +1,3 @@
 file_uuid,location_id,location,bucket_name,object_name,project_id,project_name,node_id,node_name,file_name,user_id,user_name
-161b8782-b13e-5840-9ae2-e2250c231001/ad9bda7f-1dc5-5480-ab22-5fef4fc53eac/outputController.dat,0,simcore.s3,simcore-testing,161b8782-b13e-5840-9ae2-e2250c231001/ad9bda7f-1dc5-5480-ab22-5fef4fc53eac/outputController.dat,161b8782-b13e-5840-9ae2-e2250c231001,"",ad9bda7f-1dc5-5480-ab22-5fef4fc53eac,"",outputController.dat,21,""
-161b8782-b13e-5840-9ae2-e2250c231001/a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8/notebooks.zip,0,simcore.s3,simcore-testing,161b8782-b13e-5840-9ae2-e2250c231001/a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8/notebooks.zip,161b8782-b13e-5840-9ae2-e2250c231001,"",a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8,"",notebooks.zip,21,""
+161b8782-b13e-5840-9ae2-e2250c231001/ad9bda7f-1dc5-5480-ab22-5fef4fc53eac/outputController.dat,0,simcore.s3,simcore-testing-bucket,161b8782-b13e-5840-9ae2-e2250c231001/ad9bda7f-1dc5-5480-ab22-5fef4fc53eac/outputController.dat,161b8782-b13e-5840-9ae2-e2250c231001,"",ad9bda7f-1dc5-5480-ab22-5fef4fc53eac,"",outputController.dat,21,""
+161b8782-b13e-5840-9ae2-e2250c231001/a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8/notebooks.zip,0,simcore.s3,simcore-testing-bucket,161b8782-b13e-5840-9ae2-e2250c231001/a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8/notebooks.zip,161b8782-b13e-5840-9ae2-e2250c231001,"",a3941ea0-37c4-5c1d-a7b3-01b5fd8a80c8,"",notebooks.zip,21,""
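
NOTE on the mock fix in [PATCH 11/20]: datcore.py binds the client name at
import time via "from blackfynn import Blackfynn", so patching
"blackfynn.Blackfynn" afterwards does not rebind the name inside
simcore_service_storage.datcore (hence the old importlib.reload(datcore)
workaround). Patching the name where it is looked up does. A minimal sketch of
the pattern, assuming pytest with pytest-mock; the test below is illustrative
and not part of the series:

    import pytest


    def raise_error(*args, **kwargs):
        raise RuntimeError("Emulating unavailable service")


    def test_blackfynn_unavailable(mocker):
        # patch the name in the module that *uses* it, not where it is defined
        mocker.patch("simcore_service_storage.datcore.Blackfynn", raise_error)

        from simcore_service_storage.datcore import DatcoreClient

        # constructing the client now fails fast, without any network I/O
        with pytest.raises(RuntimeError):
            DatcoreClient(api_token="x", api_secret="y")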
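
NOTE on the sleeps added in [PATCH 17/20]: the fixed time.sleep(1) calls work
around DATCORE's delay in reflecting uploads and deletes, but polling with a
timeout is usually more robust than a fixed delay. A minimal sketch of such a
helper; wait_until and the usage shown are hypothetical, not part of the
series:

    import asyncio
    from typing import Awaitable, Callable


    async def wait_until(
        predicate: Callable[[], Awaitable[bool]],
        timeout: float = 10.0,
        interval: float = 0.5,
    ) -> None:
        # polls an async predicate until it returns True or the timeout expires
        deadline = asyncio.get_event_loop().time() + timeout
        while not await predicate():
            if asyncio.get_event_loop().time() > deadline:
                raise TimeoutError("condition not met within timeout")
            await asyncio.sleep(interval)

    # e.g. in test_dsm_datcore, instead of time.sleep(1):
    #
    #     async def one_file_gone() -> bool:
    #         data = await dsm.list_files(
    #             user_id=user_id, location=DATCORE_STR, uuid_filter=BUCKET_NAME
    #         )
    #         return len(data) == 2
    #
    #     await wait_until(one_file_gone)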