diff --git a/docs/source/advanced_usage.rst b/docs/source/advanced_usage.rst index 9c7926db..9898ea81 100644 --- a/docs/source/advanced_usage.rst +++ b/docs/source/advanced_usage.rst @@ -230,4 +230,23 @@ Finally, if the image is small enough, SCAR allows to upload it in the function To help with the creation of slim images, you can use `minicon `_. Minicon is a general tool to analyze applications and executions of these applications to obtain a filesystem that contains all the dependencies that have been detected. -By using minicon the size of the cowsay image was reduced from 170MB to 11MB. \ No newline at end of file +By using minicon the size of the cowsay image was reduced from 170MB to 11MB. + +Setting a specific VPC +---------------------- + +You can also set an specific VPC parameters to configure the network in you lambda functions. +You only have to add the ``vpc`` field setting the subnets and security groups as shown in the +following example:: + + functions: + aws: + - lambda: + vpc: + SubnetIds: + - subnet-00000000000000000 + SecurityGroupIds: + - sg-00000000000000000 + name: scar-cowsay + container: + image: grycap/cowsay \ No newline at end of file diff --git a/docs/source/image_env_usage.rst b/docs/source/image_env_usage.rst new file mode 100644 index 00000000..5907aa94 --- /dev/null +++ b/docs/source/image_env_usage.rst @@ -0,0 +1,163 @@ +Using Lambda Image Environment +============================== + +Scar uses by default the python3.7 Lambda environment using udocker program to execute the containers. +In 2021 AWS added native support to ECR container images. Scar also supports to use this environment +to execute your containers. + +To use it you only have to set to ``image`` the lamda ``runtime`` property setting. 
+You can set it in the scar configuration file:: + + { + "aws": { + "lambda": { + "runtime": "image" + } + } + } + +Or in the function definition file:: + + functions: + aws: + - lambda: + runtime: image + name: scar-function + memory: 2048 + init_script: script.sh + container: + image: image/name + +Or event set it as a parameter in the ``init`` scar call:: + + scar init -f function_def.yaml -rt image + +In this case the scar client will prepare the image and upload it to AWS ECR as required by the +Lambda Image Environment. + +To use this functionality you should use `supervisor `_ +version 1.5.0 or newer. + +Using the image runtime the scar client will build a new container image adding the supervisor and +other needed files to the user provided image. This image will be then uploaded to an ECR registry +to enable Lambda environment to create the function. So the user that executes the scar client +must have the ability to execute the docker commands (be part of the ``docker`` group, see +`docker documentation `_) + + +Use alpine based images +----------------------- + +Using the container image environment there is no limitation to use alpine based images (musl based). +You only have to add the ``alpine`` flag in the function definition:: + + functions: + aws: + - lambda: + runtime: image + name: scar-function + memory: 2048 + init_script: script.sh + container: + image: image/name + alpine: true + +If you use an alpine based image and you do not set the ``alpine`` flag you will get an execution Error:: + + Error: fork/exec /var/task/supervisor: no such file or directory + +Use already prepared ECR images +-------------------------------- + +You can also use a previously prepared ECR image instead of building it and and pushing to ECR. 
+In this case you have to specify the full ECR image name and add set to false the ``create_image`` +flag in the function definition:: + + functions: + aws: + - lambda: + runtime: image + name: scar-function + memory: 2048 + init_script: script.sh + container: + image: 000000000000.dkr.ecr.us-east-1.amazonaws.com/scar-function + create_image: false + +Do not delete ECR image on function deletion +-------------------------------------------- + +By default the scar client deletes the ECR image in the function deletion process. +If you want to maintain it for future functions you can modify the scar configuration +file and set to false ``delete_image`` flag in the ecr configuration section:: + + { + "aws": { + "ecr": { + "delete_image": false + } + } + } + +Or set it in the function definition:: + + functions: + aws: + - lambda: + runtime: image + name: scar-function + memory: 2048 + init_script: script.sh + container: + image: image/name + ecr: + delete_image: false + +ARM64 support +------------- + +Using the container image environment you can also specify the architecture to execute your lambda +function (x86_64 or arm64) setting the architectures field in the function definition. If not set +the default architecture will be used (x86_64):: + + functions: + aws: + - lambda: + runtime: image + architectures: + - arm64 + name: scar-function + memory: 2048 + init_script: script.sh + container: + image: image/name + +EFS support +------------ + +Using the container image environment you can also configure file system access for your Lambda function. +First you have to set the VPC parameters to use the same subnet where the EFS is deployed. Also verify +that the iam role set in the scar configuration has the correct permissions and the Security Groups is +properly configured to enable access to NFS port (see `Configuring file system access for Lambda functions `_). 
+Then you have to add the ``file_system`` field setting the arns and mount paths of the file systems to mount +as shown in the following example:: + + + functions: + aws: + - lambda: + runtime: image + vpc: + SubnetIds: + - subnet-00000000000000000 + SecurityGroupIds: + - sg-00000000000000000 + file_system: + - Arn: arn:aws:elasticfilesystem:us-east-1:000000000000:access-point/fsap-00000000000000000 + LocalMountPath: /mnt/efs + name: scar-function + memory: 2048 + init_script: script.sh + container: + image: image/name + diff --git a/docs/source/index.rst b/docs/source/index.rst index 83c7e657..cf2ff716 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -13,6 +13,7 @@ Welcome to SCAR's documentation! configuration basic_usage advanced_usage + image_env_usage api_gateway batch prog_model diff --git a/examples/cowsay-ecr/Dockerfile b/examples/cowsay-ecr/Dockerfile new file mode 100644 index 00000000..5fc16988 --- /dev/null +++ b/examples/cowsay-ecr/Dockerfile @@ -0,0 +1,18 @@ +FROM ubuntu:16.04 + +# Include global arg in this stage of the build +ARG FUNCTION_DIR="/var/task" +# Create function directory +RUN mkdir -p ${FUNCTION_DIR} +# Set working directory to function root directory +WORKDIR ${FUNCTION_DIR} + +# Copy function code +COPY awslambdaric ${FUNCTION_DIR} +COPY function_config.yaml ${FUNCTION_DIR} +COPY test.sh ${FUNCTION_DIR} + +ENV PATH="${FUNCTION_DIR}:${PATH}" + +ENTRYPOINT [ "awslambdaric" ] +CMD [ "faassupervisor.supervisor.main" ] diff --git a/examples/cowsay-ecr/basic-cow.yaml b/examples/cowsay-ecr/basic-cow.yaml new file mode 100644 index 00000000..77004361 --- /dev/null +++ b/examples/cowsay-ecr/basic-cow.yaml @@ -0,0 +1,9 @@ +functions: + aws: + - lambda: + init_script: test.sh + name: scar-ecr-cowsay + container: + #alpine: true + #image: alpine:3.14 + image: ubuntu:20.04 diff --git a/examples/cowsay-ecr/function_config.yaml b/examples/cowsay-ecr/function_config.yaml new file mode 100644 index 00000000..9fb01c21 --- /dev/null +++ 
b/examples/cowsay-ecr/function_config.yaml @@ -0,0 +1,34 @@ +asynchronous: false +boto_profile: default +config_path: '' +container: + environment: + Variables: {} + image: 974349055189.dkr.ecr.us-east-1.amazonaws.com/micafer:latest + timeout_threshold: 10 +deployment: + max_payload_size: 52428800 + max_s3_payload_size: 262144000 +environment: + Variables: {} +description: Automatically generated lambda function +execution_mode: lambda +handler: scar-ecr-cowsay.lambda_handler +init_script: test.sh +invocation_type: RequestResponse +layers: [] +log_level: DEBUG +log_type: Tail +memory: 512 +name: scar-ecr-cowsay +region: us-east-1 +runtime: image +storage_providers: {} +supervisor: + layer_name: faas-supervisor + license_info: Apache 2.0 + version: 1.4.2 +tags: + createdby: scar + owner: AIDAJNGRVV2UDO2O4TS4O +timeout: 300 \ No newline at end of file diff --git a/examples/cowsay-ecr/test.sh b/examples/cowsay-ecr/test.sh new file mode 100755 index 00000000..1ded5043 --- /dev/null +++ b/examples/cowsay-ecr/test.sh @@ -0,0 +1,4 @@ +#!/bin/sh +env +cat ${INPUT_FILE_PATH} +echo "OK1" diff --git a/requirements.txt b/requirements.txt index 79b3c2e4..22e06a93 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,3 +6,4 @@ requests pyyaml setuptools>=40.8.0 packaging +docker diff --git a/scar/parser/cli/__init__.py b/scar/parser/cli/__init__.py index 48d8bf34..2b990aab 100644 --- a/scar/parser/cli/__init__.py +++ b/scar/parser/cli/__init__.py @@ -62,7 +62,7 @@ def _parse_lambda_args(cmd_args: Dict) -> Dict: lambda_arg_list = ['name', 'asynchronous', 'init_script', 'run_script', 'c_args', 'memory', 'timeout', 'timeout_threshold', 'image', 'image_file', 'description', 'lambda_role', 'extra_payload', ('environment', 'environment_variables'), - 'layers', 'lambda_environment', 'list_layers', 'log_level', 'preheat'] + 'layers', 'lambda_environment', 'list_layers', 'log_level', 'preheat', 'runtime'] lambda_args = DataTypesUtils.parse_arg_list(lambda_arg_list, cmd_args) # 
Standardize log level if defined if "log_level" in lambda_args: diff --git a/scar/parser/cli/parents.py b/scar/parser/cli/parents.py index 3639f2f1..9f1801d5 100644 --- a/scar/parser/cli/parents.py +++ b/scar/parser/cli/parents.py @@ -63,6 +63,7 @@ def create_function_definition_parser(): function_definition_parser.add_argument("-sv", "--supervisor-version", help=("FaaS Supervisor version. " "Can be a tag or 'latest'.")) + function_definition_parser.add_argument("-rt", "--runtime", help="Lambda runtime") # Batch (job definition) options function_definition_parser.add_argument("-bm", "--batch-memory", help="Batch job memory in megabytes") diff --git a/scar/parser/cli/subparsers.py b/scar/parser/cli/subparsers.py index 3578034f..474f50bf 100644 --- a/scar/parser/cli/subparsers.py +++ b/scar/parser/cli/subparsers.py @@ -117,7 +117,7 @@ def _add_rm_parser(self): help="Delete all lambda functions", action="store_true") group.add_argument("-f", "--conf-file", - help="Yaml file with the function configuration") + help="Yaml file with the function configuration") def _add_log_parser(self): log = self.subparser.add_parser('log', diff --git a/scar/providers/aws/__init__.py b/scar/providers/aws/__init__.py index 195c17af..a2000c28 100644 --- a/scar/providers/aws/__init__.py +++ b/scar/providers/aws/__init__.py @@ -23,6 +23,7 @@ from scar.providers.aws.clients.resourcegroups import ResourceGroupsClient from scar.providers.aws.clients.s3 import S3Client from scar.providers.aws.clients.ec2 import EC2Client +from scar.providers.aws.clients.ecr import ElasticContainerRegistryClient class GenericClient(): @@ -37,7 +38,8 @@ class GenericClient(): 'LAMBDA': LambdaClient, 'RESOURCEGROUPS': ResourceGroupsClient, 'S3': S3Client, - 'LAUNCHTEMPLATES': EC2Client} + 'LAUNCHTEMPLATES': EC2Client, + 'ECR': ElasticContainerRegistryClient} def __init__(self, resource_info: Dict =None): self.properties = {} diff --git a/scar/providers/aws/batchfunction.py b/scar/providers/aws/batchfunction.py 
index 9cfadedd..376b9530 100644 --- a/scar/providers/aws/batchfunction.py +++ b/scar/providers/aws/batchfunction.py @@ -188,7 +188,7 @@ def _get_container_properties_single_node_args(self): ] } if self.batch.get('enable_gpu'): - job_def_args['containerProperties']['resourceRequirements'] = [ + job_def_args['resourceRequirements'] = [ { 'value': '1', 'type': 'GPU' diff --git a/scar/providers/aws/clients/ecr.py b/scar/providers/aws/clients/ecr.py new file mode 100644 index 00000000..7d51ab92 --- /dev/null +++ b/scar/providers/aws/clients/ecr.py @@ -0,0 +1,67 @@ +# Copyright (C) GRyCAP - I3M - UPV +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Module with the class necessary to manage the +Cloudwatch Logs creation, deletion and configuration.""" + +import base64 +from datetime import datetime +from typing import Dict +from scar.exceptions import exception +from scar.providers.aws.clients import BotoClient +import scar.logger as logger + + +class ElasticContainerRegistryClient(BotoClient): + """A low-level client representing Amazon Elastic Container Registry. 
+ DOC_URL: https://boto3.readthedocs.io/en/latest/reference/services/ecr.html""" + + # Parameter used by the parent to create the appropriate boto3 client + _BOTO_CLIENT_NAME = 'ecr' + + def __init__(self, client_args: Dict): + super().__init__(client_args) + self.token = None + + @exception(logger) + def get_authorization_token(self) -> str: + """Retrieves an authorization token.""" + if self.token: + now = datetime.now() + if self.token['expiresAt'] > (now + 60): + return self.token["authorizationToken"] + + response = self.client.get_authorization_token() + self.token = response["authorizationData"][0] + return base64.b64decode(self.token["authorizationToken"]).decode().split(':') + + @exception(logger) + def get_registry_id(self) -> str: + response = self.client.describe_registry() + return response["registryId"] + + @exception(logger) + def describe_repositories(self, **kwargs: Dict) -> str: + try: + response = self.client.describe_repositories(**kwargs) + return response + except Exception: + return None + + @exception(logger) + def create_repository(self, repository_name: str): + return self.client.create_repository(repositoryName=repository_name) + + @exception(logger) + def delete_repository(self, repository_name: str): + return self.client.delete_repository(repositoryName=repository_name, force=True) \ No newline at end of file diff --git a/scar/providers/aws/cloudwatchlogs.py b/scar/providers/aws/cloudwatchlogs.py index 10db574f..8a4fdb87 100644 --- a/scar/providers/aws/cloudwatchlogs.py +++ b/scar/providers/aws/cloudwatchlogs.py @@ -91,8 +91,7 @@ def _get_batch_job_log(self, jobs_info: List) -> str: kwargs['logStreamNames'] = [job.get("container", {}).get("logStreamName", "")] batch_events = self.client.get_log_events(**kwargs) msgs = [event.get('message', '') - for response in batch_events - for event in response.get("events", {})] + for event in batch_events] batch_logs += '\n'.join(msgs) return batch_logs diff --git 
a/scar/providers/aws/containerimage.py b/scar/providers/aws/containerimage.py new file mode 100644 index 00000000..1f2bdd89 --- /dev/null +++ b/scar/providers/aws/containerimage.py @@ -0,0 +1,160 @@ +# Copyright (C) GRyCAP - I3M - UPV +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os.path +import docker +import scar.logger as logger +from typing import Dict, Set +from scar.providers.aws.ecr import ECR +from scar.utils import FileUtils, SupervisorUtils +from scar.providers.aws.functioncode import create_function_config + + +class ContainerImage: + + @staticmethod + def get_asset_name(function_info: Dict) -> str: + arch = function_info.get('architectures', ['x86_64'])[0] + arch = '' if arch == 'x86_64' else "-%s" % arch + alpine = '-alpine' if function_info.get('container').get('alpine') else '' + return 'supervisor%s%s.zip' % (alpine, arch) + + @staticmethod + def delete_ecr_image(resources_info: Dict) -> None: + """Delete the ECR repository created in _create_ecr_image function.""" + ecr_cli = ECR(resources_info) + repo_name = resources_info.get('lambda').get('name') + if ecr_cli.get_repository_uri(repo_name): + logger.info('Deleting ECR repo: %s' % repo_name) + ecr_cli.delete_repository(repo_name) + + @staticmethod + def get_supervisor_zip(resources_info: Dict, supervisor_version: str) -> str: + """Get from cache or download supervisor zip.""" + asset_name = ContainerImage.get_asset_name(resources_info.get('lambda')) + cached, supervisor_zip_path = 
SupervisorUtils.is_supervisor_asset_cached(asset_name, supervisor_version) + if cached: + # It is cached, do not download again + logger.debug('Using supervisor asset cached file: ver: %s, asset: %s' % (supervisor_version, asset_name)) + return supervisor_zip_path + else: + logger.debug('Downloading supervisor asset file: ver: %s, asset: %s' % (supervisor_version, asset_name)) + return SupervisorUtils.download_supervisor_asset( + supervisor_version, + asset_name, + supervisor_zip_path + ) + + @staticmethod + def create_ecr_image(resources_info: Dict, supervisor_version: str) -> str: + """Creates an ECR image using the user provided image adding the supervisor tools.""" + # If the user set an already prepared image return the image name + image_name = ContainerImage._ecr_image_name_prepared(resources_info.get('lambda').get('container')) + if image_name: + return image_name + + tmp_folder = FileUtils.create_tmp_dir() + + # Create function config file + FileUtils.write_yaml(FileUtils.join_paths(tmp_folder.name, "function_config.yaml"), + create_function_config(resources_info)) + + init_script_path = resources_info.get('lambda').get('init_script') + # Copy the init script defined by the user to the payload folder + if init_script_path: + FileUtils.copy_file(init_script_path, + FileUtils.join_paths(tmp_folder.name, + FileUtils.get_file_name(init_script_path))) + + # Get supervisor zip + supervisor_zip_path = ContainerImage.get_supervisor_zip(resources_info, supervisor_version) + # Unzip the supervisor file to the temp file + FileUtils.unzip_folder(supervisor_zip_path, tmp_folder.name) + + # Create dockerfile to generate the new ECR image + FileUtils.create_file_with_content("%s/Dockerfile" % tmp_folder.name, + ContainerImage._create_dockerfile_ecr_image(resources_info.get('lambda'))) + + # Create the ECR Repo and get the image uri + ecr_cli = ECR(resources_info) + repo_name = resources_info.get('lambda').get('name') + ecr_image = ecr_cli.get_repository_uri(repo_name) + 
if not ecr_image: + logger.info('Creating ECR repository: %s' % repo_name) + ecr_image = ecr_cli.create_repository(repo_name) + + # Build and push the image to the ECR repo + platform = None + arch = resources_info.get('lambda').get('architectures', ['x86_64'])[0] + if arch == 'arm64': + platform = 'linux/arm64' + return ContainerImage._build_push_ecr_image(tmp_folder.name, ecr_image, platform, ecr_cli.get_authorization_token()) + + @staticmethod + def _create_dockerfile_ecr_image(lambda_info: Dict) -> str: + """Create dockerfile to generate the new ECR image.""" + dockerfile = 'from %s\n' % lambda_info.get('container').get('image') + dockerfile += 'ARG FUNCTION_DIR="/var/task"\n' + dockerfile += 'WORKDIR ${FUNCTION_DIR}\n' + dockerfile += 'ENV PATH="${FUNCTION_DIR}:${PATH}"\n' + # Add PYTHONIOENCODING to avoid UnicodeEncodeError as sugested in: + # https://github.com/aws/aws-lambda-python-runtime-interface-client/issues/19 + dockerfile += 'ENV PYTHONIOENCODING="utf8"\n' + + # Add user environment variables + vars = lambda_info.get('container').get('environment').get('Variables', {}) + for key, value in vars.items(): + dockerfile += 'ENV %s="%s"\n' % (key, value) + + dockerfile += 'CMD [ "supervisor" ]\n' + dockerfile += 'ADD supervisor ${FUNCTION_DIR}\n' + dockerfile += 'COPY function_config.yaml ${FUNCTION_DIR}\n' + init_script_path = lambda_info.get('init_script') + if init_script_path: + dockerfile += 'COPY %s ${FUNCTION_DIR}\n' % FileUtils.get_file_name(init_script_path) + return dockerfile + + @staticmethod + def _ecr_image_name_prepared(container_info: Dict) -> str: + """If the user set an already prepared image return the image name.""" + image_name = container_info.get('image') + if ":" not in image_name: + image_name = "%s:latest" % image_name + if not container_info.get('create_image') and ".dkr.ecr." 
in image_name: + logger.info('Image already prepared in ECR.') + return image_name + return None + + @staticmethod + def _build_push_ecr_image(tmp_folder: str, ecr_image: str, platform: str, auth_token: Set) -> str: + try: + dclient = docker.from_env() + except docker.errors.DockerException: + raise Exception("Error getting docker client. Check if current user has the correct permissions (docker group).") + logger.info('Building new ECR image: %s' % ecr_image) + dclient.images.build(path=tmp_folder, tag=ecr_image, pull=True, platform=platform) + + # Login to the ECR registry + # Known issue it does not work in Widnows WSL environment + registry = os.path.dirname(ecr_image) + logger.info('Login to ECR registry %s' % registry) + dclient.login(username=auth_token[0], password=auth_token[1], registry=registry) + + # Push the image, and change it in the container image to use it insteads of the user one + logger.info('Pushing new image to ECR ...') + for line in dclient.images.push(ecr_image, stream=True, decode=True): + logger.debug(line) + if 'error' in line: + raise Exception("Error pushing image: %s" % line['errorDetail']['message']) + return "%s:latest" % ecr_image \ No newline at end of file diff --git a/scar/providers/aws/controller.py b/scar/providers/aws/controller.py index 7405cdaa..a6d39849 100644 --- a/scar/providers/aws/controller.py +++ b/scar/providers/aws/controller.py @@ -64,6 +64,7 @@ def _get_all_functions(resources_info: Dict): arn_list = ResourceGroups(resources_info).get_resource_arn_list(IAM(resources_info).get_user_name_or_id()) return Lambda(resources_info).get_all_functions(arn_list) + def _check_preheat_function(resources_info: Dict): if resources_info.get('lambda').get('preheat', False): Lambda(resources_info).preheat_function() @@ -146,12 +147,12 @@ def __init__(self, func_call): def init(self) -> None: for resources_info in self.aws_resources: resources_info = deepcopy(resources_info) - _check_function_defined(resources_info) - # We have to 
create the gateway before creating the function - self._create_api_gateway(resources_info) # Check the specified supervisor version resources_info['lambda']['supervisor']['version'] = SupervisorUtils.check_supervisor_version( resources_info.get('lambda').get('supervisor').get('version')) + _check_function_defined(resources_info) + # We have to create the gateway before creating the function + self._create_api_gateway(resources_info) self._create_lambda_function(resources_info) self._create_log_group(resources_info) self._create_s3_buckets(resources_info) @@ -283,7 +284,14 @@ def _create_s3_buckets(self, resources_info: Dict) -> None: for bucket in resources_info.get('lambda').get('input'): if bucket.get('storage_provider') == 's3': bucket_name, folders = s3_service.create_bucket_and_folders(bucket.get('path')) - Lambda(resources_info).link_function_and_bucket(bucket_name) + lambda_client = Lambda(resources_info) + lambda_client.link_function_and_bucket(bucket_name) + # Check if function is already available + logger.info("Wait function to be 'Active'") + if not lambda_client.wait_function_active(resources_info.get('lambda').get('arn')): + logger.error("Timeout waiting function.") + else: + logger.info("Function 'Active'") s3_service.set_input_bucket_notification(bucket_name, folders) if not folders: logger.info(f'Input bucket "{bucket_name}" successfully created') @@ -344,7 +352,8 @@ def _delete_bucket_notifications(self, resources_info: Dict) -> None: lambda_client = Lambda(resources_info) function_name = resources_info.get('lambda').get('name') resources_info['lambda']['arn'] = lambda_client.get_function_configuration(function_name).get('FunctionArn') - resources_info['lambda']['input'] = lambda_client.get_fdl_config(function_name).get('input', False) + if not resources_info.get('lambda').get('input'): + resources_info['lambda']['input'] = lambda_client.get_fdl_config(function_name).get('input', False) if resources_info.get('lambda').get('input'): for 
input_storage in resources_info.get('lambda').get('input'): if input_storage.get('storage_provider') == 's3': diff --git a/scar/providers/aws/ecr.py b/scar/providers/aws/ecr.py new file mode 100644 index 00000000..e4e6c0b5 --- /dev/null +++ b/scar/providers/aws/ecr.py @@ -0,0 +1,51 @@ +# Copyright (C) GRyCAP - I3M - UPV +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""Module with classes and methods to manage the +CloudWatch Log functionalities at high level.""" + +from typing import Dict +from scar.providers.aws import GenericClient + + +class ECR(GenericClient): + """Manages the AWS ElasticContainerRegistry functionality""" + + def __init__(self, resources_info: Dict): + super().__init__() + self.resources_info = resources_info + + def get_authorization_token(self) -> str: + """Retrieves an authorization token.""" + return self.client.get_authorization_token() + + def get_registry_url(self) -> str: + """Retrieves the registry URL.""" + registry_id = self.client.get_registry_id() + return "%s.dkr.ecr.us-east-1.amazonaws.com" % registry_id + + def get_repository_uri(self, repository_name: str) -> str: + """Check if a repository exists.""" + response = self.client.describe_repositories(repositoryNames=[repository_name]) + if response: + return response["repositories"][0]["repositoryUri"] + else: + return None + + def create_repository(self, repository_name: str) -> str: + """Creates a repository.""" + response = self.client.create_repository(repository_name) + return 
response["repository"]["repositoryUri"] + + def delete_repository(self, repository_name: str): + self.client.delete_repository(repository_name) \ No newline at end of file diff --git a/scar/providers/aws/functioncode.py b/scar/providers/aws/functioncode.py index 3541d454..21d2dc8d 100644 --- a/scar/providers/aws/functioncode.py +++ b/scar/providers/aws/functioncode.py @@ -40,6 +40,11 @@ def create_function_config(resources_info): function_cfg.update({"batch": { "multi_node_parallel": resources_info.get('batch').get("multi_node_parallel") }}) + # Add ECR specific info + if resources_info.get('lambda').get('runtime') == "image" and resources_info.get('ecr', {}).get("delete_image") is not None: + function_cfg.update({"ecr": { + "delete_image": resources_info.get('ecr').get("delete_image") + }}) return function_cfg diff --git a/scar/providers/aws/lambdafunction.py b/scar/providers/aws/lambdafunction.py index 2a27901c..f34e7be3 100644 --- a/scar/providers/aws/lambdafunction.py +++ b/scar/providers/aws/lambdafunction.py @@ -19,10 +19,11 @@ from multiprocessing.pool import ThreadPool from zipfile import ZipFile, BadZipfile import yaml +import time from botocore.exceptions import ClientError from scar.http.request import call_http_endpoint, get_file from scar.providers.aws import GenericClient -from scar.providers.aws.functioncode import FunctionPackager +from scar.providers.aws.functioncode import FunctionPackager, create_function_config from scar.providers.aws.lambdalayers import LambdaLayers from scar.providers.aws.s3 import S3 from scar.providers.aws.validators import AWSValidator @@ -30,6 +31,7 @@ import scar.logger as logger from scar.utils import DataTypesUtils, FileUtils, StrUtils, SupervisorUtils from scar.parser.cfgfile import ConfigFileParser +from scar.providers.aws.containerimage import ContainerImage MAX_CONCURRENT_INVOCATIONS = 500 @@ -48,19 +50,34 @@ def __init__(self, resources_info: Dict) -> None: self.resources_info = resources_info self.function = 
resources_info.get('lambda', {}) self.supervisor_version = resources_info.get('lambda').get('supervisor').get('version') + if (self.function.get('runtime') == "image" and + StrUtils.compare_versions(self.supervisor_version, "1.5.0b3") < 0): + # In case of using image runtime + # it must be 1.5.0-beta3 version or higher + raise Exception("Supervisor version must be 1.5.0 or higher for image runtime.") def _get_creations_args(self, zip_payload_path: str, supervisor_zip_path: str) -> Dict: - return {'FunctionName': self.function.get('name'), - 'Runtime': self.function.get('runtime'), + args = {'FunctionName': self.function.get('name'), 'Role': self.resources_info.get('iam').get('role'), - 'Handler': self.function.get('handler'), - 'Code': self._get_function_code(zip_payload_path, supervisor_zip_path), 'Environment': self.function.get('environment'), 'Description': self.function.get('description'), 'Timeout': self.function.get('timeout'), 'MemorySize': self.function.get('memory'), 'Tags': self.function.get('tags'), - 'Layers': self.function.get('layers')} + 'Architectures': self.function.get('architectures', ['x86_64'])} + if self.function.get('vpc'): + args['VpcConfig'] = self.function.get('vpc') + if self.function.get('file_system'): + args['FileSystemConfigs'] = self.function.get('file_system') + if self.function.get('runtime') == "image": + args['Code'] = {'ImageUri': self.function.get('container').get('image')} + args['PackageType'] = 'Image' + else: + args['Code'] = self._get_function_code(zip_payload_path, supervisor_zip_path) + args['Runtime'] = self.function.get('runtime') + args['Handler'] = self.function.get('handler') + args['Layers'] = self.function.get('layers') + return args def is_asynchronous(self): return self.function.get('asynchronous', False) @@ -72,18 +89,25 @@ def get_access_key(self) -> str: @excp.exception(logger) def create_function(self): # Create tmp folders - supervisor_path = FileUtils.create_tmp_dir() - tmp_folder = 
FileUtils.create_tmp_dir() - # Download supervisor - supervisor_zip_path = SupervisorUtils.download_supervisor( - self.supervisor_version, - supervisor_path.name - ) - # Manage supervisor layer - self._manage_supervisor_layer(supervisor_zip_path) - # Create function - zip_payload_path = FileUtils.join_paths(tmp_folder.name, 'function.zip') + zip_payload_path = None + supervisor_zip_path = None + if self.function.get('runtime') == "image": + # Create docker image in ECR + self.function['container']['image'] = ContainerImage.create_ecr_image(self.resources_info, + self.supervisor_version) + else: + # Check if supervisor's source is already cached + cached, supervisor_zip_path = SupervisorUtils.is_supervisor_cached(self.supervisor_version) + if not cached: + # Download supervisor + supervisor_zip_path = SupervisorUtils.download_supervisor(self.supervisor_version) + # Manage supervisor layer + self._manage_supervisor_layer(supervisor_zip_path) + # Create function + tmp_folder = FileUtils.create_tmp_dir() + zip_payload_path = FileUtils.join_paths(tmp_folder.name, 'function.zip') self._set_image_id() + self._set_fdl() creation_args = self._get_creations_args(zip_payload_path, supervisor_zip_path) response = self.client.create_function(**creation_args) if response and "FunctionArn" in response: @@ -95,6 +119,10 @@ def _set_image_id(self): if image: self.function['environment']['Variables']['IMAGE_ID'] = image + def _set_fdl(self): + fdl = StrUtils.dict_to_base64_string(create_function_config(self.resources_info)) + self.function['environment']['Variables']['FDL'] = fdl + def _manage_supervisor_layer(self, supervisor_zip_path: str) -> None: layers_client = LambdaLayers(self.resources_info, self.client, supervisor_zip_path) self.function.get('layers', []).append(layers_client.get_supervisor_layer_arn()) @@ -118,7 +146,17 @@ def _get_function_code(self, zip_payload_path: str, supervisor_zip_path: str) -> return code def delete_function(self): - return 
self.client.delete_function(self.resources_info.get('lambda').get('name')) + function_name = self.resources_info.get('lambda').get('name') + fdl = self.get_fdl_config(function_name) + res = self.client.delete_function(function_name) + runtime = fdl.get('runtime', self.function.get('runtime')) + if runtime == "image": + ecr_info = self.resources_info.get('ecr', {'delete_image': True}) + ecr_info.update(fdl.get('ecr', {})) + # only delete the image if delete_image is True and create_image was True + if ecr_info.get('delete_image') and fdl.get('container', {}).get('create_image', True): + ContainerImage.delete_ecr_image(self.resources_info) + return res def link_function_and_bucket(self, bucket_name: str) -> None: kwargs = {'FunctionName': self.function.get('name'), @@ -161,9 +199,9 @@ def launch_lambda_instance(self): if self.is_asynchronous(): self.set_asynchronous_call_parameters() response = self._invoke_lambda_function() - response_args = {'Response' : response, - 'FunctionName' : self.function.get('name'), - 'IsAsynchronous' : self.function.get('asynchronous')} + response_args = {'Response': response, + 'FunctionName': self.function.get('name'), + 'IsAsynchronous': self.function.get('asynchronous')} return response_args def _get_invocation_payload(self): @@ -204,8 +242,10 @@ def merge_aws_and_local_configuration(self, aws_conf: Dict) -> Dict: result['lambda']['arn'] = aws_conf['FunctionArn'] result['lambda']['timeout'] = aws_conf['Timeout'] result['lambda']['memory'] = aws_conf['MemorySize'] - result['lambda']['environment']['Variables'] = aws_conf['Environment']['Variables'].copy() - result['lambda']['layers'] = aws_conf['Layers'].copy() + if 'Environment' in result: + result['lambda']['environment']['Variables'] = aws_conf['Environment']['Variables'].copy() + if 'Layers' in result: + result['lambda']['layers'] = aws_conf['Layers'].copy() result['lambda']['supervisor']['version'] = aws_conf['SupervisorVersion'] return result @@ -223,7 +263,16 @@ def 
get_function_configuration(self, arn: str = None) -> Dict: def get_fdl_config(self, arn: str = None) -> Dict: function = arn if arn else self.function.get('name') function_info = self.client.get_function(function) - dep_pack_url = function_info.get('Code').get('Location') + # Get the FDL from the env variable + fdl = function_info.get('Configuration', {}).get('Environment', {}).get('Variables', {}).get('FDL') + if fdl: + return yaml.safe_load(StrUtils.decode_base64(fdl)) + + # In the future this part can be removed + if 'Location' in function_info.get('Code'): + dep_pack_url = function_info.get('Code').get('Location') + else: + return {} dep_pack = get_file(dep_pack_url) # Extract function_config.yaml try: @@ -258,9 +307,9 @@ def add_invocation_permission_from_api_gateway(self): self.client.add_invocation_permission(**kwargs) # Add Invocation permission kwargs['SourceArn'] = api.get('source_arn_invocation').format(api_region=api.get('region'), - account_id=self.resources_info.get('iam').get('account_id'), - api_id=api.get('id'), - stage_name=api.get('stage_name')) + account_id=self.resources_info.get('iam').get('account_id'), + api_id=api.get('id'), + stage_name=api.get('stage_name')) self.client.add_invocation_permission(**kwargs) def get_api_gateway_id(self): @@ -276,7 +325,7 @@ def _get_api_gateway_url(self): stage_name=self.resources_info.get('api_gateway').get('stage_name')) def call_http_endpoint(self): - invoke_args = {'headers' : {'X-Amz-Invocation-Type':'Event'} if self.is_asynchronous() else {}} + invoke_args = {'headers': {'X-Amz-Invocation-Type': 'Event'} if self.is_asynchronous() else {}} self._set_invoke_args(invoke_args) return call_http_endpoint(self._get_api_gateway_url(), **invoke_args) @@ -299,3 +348,12 @@ def _get_b64encoded_binary_data(self): AWSValidator.validate_http_payload_size(data_path, self.is_asynchronous()) with open(data_path, 'rb') as data_file: return base64.b64encode(data_file.read()) + + def wait_function_active(self, 
function_arn, max_time=60, delay=2): + func = {"State": "Pending"} + wait = 0 + while "State" in func and func["State"] == "Pending" and wait < max_time: + func = self.get_function_configuration(function_arn) + time.sleep(delay) + wait += delay + return func["State"] == "Active" diff --git a/scar/providers/aws/launchtemplates.py b/scar/providers/aws/launchtemplates.py index 685bad38..ad5cda49 100644 --- a/scar/providers/aws/launchtemplates.py +++ b/scar/providers/aws/launchtemplates.py @@ -121,7 +121,7 @@ def get_launch_template_version(self) -> int: 'faas-supervisor'. If it does not exists creates a new one.""" if self._is_supervisor_created(): is_created = self._is_supervisor_version_created() - if is_created is not -1: + if is_created != -1: logger.info(f"Using existent '{self.template_name}' launch template.") return is_created else: diff --git a/scar/providers/aws/response.py b/scar/providers/aws/response.py index 27cc132f..f59b2a27 100644 --- a/scar/providers/aws/response.py +++ b/scar/providers/aws/response.py @@ -187,32 +187,38 @@ def _parse_asynchronous_invocation_response(response, output_type, function_name def _parse_requestresponse_invocation_response(**kwargs): - if kwargs['Response']: - response = kwargs['Response'] - aws_output = 'LambdaOutput' + response = kwargs['Response'] + + if 'errorMessage' in response['Payload']: + json_message = response['Payload'] + text_message = response['Payload']['errorMessage'] + else: + request_id = response['ResponseMetadata']['RequestId'] log_group_name = response['Payload']['headers']['amz-log-group-name'] log_stream_name = response['Payload']['headers']['amz-log-stream-name'] - request_id = response['ResponseMetadata']['RequestId'] if "exception" in response['Payload']['body']: body = ("ERROR launching udocker container: \n " f"{json.loads(response['Payload']['body'])['exception']}") - else: + elif response['Payload']['isBase64Encoded']: body = StrUtils.base64_to_utf8_string(response['Payload']['body']) + else: + 
body = response['Payload']['body'] text_message = (f"Request Id: {request_id}\n" f"Log Group Name: {log_group_name}\n" f"Log Stream Name: {log_stream_name}\n") text_message += body - json_message = {aws_output : {'StatusCode' : response['StatusCode'], - 'Payload' : body, - 'LogGroupName' : log_group_name, - 'LogStreamName' : log_stream_name, - 'RequestId' : request_id}} - if 'OutputFile' in kwargs and kwargs['OutputFile']: - _print_generic_response(response, kwargs['OutputType'], aws_output, text_message, json_output=json_message, output_file=kwargs['OutputFile']) - else: - _print_generic_response(response, kwargs['OutputType'], aws_output, text_message, json_output=json_message) + json_message = {'LambdaOutput' : {'StatusCode' : response['StatusCode'], + 'Payload' : body, + 'LogGroupName' : log_group_name, + 'LogStreamName' : log_stream_name, + 'RequestId' : request_id}} + output_file = None + if 'OutputFile' in kwargs and kwargs['OutputFile']: + output_file = kwargs['OutputFile'] + _print_generic_response(response, kwargs['OutputType'], 'LambdaOutput', text_message, + json_output=json_message, output_file=output_file) def _parse_base64_response_values(value): diff --git a/scar/providers/aws/udocker.py b/scar/providers/aws/udocker.py index dbfd75d1..17d486dc 100644 --- a/scar/providers/aws/udocker.py +++ b/scar/providers/aws/udocker.py @@ -13,7 +13,7 @@ # limitations under the License. 
from zipfile import ZipFile -from scar.utils import FileUtils, SysUtils, StrUtils +from scar.utils import FileUtils, SysUtils def _extract_udocker_zip(supervisor_zip_path) -> None: diff --git a/scar/utils.py b/scar/utils.py index a08a944c..8fc3e199 100644 --- a/scar/utils.py +++ b/scar/utils.py @@ -26,7 +26,7 @@ from copy import deepcopy from zipfile import ZipFile from io import BytesIO -from typing import Optional, Dict, List, Generator, Union, Any +from typing import Optional, Dict, List, Generator, Union, Any, Tuple from distutils import dir_util from packaging import version import yaml @@ -397,8 +397,12 @@ class GitHubUtils: def get_latest_release(user: str, project: str) -> str: """Get the tag of the latest release in a repository.""" url = f'https://api.github.com/repos/{user}/{project}/releases/latest' - response = json.loads(request.get_file(url)) - return response.get('tag_name', '') + response = request.get_file(url) + if response: + response = json.loads(response) + return response.get('tag_name', '') + else: + return None @staticmethod def exists_release_in_repo(user: str, project: str, tag_name: str) -> bool: @@ -456,11 +460,14 @@ class SupervisorUtils: _SUPERVISOR_GITHUB_REPO = 'faas-supervisor' _SUPERVISOR_GITHUB_USER = 'grycap' _SUPERVISOR_GITHUB_ASSET_NAME = 'supervisor' + _SUPERVISOR_CACHE_DIR = '/var/tmp/cache/scar' + _SUPERVISOR_SOURCE_NAME = 'faas-supervisor.zip' @classmethod - def download_supervisor(cls, supervisor_version: str, path: str) -> str: + def download_supervisor(cls, supervisor_version: str) -> str: """Downloads the FaaS Supervisor .zip package to the specified path.""" - supervisor_zip_path = FileUtils.join_paths(path, 'faas-supervisor.zip') + path = FileUtils.join_paths(cls._SUPERVISOR_CACHE_DIR, supervisor_version) + supervisor_zip_path = FileUtils.join_paths(path, cls._SUPERVISOR_SOURCE_NAME) supervisor_zip_url = GitHubUtils.get_source_code_url( cls._SUPERVISOR_GITHUB_USER, cls._SUPERVISOR_GITHUB_REPO, @@ -476,6 +483,7 @@ def 
check_supervisor_version(cls, supervisor_version: str) -> str: if GitHubUtils.exists_release_in_repo(cls._SUPERVISOR_GITHUB_USER, cls._SUPERVISOR_GITHUB_REPO, supervisor_version): + logger.info(f'Using supervisor release: \'{supervisor_version}\'.') return supervisor_version latest_version = SupervisorUtils.get_latest_release() if supervisor_version != 'latest': @@ -496,3 +504,34 @@ def get_latest_release(cls) -> str: """Returns the latest FaaS Supervisor version.""" return GitHubUtils.get_latest_release(cls._SUPERVISOR_GITHUB_USER, cls._SUPERVISOR_GITHUB_REPO) + + @classmethod + def download_supervisor_asset(cls, version: str, asset_name: str, supervisor_zip_path: str) -> str: + """Downloads the FaaS Supervisor asset to the specified path.""" + supervisor_zip_url = GitHubUtils.get_asset_url(cls._SUPERVISOR_GITHUB_USER, + cls._SUPERVISOR_GITHUB_REPO, + asset_name, + version) + with open(supervisor_zip_path, "wb") as thezip: + thezip.write(request.get_file(supervisor_zip_url)) + return supervisor_zip_path + + @classmethod + def is_supervisor_asset_cached(cls, asset_name: str, supervisor_version: str) -> Tuple[bool, str]: + """Check if specified supervisor asset is cached.""" + supervisor_path = FileUtils.join_paths(cls._SUPERVISOR_CACHE_DIR, supervisor_version) + supervisor_zip_path = FileUtils.join_paths(supervisor_path, asset_name) + try: + # The file must exist and be more that 1MB + if os.path.isfile(supervisor_zip_path) and os.path.getsize(supervisor_zip_path) > 1048576: + return True, supervisor_zip_path + elif not os.path.exists(supervisor_path): + os.makedirs(supervisor_path) + except Exception as ex: + logger.warning('Error checking asset file in cache: %s' % ex) + return False, supervisor_zip_path + + @classmethod + def is_supervisor_cached(cls, supervisor_version: str) -> Tuple[bool, str]: + """Check if supervisor source is cached.""" + return SupervisorUtils.is_supervisor_asset_cached(cls._SUPERVISOR_SOURCE_NAME, supervisor_version) \ No newline at end 
of file diff --git a/setup.py b/setup.py index dc6ccd7c..6becbd3b 100644 --- a/setup.py +++ b/setup.py @@ -29,18 +29,18 @@ long_description_content_type='text/markdown', url='https://github.com/grycap/scar', author='GRyCAP - Universitat Politecnica de Valencia', - author_email='alpegon3@upv.es', + author_email='products@grycap.upv.es', license='Apache 2.0', packages=find_namespace_packages(), install_requires=['setuptools >= 40.8.0', 'boto3', 'urllib3', - 'faas-supervisor', 'tabulate', 'configparser', 'requests', 'pyyaml', 'packaging', + 'docker' ], platforms=["any"], entry_points={ diff --git a/test/files/supervisor.zip b/test/files/supervisor.zip new file mode 100644 index 00000000..db81a34b Binary files /dev/null and b/test/files/supervisor.zip differ diff --git a/test/unit/aws/test_apigateway.py b/test/unit/aws/test_apigateway.py new file mode 100644 index 00000000..e6750684 --- /dev/null +++ b/test/unit/aws/test_apigateway.py @@ -0,0 +1,83 @@ +#! /usr/bin/python + +# Copyright (C) GRyCAP - I3M - UPV +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import unittest +import sys +import os +from mock import MagicMock +from mock import patch + +sys.path.append("..") +sys.path.append(".") +sys.path.append("../..") + +from scar.providers.aws.apigateway import APIGateway + + +class TestAPIGateway(unittest.TestCase): + + def __init__(self, *args): + unittest.TestCase.__init__(self, *args) + + def test_init(self): + ecr = APIGateway({}) + self.assertEqual(type(ecr.client.client).__name__, "APIGateway") + + @patch('boto3.Session') + def test_create_api_gateway(self, boto_session): + session = MagicMock(['client']) + client = MagicMock(['create_rest_api', 'get_resources', 'create_resource', 'put_method', + 'put_integration', 'create_deployment']) + session.client.return_value = client + boto_session.return_value = session + apig = APIGateway({'lambda': {'name': 'fname', + 'environment': {'Variables': {}}}, + 'api_gateway': {'name': 'apiname', + 'path_part': 'path', + 'http_method': 'ANY', + 'stage_name': 'scar', + 'integration': {'uri': 'arn:aws:apigateway...'}}}) + apig.client.client.create_rest_api.return_value = {'id': 'apiid'} + apig.client.client.create_resource.return_value = {'id': 'resid'} + apig.client.client.get_resources.return_value = {'items': [{'path': '/', 'id': 'rid'}]} + + apig.create_api_gateway() + + res = {'name': 'apiname', + 'description': 'API created automatically with SCAR', + 'endpointConfiguration': {'types': ['REGIONAL']}} + self.assertEqual(apig.client.client.create_rest_api.call_args_list[0][1], res) + res = {'parentId': 'rid', 'pathPart': 'path', 'restApiId': 'apiid'} + self.assertEqual(apig.client.client.create_resource.call_args_list[0][1], res) + res = {'httpMethod': 'ANY', 'resourceId': 'resid', 'restApiId': 'apiid'} + self.assertEqual(apig.client.client.put_method.call_args_list[0][1], res) + res = {'restApiId': 'apiid', 'resourceId': 'resid', 'httpMethod': 'ANY', 'uri': 'arn:aws:apigateway...'} + self.assertEqual(apig.client.client.put_integration.call_args_list[0][1], res) + res = 
{'restApiId': 'apiid', 'stageName': 'scar'} + self.assertEqual(apig.client.client.create_deployment.call_args_list[0][1], res) + + @patch('boto3.Session') + def test_delete_api_gateway(self, boto_session): + session = MagicMock(['client']) + client = MagicMock(['delete_rest_api']) + session.client.return_value = client + boto_session.return_value = session + apig = APIGateway({'lambda': {'name': 'fname', + 'environment': {'Variables': {'API_GATEWAY_ID': 'apiid'}}}}) + + apig.delete_api_gateway() + + res = {'restApiId': 'apiid'} + self.assertEqual(apig.client.client.delete_rest_api.call_args_list[0][1], res) diff --git a/test/unit/aws/test_batch.py b/test/unit/aws/test_batch.py new file mode 100644 index 00000000..762c3c50 --- /dev/null +++ b/test/unit/aws/test_batch.py @@ -0,0 +1,166 @@ +#! /usr/bin/python + +# Copyright (C) GRyCAP - I3M - UPV +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import unittest +import sys +import base64 +from mock import MagicMock +from mock import patch + +sys.path.append("..") +sys.path.append(".") +sys.path.append("../..") + +from scar.providers.aws.batchfunction import Batch + + +class TestBatch(unittest.TestCase): + + def __init__(self, *args): + unittest.TestCase.__init__(self, *args) + + def test_init(self): + batch = Batch({'lambda': {'name': 'fname'}}) + self.assertEqual(type(batch.client.client).__name__, "Batch") + + @patch('boto3.Session') + @patch('scar.providers.aws.launchtemplates.SupervisorUtils.get_supervisor_binary_url') + @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file') + def test_create_batch_environment(self, load_tmp_config_file, get_supervisor_binary_url, + boto_session): + session = MagicMock(['client']) + client = MagicMock(['register_job_definition', 'describe_launch_templates', + 'create_launch_template', 'create_compute_environment', + 'describe_compute_environments', 'create_job_queue']) + session.client.return_value = client + boto_session.return_value = session + load_tmp_config_file.return_value = {} + get_supervisor_binary_url.return_value = "https://some.es" + + batch = Batch({'lambda': {'name': 'fname', + 'supervisor': {'version': '1.4.2'}, + 'container': {'image': 'some/image:tag', + 'environment': {'Variables': {}}}}, + 'batch': {'service_role': 'srole', + 'memory': 1024, + 'vcpus': 1, + 'type': 'MANAGED', + 'state': 'ENABLED', + 'enable_gpu': True, + 'compute_resources': {'instance_role': 'irole', + 'type': 'EC2', + 'min_v_cpus': 0, + 'max_v_cpus': 2, + 'desired_v_cpus': 1, + 'instance_types': ['m1.small'], + 'subnets': [], + 'security_group_ids': [], + 'launch_template_name': 'temp_name'}, + 'environment': {'Variables': {}}, + 'multi_node_parallel': {'enabled': False}}, + 'iam': {'account_id': 'id', 'role': 'role'}}) + + batch.client.client.register_job_definition.return_value = {} + batch.client.client.describe_launch_templates.return_value = {} + 
batch.client.client.create_launch_template.return_value = { + 'LaunchTemplate': {'LatestVersionNumber': '1'} + } + batch.client.client.create_compute_environment.return_value = {} + batch.client.client.describe_compute_environments.return_value = { + 'computeEnvironments': [{'state': 'ENABLED', 'status': 'VALID'}] + } + batch.client.client.create_job_queue.return_value = {} + batch.create_batch_environment() + + func_config = b"container:\n" + func_config += b" environment:\n" + func_config += b" Variables: {}\n" + func_config += b" image: some/image:tag\n" + func_config += b"name: fname\n" + func_config += b"storage_providers: {}\n" + func_config += b"supervisor:\n" + func_config += b" version: 1.4.2\n" + func_config_64 = base64.b64encode(func_config).decode() + res = {'jobDefinitionName': 'fname', + 'containerProperties': { + 'image': 'some/image:tag', + 'memory': 1024, + 'vcpus': 1, + 'command': ['/bin/sh', '-c', + 'echo $EVENT | /opt/faas-supervisor/bin/supervisor'], + 'volumes': [{'host': {'sourcePath': '/opt/faas-supervisor/bin'}, + 'name': 'supervisor-bin'}], + 'environment': [{'name': 'AWS_LAMBDA_FUNCTION_NAME', 'value': 'fname'}, + {'name': 'SCRIPT', 'value': ''}, + {'name': 'FUNCTION_CONFIG', 'value': func_config_64}], + 'mountPoints': [{'containerPath': '/opt/faas-supervisor/bin', + 'sourceVolume': 'supervisor-bin'}], + 'resourceRequirements': [{'value': '1', + 'type': 'GPU'}]}, + 'type': 'container'} + self.assertEqual(batch.client.client.register_job_definition.call_args_list[0][1], res) + self.assertEqual(batch.client.client.create_launch_template.call_args_list[0][1]['LaunchTemplateName'], 'temp_name') + self.assertEqual(batch.client.client.create_launch_template.call_args_list[0][1]['VersionDescription'], '1.4.2') + res = {'computeEnvironmentName': 'fname', + 'serviceRole': 'srole', + 'type': 'MANAGED', + 'state': 'ENABLED', + 'computeResources': {'type': 'EC2', + 'minvCpus': 0, + 'maxvCpus': 2, + 'desiredvCpus': 1, + 'instanceTypes': ['m1.small'], 
+ 'subnets': [], + 'securityGroupIds': [], + 'instanceRole': 'irole', + 'launchTemplate': {'launchTemplateName': 'temp_name', + 'version': '1'}}} + self.assertEqual(batch.client.client.create_compute_environment.call_args_list[0][1], res) + + @patch('boto3.Session') + def test_delete_compute_environment(self, boto_session): + session = MagicMock(['client']) + client = MagicMock(['describe_job_definitions', 'deregister_job_definition', + 'describe_job_queues', 'update_job_queue', 'delete_job_queue', + 'describe_compute_environments', 'update_compute_environment', + 'delete_compute_environment']) + session.client.return_value = client + boto_session.return_value = session + + batch = Batch({'lambda': {'name': 'fname'}}) + + batch.client.client.describe_job_definitions.return_value = {} + batch.client.client.describe_job_queues.side_effect = [{'jobQueues': [{'state': 'ENABLED', + 'status': 'VALID'}]}, + {'jobQueues': [{'state': 'DISABLED', + 'status': 'VALID'}]}, + {'jobQueues': []}] + batch.client.client.describe_compute_environments.side_effect = [ + {'computeEnvironments': [{'state': 'ENABLED', 'status': 'VALID'}]}, + {'computeEnvironments': [{'state': 'DISABLED', 'status': 'VALID'}]}, + {'computeEnvironments': []} + ] + + batch.delete_compute_environment() + + res = {'jobQueue': 'fname', 'state': 'DISABLED'} + self.assertEqual(batch.client.client.update_job_queue.call_args_list[0][1], res) + res = {'jobQueue': 'fname'} + self.assertEqual(batch.client.client.delete_job_queue.call_args_list[0][1], res) + + res = {'computeEnvironment': 'fname', 'state': 'DISABLED'} + self.assertEqual(batch.client.client.update_compute_environment.call_args_list[0][1], res) + res = {'computeEnvironment': 'fname'} + self.assertEqual(batch.client.client.delete_compute_environment.call_args_list[0][1], res) diff --git a/test/unit/aws/test_cloudwatchlogs.py b/test/unit/aws/test_cloudwatchlogs.py new file mode 100644 index 00000000..0259630f --- /dev/null +++ 
b/test/unit/aws/test_cloudwatchlogs.py @@ -0,0 +1,65 @@ +#! /usr/bin/python + +# Copyright (C) GRyCAP - I3M - UPV +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import unittest +import sys +from mock import MagicMock +from mock import patch + +sys.path.append("..") +sys.path.append(".") +sys.path.append("../..") + +from scar.providers.aws.cloudwatchlogs import CloudWatchLogs + + +class TestCloudWatchLogs(unittest.TestCase): + + def __init__(self, *args): + unittest.TestCase.__init__(self, *args) + + def test_init(self): + cwl = CloudWatchLogs({}) + self.assertEqual(type(cwl.client.client).__name__, "CloudWatchLogs") + + def test_get_log_group_name(self): + cwl = CloudWatchLogs({'lambda': {'name': 'fname'}}) + self.assertEqual(cwl.get_log_group_name(), '/aws/lambda/fname') + + @patch('boto3.Session') + def test_create_log_group(self, boto_session): + session = MagicMock(['client']) + client = MagicMock(['create_log_group', 'put_retention_policy']) + session.client.return_value = client + boto_session.return_value = session + cwl = CloudWatchLogs({'lambda': {'name': 'fname', 'tags': {'createdby': 'scar'}}, + 'cloudwatch': {'log_retention_policy_in_days': 1}}) + cwl.client.client.create_log_group.return_value = "resp" + self.assertEqual(cwl.create_log_group(), "resp") + res = {'logGroupName': '/aws/lambda/fname', 'tags': {'createdby': 'scar'}} + self.assertEqual(cwl.client.client.create_log_group.call_args_list[0][1], res) + + @patch('boto3.Session') + def 
test_get_aws_logs(self, boto_session): + session = MagicMock(['client']) + client = MagicMock(['filter_log_events', 'describe_jobs']) + session.client.return_value = client + boto_session.return_value = session + cwl = CloudWatchLogs({'lambda': {'name': 'fname'}, + 'cloudwatch': {'log_stream_name': 'stream', + 'request_id': 'reqid'}}) + cwl.client.client.filter_log_events.return_value = {'events': [{'message': 'mess', 'timestamp': 'times'}]} + cwl.client.client.describe_jobs.return_value = {'jobs': [{'status': 'SUCCEEDED'}]} + self.assertEqual(cwl.get_aws_logs(), "Batch job status: SUCCEEDED\nmess") diff --git a/test/unit/aws/test_controller.py b/test/unit/aws/test_controller.py new file mode 100644 index 00000000..b95af858 --- /dev/null +++ b/test/unit/aws/test_controller.py @@ -0,0 +1,340 @@ +#! /usr/bin/python + +# Copyright (C) GRyCAP - I3M - UPV +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import unittest +import sys +import os +import base64 +import json +from io import StringIO +from mock import MagicMock +from mock import patch + +sys.path.append("..") +sys.path.append(".") +sys.path.append("../..") + +from scar.providers.aws.controller import AWS + + +class TestController(unittest.TestCase): + + def __init__(self, *args): + os.environ["AWS_DEFAULT_REGION"] = "us-east-1" + unittest.TestCase.__init__(self, *args) + + @patch('scar.providers.aws.controller.IAM') + @patch('scar.providers.aws.controller.Lambda') + @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file') + def test_invoke(self, load_tmp_config_file, lambda_cli, iam_cli): + lcli = MagicMock(['call_http_endpoint']) + response = MagicMock(["ok", "headers", "text"]) + response.ok.return_value = True + response.text = base64.b64encode(b"text") + response.headers = {'amz-lambda-request-id': 'id', 'amz-log-group-name': 'group', 'amz-log-stream-name': 'stream'} + lcli.call_http_endpoint.return_value = response + lambda_cli.return_value = lcli + load_tmp_config_file.return_value = {"functions": {"aws": [{"lambda": {"name": "fname", + "supervisor": {"version": "latest"}}, + "iam": {"account_id": "id", + "role": "role"}}]}} + iamcli = MagicMock(['get_user_name_or_id']) + iamcli.get_user_name_or_id.return_value = "username" + iam_cli.return_value = iamcli + + AWS("invoke") + self.assertEqual(lambda_cli.call_args_list[0][0][0]['lambda']['name'], "fname") + + @patch('scar.providers.aws.controller.IAM') + @patch('scar.providers.aws.controller.S3') + @patch('scar.providers.aws.controller.APIGateway') + @patch('scar.providers.aws.controller.CloudWatchLogs') + @patch('scar.providers.aws.controller.Lambda') + @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file') + @patch('scar.providers.aws.controller.SupervisorUtils.check_supervisor_version') + def test_init(self, check_supervisor_version, load_tmp_config_file, lambda_cli, + cloud_watch_cli, api_gateway_cli, s3_cli, 
iam_cli): + lcli = MagicMock(['find_function', 'create_function', 'get_access_key', 'wait_function_active', + 'add_invocation_permission_from_api_gateway', 'link_function_and_bucket']) + lcli.find_function.return_value = False + lcli.create_function.return_value = {'FunctionName': 'fname', 'FunctionArn': 'arn', 'Timeout': 10, 'MemorySize': 512} + lcli.wait_function_active.return_value = True + lambda_cli.return_value = lcli + cwcli = MagicMock(['create_log_group', 'get_log_group_name']) + cwcli.create_log_group.return_value = {'Payload': {'Body': 'body'}} + cwcli.get_log_group_name.return_value = "group" + agcli = MagicMock(['create_api_gateway']) + s3cli = MagicMock(['create_bucket_and_folders', 'set_input_bucket_notification']) + s3cli.create_bucket_and_folders.return_value = "name", "folders" + s3_cli.return_value = s3cli + api_gateway_cli.return_value = agcli + cloud_watch_cli.return_value = cwcli + iamcli = MagicMock(['get_user_name_or_id']) + iamcli.get_user_name_or_id.return_value = "username" + iam_cli.return_value = iamcli + load_tmp_config_file.return_value = {"functions": {"aws": [{"lambda": {"name": "fname", + "input": [{"storage_provider": "s3", + "path": "some"}], + "supervisor": {"version": "latest"}}, + "iam": {"account_id": "id", + "role": "role"}, + "api_gateway": {"name": "api_name"}}]}} + check_supervisor_version.return_value = '1.4.2' + + old_stdout = sys.stdout + sys.stdout = StringIO() + AWS("init") + res = sys.stdout.getvalue() + sys.stdout = old_stdout + expected_res = "Function 'fname' successfully created.\n" + expected_res += "Log group 'group' successfully created.\n" + expected_res += "Wait function to be 'Active'\n" + expected_res += "Function 'Active'\n" + self.assertEqual(res, expected_res) + self.assertEqual(lcli.create_function.call_count, 1) + self.assertEqual(cwcli.create_log_group.call_count, 1) + self.assertEqual(agcli.create_api_gateway.call_count, 1) + self.assertEqual(iamcli.get_user_name_or_id.call_count, 1) + 
self.assertEqual(s3cli.create_bucket_and_folders.call_args_list[0][0][0], 'some') + + @patch('scar.providers.aws.controller.IAM') + @patch('scar.providers.aws.controller.Lambda') + @patch('scar.providers.aws.controller.S3') + @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file') + @patch('scar.providers.aws.controller.SupervisorUtils.check_supervisor_version') + @patch('scar.providers.aws.controller.input') + def test_run(self, mock_input, check_supervisor_version, load_tmp_config_file, s3_cli, lambda_cli, iam_cli): + lcli = MagicMock(['launch_lambda_instance', 'launch_request_response_event', 'process_asynchronous_lambda_invocations']) + payload = MagicMock(['read']) + payload_json = {'headers': {'amz-log-group-name': 'group', + 'amz-log-stream-name': 'stream'}, + 'isBase64Encoded': False, + 'body': 'body'} + payload.read.return_value = json.dumps(payload_json).encode() + response = {'LogResult': base64.b64encode(b"log"), + 'Payload': payload, + 'StatusCode': 200, + 'ResponseMetadata': {'RequestId': 'reqid', + 'HTTPHeaders': {'x-amz-log-result': base64.b64encode(b"log2")}}} + lcli.launch_lambda_instance.return_value = {'OutputFile': 'file', 'Response': response, + 'IsAsynchronous': False} + lambda_cli.return_value = lcli + load_tmp_config_file.return_value = {"functions": {"aws": [{"lambda": {"name": "fname", + "input": [{"storage_provider": "s3", + "path": "some"}], + "supervisor": {"version": "latest"}}, + "iam": {"account_id": "id", + "role": "role"}}]}} + iamcli = MagicMock(['get_user_name_or_id']) + iamcli.get_user_name_or_id.return_value = "username" + iam_cli.return_value = iamcli + check_supervisor_version.return_value = '1.4.2' + s3cli = MagicMock(['get_bucket_file_list', 'get_s3_event', 'get_s3_event_list']) + s3cli.get_bucket_file_list.return_value = ['f1', 'f2'] + s3_cli.return_value = s3cli + mock_input.return_value = "Y" + + old_stdout = sys.stdout + sys.stdout = StringIO() + AWS("run") + res = sys.stdout.getvalue() + sys.stdout = 
old_stdout + self.assertEqual(res, "This function has an associated 'S3' input bucket.\nFiles found: '['f1', 'f2']'\n") + self.assertEqual(lambda_cli.call_args_list[0][0][0]['lambda']['name'], "fname") + + # Test run witout input file + load_tmp_config_file.return_value = {"functions": {"aws": [{"lambda": {"name": "fname", + "supervisor": {"version": "latest"}}, + "iam": {"account_id": "id", + "role": "role"}}]}} + + old_stdout = sys.stdout + sys.stdout = StringIO() + AWS("run") + res = sys.stdout.getvalue() + sys.stdout = old_stdout + self.assertEqual(res, 'Request Id: reqid\nLog Group Name: group\nLog Stream Name: stream\nbody\n') + self.assertEqual(lambda_cli.call_args_list[1][0][0]['lambda']['name'], "fname") + + @patch('scar.providers.aws.controller.IAM') + @patch('scar.providers.aws.controller.Lambda') + @patch('scar.providers.aws.controller.APIGateway') + @patch('scar.providers.aws.controller.CloudWatchLogs') + @patch('scar.providers.aws.controller.Batch') + @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file') + def test_rm(self, load_tmp_config_file, batch_cli, cloud_watch_cli, api_gateway_cli, lambda_cli, iam_cli): + lcli = MagicMock(['find_function', 'get_function_configuration', 'get_fdl_config', 'delete_function']) + lcli.get_function_configuration.return_value = {'Environment': {'Variables': {'API_GATEWAY_ID': 'i'}}} + lcli.find_function.return_value = True + lcli.get_fdl_config.return_value = {'input': False} + lcli.delete_function.return_value = "" + lambda_cli.return_value = lcli + load_tmp_config_file.return_value = {"functions": {"aws": [{"lambda": {"name": "fname", + "environment": {'Variables': {}}, + "supervisor": {"version": "latest"}}, + "iam": {"account_id": "id", + "role": "role"}}]}} + iamcli = MagicMock(['get_user_name_or_id']) + iamcli.get_user_name_or_id.return_value = "username" + iam_cli.return_value = iamcli + agcli = MagicMock(['delete_api_gateway']) + agcli.delete_api_gateway.return_value = "" + 
api_gateway_cli.return_value = agcli + cwcli = MagicMock(['delete_log_group', 'get_log_group_name']) + cwcli.delete_log_group.return_value = "" + cwcli.get_log_group_name.return_value = "gname" + cloud_watch_cli.return_value = cwcli + bcli = MagicMock(['exist_compute_environments', 'delete_compute_environment']) + bcli.exist_compute_environments.return_value = True + batch_cli.return_value = bcli + + AWS("rm") + self.assertEqual(lambda_cli.call_args_list[0][0][0]['lambda']['name'], "fname") + self.assertEqual(cwcli.delete_log_group.call_count, 1) + self.assertEqual(agcli.delete_api_gateway.call_count, 1) + self.assertEqual(lcli.delete_function.call_count, 1) + self.assertEqual(bcli.delete_compute_environment.call_count, 1) + + @patch('scar.providers.aws.controller.IAM') + @patch('scar.providers.aws.controller.S3') + @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file') + def test_get(self, load_tmp_config_file, s3_cli, iam_cli): + load_tmp_config_file.return_value = {"functions": {"aws": [{"lambda": {"name": "fname", + "input": [{"storage_provider": "s3", + "path": "some"}], + "supervisor": {"version": "latest"}}, + "iam": {"account_id": "id", + "role": "role"}}]}} + iamcli = MagicMock(['get_user_name_or_id']) + iamcli.get_user_name_or_id.return_value = "username" + iam_cli.return_value = iamcli + s3cli = MagicMock(['get_bucket_file_list', 'download_file']) + s3cli.get_bucket_file_list.return_value = ['f1', 'f2'] + s3_cli.return_value = s3cli + + AWS("get") + self.assertEqual(s3cli.download_file.call_args_list[0][0], ('some', 'f1', 'f1')) + self.assertEqual(s3cli.download_file.call_args_list[1][0], ('some', 'f2', 'f2')) + + @patch('scar.providers.aws.controller.IAM') + @patch('scar.providers.aws.controller.S3') + @patch('os.path.isdir') + @patch('scar.providers.aws.controller.FileUtils.get_all_files_in_directory') + @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file') + def test_put(self, load_tmp_config_file, 
get_all_files_in_directory, is_dir, s3_cli, iam_cli): + load_tmp_config_file.return_value = {"functions": {"aws": [{"lambda": {"name": "fname", + "input": [{"storage_provider": "s3", + "path": "some"}], + "supervisor": {"version": "latest"}}, + "iam": {"account_id": "id", + "role": "role"}}]}} + iamcli = MagicMock(['get_user_name_or_id']) + iamcli.get_user_name_or_id.return_value = "username" + iam_cli.return_value = iamcli + s3cli = MagicMock(['create_bucket_and_folders', 'upload_file']) + s3cli.create_bucket_and_folders.return_value = 'bucket', 'folder' + s3_cli.return_value = s3cli + is_dir.return_value = True + get_all_files_in_directory.return_value = ['f1', 'f2'] + + AWS("put") + self.assertEqual(s3cli.upload_file.call_args_list[0][1], {'bucket': 'bucket', + 'folder_name': 'folder', + 'file_path': 'f1'}) + self.assertEqual(s3cli.upload_file.call_args_list[1][1], {'bucket': 'bucket', + 'folder_name': 'folder', + 'file_path': 'f2'}) + + @patch('scar.providers.aws.controller.IAM') + @patch('scar.providers.aws.controller.CloudWatchLogs') + @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file') + def test_log(self, load_tmp_config_file, cloud_watch_cli, iam_cli): + load_tmp_config_file.return_value = {"functions": {"aws": [{"lambda": {"name": "fname", + "input": [{"storage_provider": "s3", + "path": "some"}], + "supervisor": {"version": "latest"}}, + "iam": {"account_id": "id", + "role": "role"}}]}} + iamcli = MagicMock(['get_user_name_or_id']) + iamcli.get_user_name_or_id.return_value = "username" + iam_cli.return_value = iamcli + cwcli = MagicMock(['get_aws_logs']) + cwcli.get_aws_logs.return_value = "log\nlog2" + cloud_watch_cli.return_value = cwcli + + old_stdout = sys.stdout + sys.stdout = StringIO() + AWS("log") + res = sys.stdout.getvalue() + sys.stdout = old_stdout + self.assertEqual(cwcli.get_aws_logs.call_count, 1) + self.assertEqual(res, 'log\nlog2\n') + + @patch('scar.providers.aws.controller.IAM') + 
@patch('scar.providers.aws.controller.S3') + @patch('scar.providers.aws.controller.ResourceGroups') + @patch('scar.providers.aws.controller.Lambda') + @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file') + def test_ls(self, load_tmp_config_file, lambda_cli, res_cli, s3_cli, iam_cli): + load_tmp_config_file.return_value = {"functions": {"aws": [{"lambda": {"name": "fname", + "input": [{"storage_provider": "s3", + "path": "some"}], + "supervisor": {"version": "latest"}}, + "iam": {"account_id": "id", + "role": "role"}}]}} + iamcli = MagicMock(['get_user_name_or_id']) + iamcli.get_user_name_or_id.return_value = "username" + iam_cli.return_value = iamcli + s3cli = MagicMock(['get_bucket_file_list']) + s3cli.get_bucket_file_list.return_value = ['f1', 'f2'] + s3_cli.return_value = s3cli + rcli = MagicMock(['get_resource_arn_list']) + res_cli.return_value = rcli + rcli.get_resource_arn_list.return_value = ['rarn'] + + + + old_stdout = sys.stdout + sys.stdout = StringIO() + AWS("ls") + res = sys.stdout.getvalue() + sys.stdout = old_stdout + self.assertEqual(res, 'f1\nf2\n') + + load_tmp_config_file.return_value = {"functions": {"aws": [{"lambda": {"name": "fname", + "supervisor": {"version": "latest"}}, + "iam": {"account_id": "id", + "role": "role"}}]}} + lcli = MagicMock(['get_all_functions']) + lcli.get_all_functions.return_value = [{"lambda": {"environment": {"Variables": {"API_GATEWAY_ID": "aid", + "IMAGE_ID": "image"}}, + "supervisor": {"version": "latest"}, + "memory": 1024, + "timeout": 300, + "name": "fname"}, + "api_gateway": {"stage_name": "stage", + "region": "region"}}] + lambda_cli.return_value = lcli + + old_stdout = sys.stdout + sys.stdout = StringIO() + AWS("ls") + res = sys.stdout.getvalue() + sys.stdout = old_stdout + expected_res = """AWS FUNCTIONS: +NAME MEMORY TIME IMAGE_ID API_URL SUPERVISOR_VERSION +------ -------- ------ ---------- --------------------------------------------------------- -------------------- +fname 1024 300 image 
https://aid.execute-api.region.amazonaws.com/stage/launch latest\n""" + self.assertEqual(res, expected_res) diff --git a/test/unit/aws/test_ecr.py b/test/unit/aws/test_ecr.py new file mode 100644 index 00000000..d62ab22c --- /dev/null +++ b/test/unit/aws/test_ecr.py @@ -0,0 +1,81 @@ +#! /usr/bin/python + +# Copyright (C) GRyCAP - I3M - UPV +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import unittest +import sys +import os +from mock import MagicMock +from mock import patch, call + +sys.path.append("..") +sys.path.append(".") +sys.path.append("../..") + +from scar.providers.aws.ecr import ECR + + +class TestECR(unittest.TestCase): + + def __init__(self, *args): + os.environ["AWS_DEFAULT_REGION"] = "us-east-1" + unittest.TestCase.__init__(self, *args) + + def test_init(self): + ecr = ECR({}) + self.assertEqual(type(ecr.client.client).__name__, "ECR") + + def _init_mocks(self, call_list): + session = MagicMock(['client']) + client = MagicMock(call_list) + session.client.return_value = client + return session + + @patch('boto3.Session') + def test_get_authorization_token(self, boto_session): + boto_session.return_value = self._init_mocks(['get_authorization_token']) + ecr = ECR({}) + token = "QVdTOnRva2Vu" + ecr.client.client.get_authorization_token.return_value = {'authorizationData': [{'authorizationToken': token}]} + self.assertEqual(ecr.get_authorization_token(), ["AWS", "token"]) + + @patch('boto3.Session') + def test_get_registry_url(self, boto_session): + 
boto_session.return_value = self._init_mocks(['describe_registry']) + ecr = ECR({}) + ecr.client.client.describe_registry.return_value = {'registryId': 'REG_ID'} + self.assertEqual(ecr.get_registry_url(), "REG_ID.dkr.ecr.us-east-1.amazonaws.com") + + @patch('boto3.Session') + def test_get_repository_uri(self, boto_session): + boto_session.return_value = self._init_mocks(['describe_repositories']) + ecr = ECR({}) + ecr.client.client.describe_repositories.return_value = {'repositories': [{'repositoryUri': 'URI'}]} + self.assertEqual(ecr.get_repository_uri('repo_name'), 'URI') + self.assertEqual(ecr.client.client.describe_repositories.call_args_list[0], call(repositoryNames=['repo_name'])) + + @patch('boto3.Session') + def test_create_repository(self, boto_session): + boto_session.return_value = self._init_mocks(['create_repository']) + ecr = ECR({}) + ecr.client.client.create_repository.return_value = {'repository': {'repositoryUri': 'URI'}} + self.assertEqual(ecr.create_repository('repo_name'), 'URI') + self.assertEqual(ecr.client.client.create_repository.call_args_list[0], call(repositoryName='repo_name')) + + @patch('boto3.Session') + def test_delete_repository(self, boto_session): + boto_session.return_value = self._init_mocks(['delete_repository']) + ecr = ECR({}) + ecr.delete_repository('repo_name') + self.assertEqual(ecr.client.client.delete_repository.call_args_list[0], call(repositoryName='repo_name', force=True)) diff --git a/test/unit/aws/test_iam.py b/test/unit/aws/test_iam.py new file mode 100644 index 00000000..3c37c62a --- /dev/null +++ b/test/unit/aws/test_iam.py @@ -0,0 +1,47 @@ +#! /usr/bin/python + +# Copyright (C) GRyCAP - I3M - UPV +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import unittest +import sys +import os +from mock import MagicMock +from mock import patch + +sys.path.append("..") +sys.path.append(".") +sys.path.append("../..") + +from scar.providers.aws.iam import IAM + + +class TestIAM(unittest.TestCase): + + def __init__(self, *args): + os.environ["AWS_DEFAULT_REGION"] = "us-east-1" + unittest.TestCase.__init__(self, *args) + + def test_init(self): + ecr = IAM({}) + self.assertEqual(type(ecr.client.client).__name__, "IAM") + + @patch('boto3.Session') + def test_get_user_name_or_id(self, boto_session): + session = MagicMock(['client']) + client = MagicMock(['get_user']) + session.client.return_value = client + boto_session.return_value = session + iam = IAM({}) + iam.client.client.get_user.return_value = {'UserName': 'name', 'User': {'UserId': 'id'}} + self.assertEqual(iam.get_user_name_or_id(), 'name') diff --git a/test/unit/aws/test_lambdafunction.py b/test/unit/aws/test_lambdafunction.py new file mode 100644 index 00000000..28eba925 --- /dev/null +++ b/test/unit/aws/test_lambdafunction.py @@ -0,0 +1,338 @@ +#! /usr/bin/python + +# Copyright (C) GRyCAP - I3M - UPV +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +import unittest +import sys +import os +import tempfile +from mock import MagicMock +from mock import patch + +sys.path.append("..") +sys.path.append(".") +sys.path.append("../..") + +from scar.utils import StrUtils +from scar.providers.aws.lambdafunction import Lambda + + +class TestLambda(unittest.TestCase): + + def __init__(self, *args): + os.environ["AWS_DEFAULT_REGION"] = "us-east-1" + unittest.TestCase.__init__(self, *args) + + def _init_mocks(self, call_list): + session = MagicMock(['client']) + client = MagicMock(call_list) + session.client.return_value = client + + resource_info = {'lambda': {'name': 'fname', + 'runtime': 'python3.7', + 'timeout': 300, + 'memory': 512, + 'layers': [], + 'log_type': 'Tail', + 'tags': {'createdby': 'scar'}, + 'handler': 'some.handler', + 'description': 'desc', + 'deployment': {'bucket': 'someb', + 'max_s3_payload_size': 262144000}, + 'environment': {'Variables': {'IMAGE_ID': 'some/image'}}, + 'container': {'image': 'some/image:tag', + 'image_file': 'some.tgz', + 'environment': {'Variables': {}}}, + 'supervisor': {'version': '1.4.2', + 'layer_name': 'layername'}}, + 'ecr': {"delete_image": True}, + 'api_gateway': {'endpoint': 'https://{api_id}.{api_region}/{stage_name}/l', + 'region': 'us-east-1', + 'stage_name': 'scar'}, + 'iam': {'role': 'iamrole'}} + + lam = Lambda(resource_info) + return session, lam, client + + def test_init(self): + cli = Lambda({'lambda': {'name': 'fname', + 'supervisor': {'version': '1.4.2'}}}) + self.assertEqual(type(cli.client.client).__name__, "Lambda") + + @patch('boto3.Session') + @patch('scar.providers.aws.launchtemplates.SupervisorUtils.download_supervisor') + @patch('scar.providers.aws.udocker.Udocker.prepare_udocker_image') + @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file') + def test_create_function(self, load_tmp_config_file, prepare_udocker_image, + 
download_supervisor, boto_session): + session, lam, _ = self._init_mocks(['list_layers', 'publish_layer_version', 'get_bucket_location', 'put_object', + 'create_function', 'list_layer_versions']) + boto_session.return_value = session + + load_tmp_config_file.return_value = {} + + tests_path = os.path.dirname(os.path.abspath(__file__)) + download_supervisor.return_value = os.path.join(tests_path, "../../files/supervisor.zip") + + lam.client.client.list_layers.return_value = {'Layers': [{'LayerName': 'layername'}]} + lam.client.client.publish_layer_version.return_value = {'LayerVersionArn': '1'} + lam.client.client.create_function.return_value = {'FunctionArn': 'farn'} + lam.client.client.list_layer_versions.return_value = {'LayerVersions': []} + + lam.create_function() + + fdl = {"storage_providers": {}, + "name": "fname", + "runtime": "python3.7", + "timeout": 300, + "memory": 512, + "layers": ["1"], + "log_type": "Tail", + "tags": {"createdby": "scar"}, + "handler": "some.handler", + "description": "desc", + "deployment": {"bucket": "someb", "max_s3_payload_size": 262144000}, + "environment": {"Variables": {"IMAGE_ID": "some/image:tag"}}, + "container": {"image": "some/image:tag", "image_file": "some.tgz", "environment": {"Variables": {}}}, + "supervisor": {"version": "1.4.2", "layer_name": "layername"}} + res = {'FunctionName': 'fname', + 'Role': 'iamrole', + 'Environment': {'Variables': {'IMAGE_ID': 'some/image:tag', + 'FDL': StrUtils.dict_to_base64_string(fdl)}}, + 'Description': 'desc', + 'Timeout': 300, + 'MemorySize': 512, + 'Tags': {'createdby': 'scar'}, + 'Code': {'S3Bucket': 'someb', 'S3Key': 'lambda/fname.zip'}, + 'Runtime': 'python3.7', + 'Handler': 'some.handler', + 'Architectures': ['x86_64'], + 'Layers': ['1']} + self.assertEqual(lam.client.client.create_function.call_args_list[0][1], res) + + self.assertEqual(lam.client.client.publish_layer_version.call_args_list[0][1]['LayerName'], "layername") + 
self.assertEqual(lam.client.client.publish_layer_version.call_args_list[0][1]['Description'], "1.4.2") + self.assertEqual(len(lam.client.client.publish_layer_version.call_args_list[0][1]['Content']['ZipFile']), 99662) + + @patch('boto3.Session') + @patch('scar.providers.aws.launchtemplates.SupervisorUtils.download_supervisor_asset') + @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file') + @patch('scar.providers.aws.lambdafunction.FileUtils.unzip_folder') + @patch('docker.from_env') + def test_create_function_image(self, from_env, unzip_folder, load_tmp_config_file, + download_supervisor_asset, boto_session): + session, lam, client = self._init_mocks(['create_function', 'create_repository', + 'describe_registry', 'get_authorization_token']) + boto_session.return_value = session + + load_tmp_config_file.return_value = {} + + tests_path = os.path.dirname(os.path.abspath(__file__)) + download_supervisor_asset.return_value = os.path.join(tests_path, "../../files/supervisor.zip") + + docker = MagicMock(['login', 'images']) + docker.images = MagicMock(['build', 'push']) + from_env.return_value = docker + + client.create_repository.return_value = {"repository": {"repositoryUri": "repouri"}} + client.describe_registry.return_value = {'registryId': 'regid'} + client.get_authorization_token.return_value = {'authorizationData': [{'authorizationToken': 'QVdTOnRva2Vu'}]} + + lam.resources_info['lambda']['runtime'] = 'image' + lam.resources_info['lambda']['supervisor']['version'] = lam.supervisor_version = '1.5.0' + lam.resources_info['lambda']['vpc'] = {'SubnetIds': ['subnet'], + 'SecurityGroupIds': ['sg']} + lam.resources_info['lambda']['file_system'] = [{'Arn': 'efsaparn', '': '/mnt'}] + + lam.create_function() + fdl = {"storage_providers": {}, + "name": "fname", + "runtime": "image", + "timeout": 300, + "memory": 512, + "layers": [], + "log_type": "Tail", + "tags": {"createdby": "scar"}, + "handler": "some.handler", + "description": "desc", + "deployment": 
{"bucket": "someb", "max_s3_payload_size": 262144000}, + "environment": {"Variables": {"IMAGE_ID": "repouri:latest"}}, + "container": {"image": "repouri:latest", "image_file": "some.tgz", "environment": {"Variables": {}}}, + "supervisor": {"version": "1.5.0", "layer_name": "layername"}, + "vpc": {"SubnetIds": ["subnet"], "SecurityGroupIds": ["sg"]}, + "file_system": [{'Arn': 'efsaparn', '': '/mnt'}], + "ecr": {"delete_image": True}} + res = {'FunctionName': 'fname', + 'Role': 'iamrole', + 'Environment': {'Variables': {'IMAGE_ID': 'repouri:latest', + 'FDL': StrUtils.dict_to_base64_string(fdl)}}, + 'Description': 'desc', + 'Timeout': 300, + 'MemorySize': 512, + 'PackageType': 'Image', + 'Tags': {'createdby': 'scar'}, + 'Architectures': ['x86_64'], + 'VpcConfig': {'SubnetIds': ['subnet'], + 'SecurityGroupIds': ['sg']}, + 'FileSystemConfigs': [{'Arn': 'efsaparn', '': '/mnt'}], + 'Code': {'ImageUri': 'repouri:latest'}} + self.assertEqual(lam.client.client.create_function.call_args_list[0][1], res) + self.assertEqual(docker.images.push.call_args_list[0][0][0], "repouri") + self.assertEqual(docker.images.build.call_args_list[0][1]['tag'], "repouri") + + @patch('boto3.Session') + def test_delete_function(self, boto_session): + session, lam, _ = self._init_mocks(['delete_function', 'get_function']) + boto_session.return_value = session + + lam.client.client.get_function.return_value = {'Configuration': {'Environment': {'Variables': {'FDL': 'e30='}}}} + lam.client.client.delete_function.return_value = {} + + lam.delete_function() + self.assertEqual(lam.client.client.delete_function.call_args_list[0][1], {'FunctionName': 'fname'}) + + @patch('boto3.Session') + @patch('scar.providers.aws.containerimage.ECR') + def test_delete_function_image(self, ecr_client, boto_session): + session, lam, _ = self._init_mocks(['delete_function', 'get_function']) + boto_session.return_value = session + + ecr = MagicMock(['get_repository_uri', 'delete_repository']) + 
ecr.get_repository_uri.return_value = "repouri" + ecr_client.return_value = ecr + + lam.client.client.get_function.return_value = {'Configuration': {'Environment': {'Variables': {'FDL': 'cnVudGltZTogaW1hZ2U='}}}} + lam.client.client.delete_function.return_value = {} + lam.resources_info['lambda']['runtime'] = 'image' + + lam.delete_function() + self.assertEqual(lam.client.client.delete_function.call_args_list[0][1], {'FunctionName': 'fname'}) + self.assertEqual(ecr.delete_repository.call_args_list[0][0][0], 'fname') + + @patch('boto3.Session') + def test_preheat_function(self, boto_session): + session, lam, _ = self._init_mocks(['invoke']) + boto_session.return_value = session + + lam.preheat_function() + res = {'FunctionName': 'fname', 'InvocationType': 'Event', 'LogType': 'None', 'Payload': '{}'} + self.assertEqual(lam.client.client.invoke.call_args_list[0][1], res) + + @patch('boto3.Session') + def test_find_function(self, boto_session): + session, lam, _ = self._init_mocks(['get_function_configuration']) + boto_session.return_value = session + + lam.client.client.get_function_configuration.return_value = {} + + self.assertEqual(lam.find_function('fname'), True) + res = {'FunctionName': 'fname'} + self.assertEqual(lam.client.client.get_function_configuration.call_args_list[0][1], res) + + @patch('boto3.Session') + def test_process_asynchronous_lambda_invocations(self, boto_session): + session, lam, _ = self._init_mocks(['invoke', 'get_function_configuration']) + boto_session.return_value = session + + lam.client.client.get_function_configuration.return_value = {} + + event = {'Records': [{'s3': {'object': {'key': 'okey'}}}]} + lam.process_asynchronous_lambda_invocations([event]) + + res = {'FunctionName': 'fname', + 'InvocationType': 'Event', + 'LogType': 'None', + 'Payload': '{"Records": [{"s3": {"object": {"key": "okey"}}}]}'} + self.assertEqual(lam.client.client.invoke.call_args_list[0][1], res) + + @patch('boto3.Session') + @patch('requests.get') + 
@patch('requests.post') + @patch('scar.providers.aws.validators.FileUtils.get_file_size') + def test_call_http_endpoint(self, get_file_size, post, get, boto_session): + session, lam, _ = self._init_mocks(['get_function_configuration']) + boto_session.return_value = session + + lam.client.client.get_function_configuration.return_value = {'Environment': {'Variables': {'API_GATEWAY_ID': 'apiid'}}} + lam.call_http_endpoint() + self.assertEqual(get.call_args_list[0][0][0], 'https://apiid.us-east-1/scar/l') + + tmpfile = tempfile.NamedTemporaryFile(delete=False, suffix=".yaml") + tmpfile.write(b"somedata\n") + tmpfile.close() + + lam.resources_info['api_gateway']['data_binary'] = tmpfile.name + lam.resources_info['api_gateway']['parameters'] = {'key': 'value'} + + get_file_size.return_value = 1024 + + lam.call_http_endpoint() + + os.unlink(tmpfile.name) + self.assertEqual(post.call_args_list[0][0][0], 'https://apiid.us-east-1/scar/l') + self.assertEqual(post.call_args_list[0][1]['data'], b'c29tZWRhdGEK') + self.assertEqual(post.call_args_list[0][1]['params'], {'key': 'value'}) + + @patch('boto3.Session') + @patch('requests.get') + @patch('scar.providers.aws.lambdafunction.ZipFile') + def test_get_fdl_config(self, zipfile, get, boto_session): + session, lam, _ = self._init_mocks(['get_function']) + boto_session.return_value = session + + response = MagicMock(['content']) + response.content = b"aa" + get.return_value = response + lam.client.client.get_function.return_value = {'SupervisorVersion': '1.4.2', + 'Code': {'Location': 'http://loc.es'}} + + zfile = MagicMock(['__enter__', '__exit__']) + zipfile.return_value = zfile + + filedata = MagicMock(['read']) + filedata.read.side_effect = ["- item\n- item2\n", ""] + filecont = MagicMock(['__enter__', '__exit__']) + filecont.__enter__.return_value = filedata + + thezip = MagicMock(['open']) + thezip.open.return_value = filecont + zfile.__enter__.return_value = thezip + + self.assertEqual(lam.get_fdl_config('arn'), ['item', 
'item2']) + self.assertEqual(get.call_args_list[0][0][0], "http://loc.es") + + @patch('boto3.Session') + def test_get_all_functions(self, boto_session): + session, lam, _ = self._init_mocks(['get_function_configuration']) + boto_session.return_value = session + + lam.client.client.get_function_configuration.return_value = {'FunctionName': 'fname', + 'FunctionArn': 'arn1', + 'Timeout': 600, + 'MemorySize': 1024} + + res = lam.get_all_functions(['arn1']) + self.assertEqual(res[0]['lambda']['memory'], 1024) + self.assertEqual(res[0]['lambda']['supervisor']['version'], '-') + + @patch('boto3.Session') + @patch('time.sleep') + def test_wait_function_active(self, sleep, boto_session): + session, lam, _ = self._init_mocks(['get_function_configuration']) + boto_session.return_value = session + + lam.client.client.get_function_configuration.return_value = {'State': 'Active'} + + self.assertEqual(lam.wait_function_active('farn'), True) + self.assertEqual(sleep.call_count, 1) diff --git a/test/unit/aws/test_resourcegroups.py b/test/unit/aws/test_resourcegroups.py new file mode 100644 index 00000000..38126f26 --- /dev/null +++ b/test/unit/aws/test_resourcegroups.py @@ -0,0 +1,45 @@ +#! /usr/bin/python + +# Copyright (C) GRyCAP - I3M - UPV +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import unittest +import sys +from mock import MagicMock +from mock import patch + +sys.path.append("..") +sys.path.append(".") +sys.path.append("../..") + +from scar.providers.aws.resourcegroups import ResourceGroups + + +class TestResourceGroups(unittest.TestCase): + + def __init__(self, *args): + unittest.TestCase.__init__(self, *args) + + def test_init(self): + rg = ResourceGroups({}) + self.assertEqual(type(rg.client.client).__name__, "ResourceGroupsTaggingAPI") + + @patch('boto3.Session') + def test_get_resource_arn_list(self, boto_session): + session = MagicMock(['client']) + client = MagicMock(['get_resources']) + session.client.return_value = client + boto_session.return_value = session + rg = ResourceGroups({}) + rg.client.client.get_resources.return_value = {'ResourceTagMappingList': [{'ResourceARN': 'rarn'}]} + self.assertEqual(rg.get_resource_arn_list('userid'), ['rarn']) diff --git a/test/unit/aws/test_s3.py b/test/unit/aws/test_s3.py new file mode 100644 index 00000000..29cc64bb --- /dev/null +++ b/test/unit/aws/test_s3.py @@ -0,0 +1,121 @@ +#! /usr/bin/python + +# Copyright (C) GRyCAP - I3M - UPV +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import unittest +import sys +import tempfile +import os +import os.path +from mock import MagicMock +from mock import patch, call +from botocore.exceptions import ClientError + +sys.path.append("..") +sys.path.append(".") +sys.path.append("../..") + +from scar.providers.aws.s3 import S3 + + +class TestS3(unittest.TestCase): + + def __init__(self, *args): + unittest.TestCase.__init__(self, *args) + + def test_init(self): + s3 = S3({}) + self.assertEqual(type(s3.client.client).__name__, "S3") + + def _init_mocks(self, call_list): + session = MagicMock(['client']) + client = MagicMock(call_list) + session.client.return_value = client + return session + + @patch('boto3.Session') + def test_create_bucket(self, boto_session): + boto_session.return_value = self._init_mocks(['get_bucket_location', 'create_bucket']) + s3 = S3({}) + s3.client.client.get_bucket_location.side_effect = ClientError({'Error': {'Code': 'NoSuchBucket'}}, 'op') + s3.client.client.create_bucket.return_value = {} + s3.create_bucket('bname') + self.assertEqual(s3.client.client.create_bucket.call_args_list[0], call(ACL='private', Bucket='bname')) + + @patch('boto3.Session') + def test_upload_file(self, boto_session): + boto_session.return_value = self._init_mocks(['put_object']) + s3 = S3({}) + s3.client.client.put_object.return_value = {} + tmpfile = tempfile.NamedTemporaryFile(delete=False) + tmpfile.write(b'Hello world!') + tmpfile.close() + s3.upload_file('bname', file_path=tmpfile.name) + os.unlink(tmpfile.name) + self.assertEqual(s3.client.client.put_object.call_args_list[0], + call(Bucket='bname', Key=os.path.basename(tmpfile.name), Body=b'Hello world!')) + + @patch('boto3.Session') + def test_get_bucket_file_list(self, boto_session): + boto_session.return_value = self._init_mocks(['get_bucket_location', 'list_objects_v2']) + s3 = S3({}) + s3.client.client.list_objects_v2.return_value = {'IsTruncated': False, 'Contents': [{'Key': 'key1'}]} + self.assertEqual(s3.get_bucket_file_list({'path': 
'/'}), ['key1']) + + @patch('boto3.Session') + def test_set_input_bucket_notification(self, boto_session): + boto_session.return_value = self._init_mocks(['put_bucket_notification_configuration', + 'get_bucket_notification_configuration']) + s3 = S3({'lambda': {'arn': 'arn'}}) + s3.client.client.get_bucket_notification_configuration.return_value = {'LambdaFunctionConfigurations': []} + s3.client.client.put_bucket_notification_configuration.return_value = {} + s3.set_input_bucket_notification('bucket','folders') + expected_res = call(Bucket='bucket', + NotificationConfiguration={'LambdaFunctionConfigurations': [{'LambdaFunctionArn': 'arn', + 'Events': ['s3:ObjectCreated:*'], + 'Filter': {'Key': {'FilterRules': [{'Name': 'prefix', + 'Value': 'folders/'}]}}}]}) + self.assertEqual(s3.client.client.put_bucket_notification_configuration.call_args_list[0], expected_res) + + @patch('boto3.Session') + def test_delete_bucket_notification(self, boto_session): + boto_session.return_value = self._init_mocks(['put_bucket_notification_configuration', + 'get_bucket_notification_configuration']) + s3 = S3({'lambda': {'arn': 'arn'}}) + s3.client.client.get_bucket_notification_configuration.return_value = {'LambdaFunctionConfigurations': []} + s3.client.client.put_bucket_notification_configuration.return_value = {} + s3.delete_bucket_notification('bucket') + expected_res = call(Bucket='bucket', NotificationConfiguration={'LambdaFunctionConfigurations': []}) + self.assertEqual(s3.client.client.put_bucket_notification_configuration.call_args_list[0], expected_res) + + @patch('boto3.Session') + def test_create_bucket_and_folders(self, boto_session): + boto_session.return_value = self._init_mocks(['get_bucket_location', 'create_bucket', 'put_object', 'get_object']) + s3 = S3({}) + s3.client.client.get_bucket_location.side_effect = ClientError({'Error': {'Code': 'NoSuchBucket'}}, 'op') + s3.client.client.create_bucket.return_value = {} + s3.client.client.put_object.return_value = {} + 
s3.client.client.get_object.return_value = {} + self.assertEqual(s3.create_bucket_and_folders('storage/path'), ('storage', 'path')) + + @patch('boto3.Session') + def test_download_file(self, boto_session): + boto_session.return_value = self._init_mocks(['download_fileobj']) + s3 = S3({}) + s3.client.client.download_fileobj.return_value = {} + s3.download_file('bucket', 'key', 'path') + self.assertEqual(s3.client.client.download_fileobj.call_args_list[0][1]['Bucket'], 'bucket') + self.assertEqual(s3.client.client.download_fileobj.call_args_list[0][1]['Key'], 'key') + self.assertIn('Fileobj', s3.client.client.download_fileobj.call_args_list[0][1]) + os.unlink('path') diff --git a/test/unit/oscar/test_client.py b/test/unit/oscar/test_client.py new file mode 100644 index 00000000..617b2ca9 --- /dev/null +++ b/test/unit/oscar/test_client.py @@ -0,0 +1,110 @@ +#! /usr/bin/python + +# Copyright (C) GRyCAP - I3M - UPV +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
import unittest
import sys
import os
from mock import MagicMock
from mock import patch

sys.path.append("..")
sys.path.append(".")
sys.path.append("../..")

from scar.providers.oscar.client import OSCARClient
from scar.exceptions import ServiceCreationError, ServiceDeletionError, ServiceNotFoundError, ListServicesError


class TestOSCARClient(unittest.TestCase):
    """Unit tests for OSCARClient: create/delete/get/list service REST calls.

    Each test mocks the corresponding ``requests`` function, checks the URL and
    auth/verify/json arguments on the success path, and then checks that a
    non-2xx response raises the expected scar exception with the server text.
    """

    def _client(self):
        # Helper: build a client against a dummy endpoint with basic-auth
        # credentials and SSL verification disabled (shared by every test).
        return OSCARClient({"endpoint": "url", "auth_user": "user",
                            "auth_password": "pass", "ssl_verify": False}, "cid")

    @patch('requests.post')
    def test_create_service(self, post):
        # Renamed from 'test_create_serviced' (typo); still matches test_* discovery.
        response = MagicMock(["status_code", "text"])
        response.status_code = 201
        post.return_value = response
        oscar = self._client()
        oscar.create_service(key="value")
        self.assertEqual(post.call_args_list[0][0][0], "url/system/services")
        self.assertEqual(post.call_args_list[0][1],
                         {'auth': ('user', 'pass'), 'verify': False, 'json': {'key': 'value'}})

        # Error path: a non-201 status must raise ServiceCreationError with the server text.
        response.status_code = 401
        response.text = "Some error"
        with self.assertRaises(ServiceCreationError) as ex:
            oscar.create_service(name="sname")
        self.assertEqual(
            "Unable to create the service 'sname': Some error",
            str(ex.exception)
        )

    @patch('requests.delete')
    def test_delete_service(self, delete):
        response = MagicMock(["status_code", "text"])
        response.status_code = 204
        delete.return_value = response
        oscar = self._client()
        oscar.delete_service("sname")
        self.assertEqual(delete.call_args_list[0][0][0], "url/system/services/sname")
        self.assertEqual(delete.call_args_list[0][1], {'auth': ('user', 'pass'), 'verify': False})

        # Error path: a non-204 status must raise ServiceDeletionError.
        response.status_code = 401
        response.text = "Some error"
        with self.assertRaises(ServiceDeletionError) as ex:
            oscar.delete_service("sname")
        self.assertEqual(
            "Unable to delete the service 'sname': Some error",
            str(ex.exception)
        )

    @patch('requests.get')
    def test_get_service(self, get):
        response = MagicMock(["status_code", "json"])
        response.status_code = 200
        response.json.return_value = {"key": "value"}
        get.return_value = response
        oscar = self._client()
        self.assertEqual(oscar.get_service("sname"), {"key": "value"})
        self.assertEqual(get.call_args_list[0][0][0], "url/system/services/sname")
        self.assertEqual(get.call_args_list[0][1], {'auth': ('user', 'pass'), 'verify': False})

        # Error path: a non-200 status must raise ServiceNotFoundError.
        # (Removed a stray ', {"key": "value"}' dead tuple expression here.)
        response.status_code = 401
        response.text = "Some error"
        with self.assertRaises(ServiceNotFoundError) as ex:
            oscar.get_service("sname")
        self.assertEqual(
            "The service 'sname' does not exist: Some error",
            str(ex.exception)
        )

    @patch('requests.get')
    def test_list_services(self, get):
        response = MagicMock(["status_code", "json"])
        response.status_code = 200
        response.json.return_value = {"key": "value"}
        get.return_value = response
        oscar = self._client()
        self.assertEqual(oscar.list_services(), {"key": "value"})
        self.assertEqual(get.call_args_list[0][0][0], "url/system/services")
        self.assertEqual(get.call_args_list[0][1], {'auth': ('user', 'pass'), 'verify': False})

        # Error path: a non-200 status must raise ListServicesError.
        # (Removed a stray ', {"key": "value"}' dead tuple expression here.)
        response.status_code = 401
        response.text = "Some error"
        with self.assertRaises(ListServicesError) as ex:
            oscar.list_services()
        self.assertEqual(
            "Unable to list services from OSCAR cluster 'cid': Some error",
            str(ex.exception)
        )
#!/usr/bin/python

# Copyright (C) GRyCAP - I3M - UPV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import sys
import os
import tempfile
from mock import MagicMock
from mock import patch

sys.path.append("..")
sys.path.append(".")
sys.path.append("../..")

from scar.providers.oscar.controller import OSCAR


class TestOSCARController(unittest.TestCase):
    """Unit tests for the OSCAR controller commands (init / rm / ls).

    The OSCARClient and the temp-config loader are patched so no network or
    real configuration is touched; each test checks the arguments the
    controller forwards to the client.
    """

    @patch('scar.providers.oscar.controller.OSCARClient')
    @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file')
    def test_init(self, load_tmp_config_file, oscar_client):
        """'init' must read the script file contents and create the service."""
        tmpfile = tempfile.NamedTemporaryFile(delete=False)
        tmpfile.write(b'Hello world!')
        tmpfile.close()
        load_tmp_config_file.return_value = {"functions": {"oscar": [{"my_oscar": {"name": "oname",
                                                                                  "script": tmpfile.name}}]}}
        ocli = MagicMock(['create_service'])
        oscar_client.return_value = ocli

        try:
            OSCAR('init')
        finally:
            # Remove the temporary script even if the controller raises,
            # so a failing test does not leak files.
            os.unlink(tmpfile.name)

        # The script must be inlined (file contents, not the path) and the
        # cluster id taken from the config key.
        res = {'name': 'oname', 'script': 'Hello world!',
               'cluster_id': 'my_oscar', 'storage_providers': {}}
        self.assertEqual(ocli.create_service.call_args_list[0][1], res)

    @patch('scar.providers.oscar.controller.OSCARClient')
    @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file')
    def test_rm(self, load_tmp_config_file, oscar_client):
        """'rm' must delete the service by its configured name."""
        load_tmp_config_file.return_value = {"functions": {"oscar": [{"my_oscar": {"name": "oname",
                                                                                  "script": "some.sh"}}]}}
        ocli = MagicMock(['delete_service'])
        oscar_client.return_value = ocli

        OSCAR('rm')

        self.assertEqual(ocli.delete_service.call_args_list[0][0][0], 'oname')

    @patch('scar.providers.oscar.controller.OSCARClient')
    @patch('scar.providers.aws.controller.FileUtils.load_tmp_config_file')
    def test_ls(self, load_tmp_config_file, oscar_client):
        """'ls' must query the cluster service list exactly once."""
        load_tmp_config_file.return_value = {"functions": {"oscar": [{"my_oscar": {"name": "oname",
                                                                                   "script": "some.sh",
                                                                                   "endpoint": "http://some.es",
                                                                                   "auth_user": "user",
                                                                                   "auth_password": "pass",
                                                                                   "ssl_verify": False}}]}}
        ocli = MagicMock(['list_services'])
        ocli.list_services.return_value = [{'name': 'fname', 'memory': '256Mi',
                                            'cpu': '1.0', 'image': 'some/image:tag'}]
        oscar_client.return_value = ocli

        OSCAR('ls')

        self.assertEqual(ocli.list_services.call_count, 1)
import unittest
import sys
import os
import tempfile
import yaml
from mock import patch

sys.path.append("..")
sys.path.append(".")

from scar.scarcli import main


class TestSCARCli(unittest.TestCase):
    """End-to-end test of the scar CLI entry point with mocked providers."""

    @patch('scar.scarcli.AWS')
    @patch('scar.scarcli.OSCAR')
    def test_main(self, oscar, aws):
        """'scar init -f <file>' must dispatch 'init' to both AWS and OSCAR
        and write a merged temporary config (pointed to by SCAR_TMP_CFG)
        with the default boto profile filled in."""
        # NOTE(review): the exact YAML indentation of the original fixture was
        # lost in transit; this layout matches the function-definition files
        # shown in the project docs — confirm against the original test.
        tmpfile = tempfile.NamedTemporaryFile(delete=False, suffix=".yaml")
        tmpfile.write(b"functions:\n")
        tmpfile.write(b"  aws:\n")
        tmpfile.write(b"  - lambda:\n")
        tmpfile.write(b"      name: func_name\n")
        tmpfile.close()
        sys.argv = ['scar', 'init', '-f', tmpfile.name]
        try:
            main()
        finally:
            # Remove the input YAML even if main() raises.
            os.unlink(tmpfile.name)

        # Both providers must be invoked with the 'init' command.
        self.assertEqual(aws.call_args_list[0][0], ('init',))
        self.assertEqual(oscar.call_args_list[0][0], ('init',))

        cfg_path = os.environ['SCAR_TMP_CFG']
        try:
            with open(cfg_path) as f:
                cfg_file = yaml.safe_load(f)
            self.assertEqual(cfg_file["functions"]["aws"][0]["api_gateway"]["boto_profile"], "default")
        finally:
            # Always remove the temp config written by main(), even when the
            # assertion above fails, so reruns start clean.
            os.unlink(cfg_path)