Artifact export for GraphQLApi (aws#5250)
* Artifact export for GraphQLApi

* format

* docstrings

* fix unit tests

* fix mypy issues

* improve search method signature
ssenchenko authored and Leonardo Gama committed Jun 22, 2023
1 parent 856d382 commit 994974e
Showing 4 changed files with 222 additions and 22 deletions.
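Roughly, the resource shape this commit teaches the packager to handle looks like the sketch below, expressed as the Python dict the exporter receives; the resource layout follows the new class docstring, while the file paths are invented for illustration.

# Hypothetical properties of an AWS::Serverless::GraphQLApi resource before packaging.
graphql_api_properties = {
    "SchemaUri": "gql/schema.graphql",
    "Resolvers": {
        "Mutation": {
            "Resolver1": {"CodeUri": "gql/resolver1.js", "Pipeline": ["Func1"]},
        },
    },
    "Functions": {
        "Func1": {"CodeUri": "gql/func1.js"},
    },
}
# After packaging, SchemaUri and every nested CodeUri should point to an S3 location.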
138 changes: 131 additions & 7 deletions samcli/lib/package/packageable_resources.py
@@ -4,7 +4,7 @@
import logging
import os
import shutil
from typing import Dict, Optional, Union
from typing import Any, Dict, List, Optional, Tuple, Union, cast

import jmespath
from botocore.utils import set_value_from_jmespath
@@ -40,6 +40,7 @@
AWS_LAMBDA_LAYERVERSION,
AWS_SERVERLESS_API,
AWS_SERVERLESS_FUNCTION,
AWS_SERVERLESS_GRAPHQLAPI,
AWS_SERVERLESS_HTTPAPI,
AWS_SERVERLESS_LAYERVERSION,
AWS_SERVERLESS_STATEMACHINE,
@@ -89,7 +90,7 @@ class ResourceZip(Resource):
Base class representing a CloudFormation resource that can be exported
"""

RESOURCE_TYPE: Optional[str] = None
RESOURCE_TYPE: str = ""
PROPERTY_NAME: str = ""
PACKAGE_NULL_PROPERTY = True
# Set this property to True in base class if you want the exporter to zip
@@ -133,13 +134,23 @@ def export(self, resource_id: str, resource_dict: Optional[Dict], parent_dir: str):
if temp_dir:
shutil.rmtree(temp_dir)

def do_export(self, resource_id, resource_dict, parent_dir):
def do_export(
self,
resource_id,
resource_dict,
parent_dir,
property_path: Optional[str] = None,
local_path: Optional[str] = None,
):
"""
Default export action is to upload artifacts and set the property to
S3 URL of the uploaded object
If code signing configuration is provided for function/layer, uploaded artifact
will be replaced by signed artifact location
"""
if property_path is None:
property_path = self.PROPERTY_NAME
uploader = cast(S3Uploader, self.uploader)
# the code signer only accepts files that have a '.zip' extension,
# so package the artifact with '.zip' if it needs to be signed
should_sign_package = self.code_signer.should_sign_package(resource_id)
@@ -148,16 +159,17 @@ def do_export(self, resource_id, resource_dict, parent_dir):
self.RESOURCE_TYPE,
resource_id,
resource_dict,
self.PROPERTY_NAME,
property_path,
parent_dir,
self.uploader,
uploader,
artifact_extension,
local_path,
)
if should_sign_package:
uploaded_url = self.code_signer.sign_package(
resource_id, uploaded_url, self.uploader.get_version_of_artifact(uploaded_url)
resource_id, uploaded_url, uploader.get_version_of_artifact(uploaded_url)
)
set_value_from_jmespath(resource_dict, self.PROPERTY_NAME, uploaded_url)
set_value_from_jmespath(resource_dict, property_path, uploaded_url)
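As a hedged illustration of why do_export now takes an explicit property_path: set_value_from_jmespath can rewrite a nested property in place, which a bare property name cannot address. The resolver path and S3 URL below are invented; in the real flow they come from GraphQLApiCodeResource and the S3Uploader.

from botocore.utils import set_value_from_jmespath

resource_dict = {"Resolvers": {"Mutation": {"Resolver1": {"CodeUri": "gql/resolver1.js"}}}}
# Replace the nested local path with a (made-up) uploaded artifact location.
set_value_from_jmespath(resource_dict, "Resolvers.Mutation.Resolver1.CodeUri", "s3://bucket/artifact.zip")
assert resource_dict["Resolvers"]["Mutation"]["Resolver1"]["CodeUri"] == "s3://bucket/artifact.zip"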

def delete(self, resource_id, resource_dict):
"""
@@ -585,6 +597,116 @@ def get_property_value(self, resource_dict):
return jmespath.search(self.PROPERTY_NAME, resource_dict)


class GraphQLApiSchemaResource(ResourceZip):
RESOURCE_TYPE = AWS_SERVERLESS_GRAPHQLAPI
PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][0]
# Don't package the directory if SchemaUri is omitted.
# Necessary to support SchemaInline
PACKAGE_NULL_PROPERTY = False
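For orientation, a quick sketch of how PROPERTY_NAME resolves here, assuming the RESOURCES_WITH_LOCAL_PATHS entry added in samcli/lib/utils/resources.py further down this diff:

# RESOURCES_WITH_LOCAL_PATHS[AWS_SERVERLESS_GRAPHQLAPI] is ["SchemaUri", "CodeUri"], so index 0
# maps this class to SchemaUri and index 1 maps GraphQLApiCodeResource (below) to CodeUri.
assert RESOURCES_WITH_LOCAL_PATHS[AWS_SERVERLESS_GRAPHQLAPI][0] == "SchemaUri"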


class GraphQLApiCodeResource(ResourceZip):
"""CodeUri for GraphQLApi resource.
There can be more than one instance of the CodeUri property within a GraphQLApi's Resolvers and Functions.
This class handles them all.
The GraphQLApi dict shape looks like the following (YAML representation):
>>> Resolvers:
Mutation:
Resolver1:
CodeUri: ...
Pipeline:
- Func1
- Func2
Query:
Resolver2:
CodeUri: ...
Pipeline:
- Func3
Functions:
Func1:
CodeUri: ...
Func2:
CodeUri: ...
Func3:
CodeUri: ...
... # other properties, which are not important here
"""

RESOURCE_TYPE = AWS_SERVERLESS_GRAPHQLAPI
PROPERTY_NAME = RESOURCES_WITH_LOCAL_PATHS[RESOURCE_TYPE][1]
# If CodeUri is omitted, the directory is not packaged; this is necessary to support CodeInline
PACKAGE_NULL_PROPERTY = False

def export(self, resource_id: str, resource_dict: Optional[Dict], parent_dir: str):
if resource_dict is None:
return

if resource_not_packageable(resource_dict):
return

# To set each nested property to its S3 URI, full property paths are necessary;
# jmespath doesn't provide that functionality, hence the custom implementation
paths_values = self._find_all_with_property_name(resource_dict)
for property_path, property_value in paths_values:
if isinstance(property_value, dict):
LOG.debug("Property %s of %s resource is not a URL", self.PROPERTY_NAME, resource_id)
return

# If property is a file but not a zip file, place file in temp
# folder and send the temp folder to be zipped
temp_dir = None
if is_local_file(property_value) and not is_zip_file(property_value) and self.FORCE_ZIP:
temp_dir = copy_to_temp_dir(property_value)
set_value_from_jmespath(resource_dict, property_path, temp_dir)

try:
self.do_export(
resource_id, resource_dict, parent_dir, property_path=property_path, local_path=property_value
)

except Exception as ex:
LOG.debug("Unable to export", exc_info=ex)
raise exceptions.ExportFailedError(
resource_id=resource_id, property_name=property_path, property_value=property_value, ex=ex
)
finally:
if temp_dir:
shutil.rmtree(temp_dir)

def _find_all_with_property_name(self, graphql_dict: Dict[str, Any]) -> List[Tuple[str, Union[str, Dict]]]:
"""Find paths to the all properties with self.PROPERTY_NAME name and their (properties) values.
It leverages the knowledge of GraphQLApi structure instead of doing generic search in the graph.
Parameters
----------
graphql_dict
GraphQLApi resource dict
Returns
-------
list of (path, value) tuples for all found properties with that name
"""
# need to look up only in "Resolvers" and "Functions" subtrees
resolvers_and_functions = {k: graphql_dict[k] for k in ("Resolvers", "Functions") if k in graphql_dict}
stack: List[Tuple[Dict[str, Any], str]] = [(resolvers_and_functions, "")]
paths_values: List[Tuple[str, Union[str, Dict]]] = []

while stack:
node, path = stack.pop()
if isinstance(node, dict):
for key, value in node.items():
if key == self.PROPERTY_NAME:
paths_values.append((f"{path}{key}", value))
elif isinstance(value, dict):
stack.append((value, f"{path}{key}."))
# there is no need to handle lists because
# paths to "CodeUri" within "Resolvers" and "Functions" don't go through lists
return paths_values
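A rough usage sketch of the traversal above, with invented paths, showing the jmespath-style paths it yields for the shape in the class docstring (ordering depends on the stack-based traversal):

resolvers_and_functions = {
    "Resolvers": {"Mutation": {"Resolver1": {"CodeUri": "gql/resolver1.js", "Pipeline": ["Func1"]}}},
    "Functions": {"Func1": {"CodeUri": "gql/func1.js"}},
}
# With PROPERTY_NAME == "CodeUri", _find_all_with_property_name would return something like:
# [("Functions.Func1.CodeUri", "gql/func1.js"),
#  ("Resolvers.Mutation.Resolver1.CodeUri", "gql/resolver1.js")]
# Each (path, value) pair can then be fed to do_export(..., property_path=path, local_path=value).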


RESOURCES_EXPORT_LIST = [
ServerlessFunctionResource,
ServerlessFunctionImageResource,
@@ -610,6 +732,8 @@ def get_property_value(self, resource_dict):
CloudFormationModuleVersionModulePackage,
CloudFormationResourceVersionSchemaHandlerPackage,
ECRResource,
GraphQLApiSchemaResource,
GraphQLApiCodeResource,
]

METADATA_EXPORT_LIST = [ServerlessRepoApplicationReadme, ServerlessRepoApplicationLicense]
19 changes: 10 additions & 9 deletions samcli/lib/package/utils.py
@@ -57,7 +57,7 @@ def is_path_value_valid(path):
return isinstance(path, str)


def make_abs_path(directory, path):
def make_abs_path(directory: str, path: str) -> str:
if is_path_value_valid(path) and not os.path.isabs(path):
return os.path.normpath(os.path.join(directory, path))
return path
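For reference, a small sketch of the now-annotated helper's behavior on POSIX-style paths (the directories and files are invented): relative paths are resolved against the given directory, absolute paths pass through unchanged.

assert make_abs_path("/work/project", "gql/schema.graphql") == "/work/project/gql/schema.graphql"
assert make_abs_path("/work/project", "/abs/schema.graphql") == "/abs/schema.graphql"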
@@ -130,10 +130,11 @@ def upload_local_artifacts(
resource_type: str,
resource_id: str,
resource_dict: Dict,
property_name: str,
property_path: str,
parent_dir: str,
uploader: S3Uploader,
extension: Optional[str] = None,
local_path: Optional[str] = None,
) -> str:
"""
Upload local artifacts referenced by the property at given resource and
@@ -150,28 +151,28 @@ def upload_local_artifacts(
:param resource_type: Type of the CloudFormation resource
:param resource_id: Id of the CloudFormation resource
:param resource_dict: Dictionary containing resource definition
:param property_name: Property name of CloudFormation resource where this
:param property_path: Json path to the property of SAM or CloudFormation resource where the
local path is present
:param parent_dir: Resolve all relative paths with respect to this
directory
:param uploader: Method to upload files to S3
:param extension: Extension of the uploaded artifact
:param local_path: Local path for cases when the search returns more than a single result
:return: S3 URL of the uploaded object
:raise: ValueError if path is not a S3 URL or a local path
"""

local_path = jmespath.search(property_name, resource_dict)

if local_path is None:
# Build the root directory and upload to S3
local_path = parent_dir
# if local_path is not passed and search returns nothing
# build the root directory and upload to S3
local_path = jmespath.search(property_path, resource_dict) or parent_dir

if is_s3_protocol_url(local_path):
# A valid CloudFormation template will specify artifacts as S3 URLs.
# This check is supporting the case where your resource does not
# refer to local artifacts
# Nothing to do if property value is an S3 URL
LOG.debug("Property %s of %s is already a S3 URL", property_name, resource_id)
LOG.debug("Property %s of %s is already a S3 URL", property_path, resource_id)
return cast(str, local_path)

local_path = make_abs_path(parent_dir, local_path)
@@ -189,7 +190,7 @@
if is_local_file(local_path):
return uploader.upload_with_dedup(local_path)

raise InvalidLocalPathError(resource_id=resource_id, property_name=property_name, local_path=local_path)
raise InvalidLocalPathError(resource_id=resource_id, property_name=property_path, local_path=local_path)
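A small sketch of the new lookup fallback, using an invented resolver path: when local_path is not passed, the helper now falls back to a jmespath search of the resource and only then to the template's parent directory.

import jmespath

resource_dict = {"Resolvers": {"Mutation": {"Resolver1": {"CodeUri": "gql/resolver1.js"}}}}
parent_dir = "/path/to/template/dir"  # invented

# Same expression as the code above: search first, fall back to the parent directory.
local_path = jmespath.search("Resolvers.Mutation.Resolver1.CodeUri", resource_dict) or parent_dir
assert local_path == "gql/resolver1.js"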


def resource_not_packageable(resource_dict):
14 changes: 12 additions & 2 deletions samcli/lib/utils/resources.py
@@ -41,6 +41,7 @@
AWS_SERVERLESS_APPLICATION = "AWS::Serverless::Application"

AWS_SERVERLESSREPO_APPLICATION = "AWS::ServerlessRepo::Application"
AWS_SERVERLESS_GRAPHQLAPI = "AWS::Serverless::GraphQLApi"
AWS_APPSYNC_GRAPHQLSCHEMA = "AWS::AppSync::GraphQLSchema"
AWS_APPSYNC_RESOLVER = "AWS::AppSync::Resolver"
AWS_APPSYNC_FUNCTIONCONFIGURATION = "AWS::AppSync::FunctionConfiguration"
@@ -61,12 +62,17 @@
METADATA_WITH_LOCAL_PATHS = {AWS_SERVERLESSREPO_APPLICATION: ["LicenseUrl", "ReadmeUrl"]}

RESOURCES_WITH_LOCAL_PATHS = {
AWS_SERVERLESS_GRAPHQLAPI: ["SchemaUri", "CodeUri"],
AWS_SERVERLESS_FUNCTION: ["CodeUri"],
AWS_SERVERLESS_API: ["DefinitionUri"],
AWS_SERVERLESS_HTTPAPI: ["DefinitionUri"],
AWS_SERVERLESS_STATEMACHINE: ["DefinitionUri"],
AWS_APPSYNC_GRAPHQLSCHEMA: ["DefinitionS3Location"],
AWS_APPSYNC_RESOLVER: ["RequestMappingTemplateS3Location", "ResponseMappingTemplateS3Location", "CodeS3Location"],
AWS_APPSYNC_RESOLVER: [
"RequestMappingTemplateS3Location",
"ResponseMappingTemplateS3Location",
"CodeS3Location",
],
AWS_APPSYNC_FUNCTIONCONFIGURATION: [
"RequestMappingTemplateS3Location",
"ResponseMappingTemplateS3Location",
@@ -133,7 +139,11 @@ def get_packageable_resource_paths():
Resource Dictionary containing packageable resource types and their locations as a list.
"""
_resource_property_dict = defaultdict(list)
for _dict in (METADATA_WITH_LOCAL_PATHS, RESOURCES_WITH_LOCAL_PATHS, RESOURCES_WITH_IMAGE_COMPONENT):
for _dict in (
METADATA_WITH_LOCAL_PATHS,
RESOURCES_WITH_LOCAL_PATHS,
RESOURCES_WITH_IMAGE_COMPONENT,
):
for key, value in _dict.items():
# Only add values to the list if they are different, same property name could be used with the resource
# to package to different locations.
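Assuming the remainder of the loop (truncated above) appends each unique property name per resource type, the merged mapping would now contain an entry like the following for GraphQLApi:

paths = get_packageable_resource_paths()
# Only RESOURCES_WITH_LOCAL_PATHS contributes for this resource type, so the entry
# should be exactly the two local-path properties added in this commit.
assert paths["AWS::Serverless::GraphQLApi"] == ["SchemaUri", "CodeUri"]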
