# --- analytics-service/c8y_agent.py (reconstructed from flattened patch) ---
import json
import logging
import types
from typing import Any, Dict, List, Optional, Set, Tuple, Union

# Third-party dependencies of the original module; guarded so the pure
# helper methods below stay importable/unit-testable without the SDK.
# (The patch's `from xmlrpc.client import boolean` was a bad auto-import
# used only as a type annotation; replaced by the builtin `bool`.)
try:
    from dotenv import load_dotenv
    from c8y_api.app import MultiTenantCumulocityApp
    from c8y_api.model import Binary, TenantOption
except ImportError:  # pragma: no cover - test environment without the SDK
    load_dotenv = None
    MultiTenantCumulocityApp = None
    Binary = None
    TenantOption = None


class C8YAgent:
    """Facade over the Cumulocity multi-tenant API for the analytics service.

    Responsibilities: uploading extension binaries, restarting the CEP
    engine, reading CEP diagnostics, and CRUD on repository configurations
    stored as tenant options.

    Access tokens stored with repositories are replaced by
    ``DUMMY_ACCESS_TOKEN`` before being returned to clients; on update the
    dummy is swapped back for the stored token, so real tokens never leave
    the backend.
    """

    # Sentinel returned to clients instead of the real repository access token.
    DUMMY_ACCESS_TOKEN = "_DUMMY_ACCESS_CODE_"
    # Cumulocity REST resources used by this agent.
    PATHS = {
        "CEP_DIAGNOSTICS": "/service/cep/diagnostics/apamaCtrlStatus",
        "TENANT_OPTIONS": "/tenant/options",
        "CEP_RESTART": "/service/cep/restart",
    }
    # Tenant-option category under which repository configurations are stored.
    ANALYTICS_MANAGEMENT_REPOSITORIES = "analytics-management.repository"

    def __init__(self):
        # NOTE(review): logger initialization is not visible in the patch
        # context; recreated here so self._logger is always defined.
        self._logger = logging.getLogger("C8YAgent")
        load_dotenv()
        self.c8y_app = MultiTenantCumulocityApp()

    def _get_tenant_instance(self, headers: Dict, cookies: Dict) -> Any:
        """Resolve the tenant-specific API instance for the calling request."""
        # Fixed annotation: the patch used lowercase `any` (the builtin
        # function), not `typing.Any`.
        return self.c8y_app.get_tenant_instance(headers=headers, cookies=cookies)

    def _handle_request(self, func, *args, **kwargs) -> Tuple[Dict, int]:
        """Generic request handler with error handling.

        Returns the callable's result on success, or an error payload with
        HTTP status 500 on failure.
        """
        try:
            # NOTE(review): the interior of this method is hidden by the
            # patch context; reconstructed as a plain delegation - confirm.
            return func(*args, **kwargs)
        except Exception as e:
            self._logger.error(f"Request failed: {e}", exc_info=True)
            return {"error": str(e)}, 500

    def upload_extension(self, request, extension_name: str, ext_file) -> str:
        """Upload an extension zip as a Cumulocity binary and return its id."""
        headers, cookies = self.prepare_header(request)
        binary = Binary(
            c8y=self._get_tenant_instance(headers, cookies),
            type="application/zip",
            name=extension_name,
            file=ext_file,
        )
        # NOTE(review): the tail of this method is hidden by the patch
        # context; creating the binary and returning its id matches the
        # declared `-> str` - confirm against the full file.
        return binary.create().id

    def restart_cep(self, request) -> None:
        """Attempt to restart CEP, ignoring any errors that occur.

        Args:
            request: The incoming request object.
        """
        try:
            headers, cookies = self.prepare_header(request)
            self._logger.info("Attempting to restart CEP...")
            self._get_tenant_instance(headers, cookies).put(
                resource=self.PATHS["CEP_RESTART"], json={}
            )
        except Exception as e:
            # Log the error but don't raise it: a restart is best-effort.
            self._logger.warning(f"Non-critical error during CEP restart: {str(e)}")
        finally:
            self._logger.info("CEP restart procedure completed")

    def get_cep_operationobject_id(self, request) -> Optional[Dict]:
        """Return ``{"id": ...}`` of the managed object representing the CEP
        ctrl microservice, or None if it cannot be resolved."""
        headers, cookies = self.prepare_header(request)
        ti = self._get_tenant_instance(headers, cookies)
        self._logger.info(f"Updated get_cep_operationobject_id: {ti}")
        self._logger.info(f"Updated path: {self.PATHS['CEP_DIAGNOSTICS']}")
        response = ti.get(resource=self.PATHS["CEP_DIAGNOSTICS"])

        app_id = response.get("microservice_application_id")
        microservice_name = response.get("microservice_name")
        if not all([app_id, microservice_name]):
            return None

        query = f"applicationId eq '{app_id}' and name eq '{microservice_name}'"
        managed_objects = self._get_tenant_instance(headers, cookies).inventory.select(
            query=query
        )
        # Return the first match only.
        for managed_object in managed_objects:
            return {"id": managed_object.id}
        return None

    def get_cep_ctrl_status(self, request) -> Dict:
        """Return the raw CEP diagnostics/status document."""
        headers, cookies = self.prepare_header(request)
        return self._get_tenant_instance(headers, cookies).get(
            resource=self.PATHS["CEP_DIAGNOSTICS"]
        )

    def _process_repository_data(
        self,
        repo_data: Union[Dict, str, "TenantOption"],
        repository_id: str = None,
        replace_access_token: bool = True,
    ) -> Dict:
        """Normalize repository data into the canonical dictionary format.

        Args:
            repo_data: Repository data as a dict, JSON string, or TenantOption.
            repository_id: Optional repository id overriding any embedded id.
            replace_access_token: When True (default) a stored access token is
                replaced by ``DUMMY_ACCESS_TOKEN`` so it is never exposed.

        Returns:
            Dict with keys id, name, url, accessToken, enabled. On any
            processing error a best-effort default dict is returned.
        """
        try:
            if hasattr(repo_data, "value"):  # TenantOption-like input
                try:
                    value_dict = json.loads(repo_data.value)
                except json.JSONDecodeError:
                    # Value is not JSON: treat it as a bare name.
                    value_dict = {
                        "id": repository_id,
                        "name": repo_data.value,  # Use the value as name
                        "url": "",
                        "accessToken": "",
                        "enabled": False,
                    }
            elif isinstance(repo_data, str):
                try:
                    value_dict = json.loads(repo_data)
                except json.JSONDecodeError:
                    value_dict = {
                        "id": repository_id,
                        "name": repo_data,  # Use the string as name
                        "url": "",
                        "accessToken": "",
                        "enabled": False,
                    }
            elif isinstance(repo_data, dict):
                value_dict = json.loads(repo_data.get("value", "{}"))
            else:
                raise ValueError(f"Unsupported repo_data type: {type(repo_data)}")

            result = {
                "id": repository_id or value_dict.get("id"),
                "name": value_dict.get("name", ""),
                "url": value_dict.get("url", ""),
                "accessToken": value_dict.get("accessToken", ""),
                "enabled": value_dict.get("enabled", False),
            }
            # Hide the real token from callers unless explicitly requested.
            if value_dict.get("accessToken") and replace_access_token:
                result["accessToken"] = self.DUMMY_ACCESS_TOKEN
            return result
        except Exception as e:
            self._logger.error(f"Error processing repository data: {e}", exc_info=True)
            # Return a basic dict with default values in case of error.
            return {
                "id": repository_id,
                "name": str(repo_data)[:100],  # Truncate long strings
                "url": "",
                "accessToken": "",
                "enabled": False,
            }

    def load_repositories(self, request) -> List[Dict]:
        """Load all configured repositories, with access tokens masked."""
        headers, cookies = self.prepare_header(request)
        tenant = self._get_tenant_instance(headers, cookies)
        tenant_options = tenant.tenant_options.get_all(
            category=self.ANALYTICS_MANAGEMENT_REPOSITORIES
        )
        return [
            self._process_repository_data(option, option.key, True)
            for option in tenant_options
        ]

    def load_repository(
        self, request, repository_id: str, replace_access_token: bool
    ) -> Dict:
        """Load one repository configuration.

        Args:
            replace_access_token: pass False only for internal use where the
                real token is required (e.g. to authenticate downloads).
        """
        headers, cookies = self.prepare_header(request)
        tenant = self._get_tenant_instance(headers, cookies)
        tenant_option = tenant.tenant_options.get(
            category=self.ANALYTICS_MANAGEMENT_REPOSITORIES, key=repository_id
        )
        # Fixed: the patch print()-ed the raw tenant-option value (including
        # the stored access token) to stdout; log only non-sensitive fields.
        self._logger.debug(
            "TenantOption category=%s key=%s",
            tenant_option.category,
            tenant_option.key,
        )
        return self._process_repository_data(
            tenant_option, repository_id, replace_access_token
        )

    def update_repositories(
        self, request, repositories: List[Dict]
    ) -> Tuple[Dict, int]:
        """Replace the stored repository configuration with *repositories*.

        Repositories present in storage but absent from the new list are
        deleted; all entries in the new list are (re)written.
        """
        try:
            headers, cookies = self.prepare_header(request)
            tenant = self._get_tenant_instance(headers, cookies)

            existing_repos = self.load_repositories(request)
            new_repo_ids = {repo.get("id") for repo in repositories}
            existing_repo_ids = {repo.get("id") for repo in existing_repos}

            # NOTE(review): the remainder of this method is hidden by the
            # patch context; reconstructed as delete-then-upsert - confirm.
            self._delete_repositories(tenant, existing_repo_ids - new_repo_ids)
            for repository in repositories:
                self._update_single_repository(tenant, repository)
            return {"message": "Repositories updated"}, 200
        except Exception as e:
            self._logger.error(f"Failed to update repositories: {e}", exc_info=True)
            return {"error": str(e)}, 500

    def _update_single_repository(self, tenant, repository: Dict) -> None:
        """Helper method to update a single repository tenant option."""
        try:
            # If the client echoed back the dummy token, keep the previously
            # stored real token instead of overwriting it with the dummy.
            if repository.get("accessToken") == self.DUMMY_ACCESS_TOKEN:
                existing_repo = tenant.tenant_options.get(
                    category=self.ANALYTICS_MANAGEMENT_REPOSITORIES,
                    key=repository.get("id"),
                )
                if existing_repo:
                    existing_data = json.loads(existing_repo.value)
                    access_token = existing_data.get("accessToken", "")
                else:
                    access_token = ""
            else:
                access_token = repository.get("accessToken", "")

            value_dict = {
                "name": repository.get("name"),
                "url": repository.get("url"),
                "enabled": bool(repository.get("enabled", False)),
            }
            # Only persist an access token if one exists.
            if access_token:
                value_dict["accessToken"] = access_token

            option = TenantOption(
                category=self.ANALYTICS_MANAGEMENT_REPOSITORIES,
                key=repository.get("id"),
                value=json.dumps(value_dict),
            )
            tenant.tenant_options.create(option)
            self._logger.info(f"Updated repository: {repository.get('id')}")
        except Exception as e:
            self._logger.error(
                f"Failed to update repository {repository.get('id')}: {str(e)}"
            )
            raise

    def _delete_repositories(self, tenant, repo_ids: Set[str]) -> None:
        """Helper method to delete multiple repositories; a failure on one id
        is logged and does not abort the remaining deletions."""
        for repo_id in repo_ids:
            try:
                tenant.tenant_options.delete_by(
                    category=self.ANALYTICS_MANAGEMENT_REPOSITORIES, key=repo_id
                )
                self._logger.info(f"Deleted repository: {repo_id}")
            except Exception as e:
                # NOTE(review): original handler body hidden by patch context.
                self._logger.error(f"Failed to delete repository {repo_id}: {e}")

    @staticmethod
    def prepare_header(request) -> Tuple[Dict, Dict]:
        """Return ``(headers, cookies)`` of *request* for tenant resolution.

        Fixed: the patch annotated the return as ``Dict`` while returning a
        two-element list, and left the superseded header-rewriting logic
        behind as commented-out code; both cleaned up here.
        """
        return request.headers, request.cookies
# --- analytics-service/flask_wrapper.py (reconstructed from flattened patch) ---
# NOTE(review): the module's import block is not visible in the patch;
# recreated from the names the code below demonstrably uses.
import io
import json
import logging
import os
import re
import subprocess
import tempfile
import urllib.parse
from functools import wraps
from typing import Dict, Optional

import requests
from flask import Flask, Response, jsonify, make_response, request, send_file

from c8y_agent import C8YAgent

# NOTE(review): original logger setup not visible in the patch context.
logger = logging.getLogger("flask_wrapper")

app = Flask(__name__)
agent = C8YAgent()


def handle_errors(f):
    """Decorator converting endpoint exceptions into JSON error responses."""

    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        # NOTE(review): the patch context hides earlier, more specific
        # except clauses (likely requests.HTTPError) - confirm.
        except Exception as e:
            logger.error("Unexpected error", exc_info=True)
            return create_error_response(str(e), 500)

    return wrapper


def create_error_response(message: str, status_code: int) -> Response:
    """Build a JSON error Response with the given message and status."""
    return Response(
        json.dumps({"message": f"Error: {message}"}),
        status=status_code,
        mimetype="application/json",
    )


def get_repository_headers(
    request, repository_id: Optional[str] = None
) -> Dict[str, str]:
    """Build GitHub raw-content headers; if a repository id is given, attach
    its stored (real, unmasked) access token as an Authorization header."""
    headers = {"Accept": "application/vnd.github.v3.raw"}
    if repository_id:
        # replace_access_token=False: this internal call needs the real token.
        repository_configuration = agent.load_repository(
            request=request, repository_id=repository_id, replace_access_token=False
        )
        if "accessToken" in repository_configuration:
            headers["Authorization"] = (
                f"token {repository_configuration['accessToken']}"
            )
            logger.info("Access token found and added to headers")
    return headers


@app.route("/health")
def health():
    """Liveness probe."""
    return jsonify({"status": "UP"})


@app.route("/repository/contentList", methods=["GET"])
@handle_errors
def get_content_list():
    """List contents at a repository URL.

    Query args:
        url: encoded URL of the repository content to list.
        id: optional repository id used to fetch authentication headers.

    Returns:
        200 JSON response with the upstream content listing.
    """
    encoded_url = request.args.get("url")
    repository_id = request.args.get("id")
    headers = {"Accept": "application/json"}

    if repository_id:
        headers = get_repository_headers(request, repository_id)

    decoded_url = urllib.parse.unquote(encoded_url)
    logger.info(f"Getting content list from: {decoded_url}")

    response = requests.get(decoded_url, headers=headers, allow_redirects=True)
    response.raise_for_status()

    return make_response(response.content, 200, {"Content-Type": "application/json"})


@app.route("/repository/content", methods=["GET"])
@handle_errors
def get_content():
    """Download a monitor file from a repository.

    Query args:
        url: encoded URL of the monitor to download.
        extract_fqn_cep_block: if true, return only the fully-qualified name.
        cep_block_name: block name, required when extracting the FQN.
        repository_id: repository whose credentials should be used.

    Returns:
        Either the monitor content or the FQN as text/plain.
    """
    encoded_url = request.args.get("url")
    cep_block_name = request.args.get("cep_block_name")
    repository_id = request.args.get("repository_id")
    extract_fqn_cep_block = parse_boolean(
        request.args.get("extract_fqn_cep_block", False)
    )

    headers = get_repository_headers(request, repository_id)
    decoded_url = urllib.parse.unquote(encoded_url)

    response = requests.get(decoded_url, headers=headers, allow_redirects=True)
    response.raise_for_status()

    if extract_fqn_cep_block:
        # Extract the Apama package declaration to build the FQN.
        package = re.findall(r"(package\s)(.*?);", response.text)
        if not package:
            raise ValueError("Package name not found in monitor file")
        fqn = f"{package[0][1]}.{cep_block_name}"
        return make_response(fqn, 200, {"Content-Type": "text/plain"})

    return make_response(response.content, 200, {"Content-Type": "text/plain"})


@app.route("/repository/configuration", methods=["GET"])
@handle_errors
def load_repositories():
    """Load the configured repositories (access tokens masked).

    Returns:
        JSON list of configured repositories, or 404 if none exist.
    """
    result = agent.load_repositories(request)
    if result is None:
        return create_error_response("No repositories found", 404)
    return jsonify(result)


@app.route("/repository/configuration", methods=["POST"])
@handle_errors
def update_repositories():
    """Replace all configured repositories with the posted list.

    Returns:
        Result of the update operation, or 400 on invalid input.
    """
    repositories = request.get_json()
    # Validate input shape.
    if not isinstance(repositories, list):
        return create_error_response(
            "Request body must be an array of repositories", 400
        )
    # Validate repository format.
    required_fields = {"id", "name", "url"}
    for repo in repositories:
        if not all(field in repo for field in required_fields):
            return create_error_response(
                "Each repository must have id, name, and url", 400
            )
    return agent.update_repositories(request, repositories)


@app.route("/extension", methods=["POST"])
@handle_errors
def create_extension_zip():
    """Build an extension zip from the posted monitor list.

    (Fixed: the patch's docstring was a copy-paste of the "get extension
    details" endpoint.)

    JSON body:
        extension_name: name of the extension to build.
        monitors: list of monitor descriptors (url, downloadUrl, repositoryId).
        upload / deploy: flags controlling upload to Cumulocity and CEP
            restart. NOTE(review): their parsing is hidden by the patch
            context; reconstructed - confirm exact key names.

    Returns:
        The zip as a file download, or 201 after uploading (and optionally
        restarting CEP).
    """
    data = request.get_json()
    extension_name = data.get("extension_name")
    monitors = data.get("monitors", [])
    upload = parse_boolean(data.get("upload", False))
    deploy = parse_boolean(data.get("deploy", False))

    # NOTE(review): temp-dir management hidden by patch context - confirm.
    with tempfile.TemporaryDirectory() as work_temp_dir:
        try:
            for monitor in monitors:
                try:
                    file_name = extract_raw_path(monitor["downloadUrl"])
                    # Fixed: the patch fetched agent.load_repository() here
                    # and never used the result; credentials already flow
                    # through get_repository_headers below.
                    headers = get_repository_headers(request, monitor["repositoryId"])
                    response = requests.get(
                        monitor["url"], headers=headers, allow_redirects=True
                    )
                    response.raise_for_status()

                    # NOTE(review): file write hidden by patch context.
                    with open(os.path.join(work_temp_dir, file_name), "wb") as f:
                        f.write(response.content)
                except Exception as e:
                    logger.error(f"Error downloading monitor: {monitor}", exc_info=True)
                    return create_error_response(
                        f"Failed to download monitor: {str(e)}", 400
                    )

            # Create extension
            result_extension_file = f"{extension_name}.zip"
            result_extension_absolute = os.path.join(
                work_temp_dir, result_extension_file
            )
            try:
                subprocess.run(
                    [
                        "/apama_work/apama-analytics-builder-block-sdk/analytics_builder",
                        "build",
                        "extension",
                        "--input",
                        work_temp_dir,
                        "--output",
                        result_extension_absolute,
                    ],
                    check=True,
                )
            except subprocess.CalledProcessError as e:
                return create_error_response(f"Failed to build extension: {str(e)}", 500)

            with open(result_extension_absolute, "rb") as extension_zip:
                # NOTE(review): branch condition hidden by patch context;
                # reconstructed as download-unless-upload - confirm.
                if not upload:
                    return send_file(
                        io.BytesIO(extension_zip.read()),
                        mimetype="application/zip",
                        as_attachment=True,
                        download_name=result_extension_file,
                    )
                id = agent.upload_extension(request, extension_name, extension_zip)
                logger.info(f"Uploaded extension {extension_name} as {id}")

                if deploy:
                    agent.restart_cep(request)

                return "", 201
        except Exception as e:
            return create_error_response(f"Failed to process extension: {str(e)}", 500)


# Fixed: the patch registered this route as "/cep/extension/" with no <name>
# URL variable, so Flask could never supply the required `name` argument.
@app.route("/cep/extension/<name>", methods=["GET"])
@handle_errors
def get_extension(name: str):
    """Get details for a specific extension (placeholder implementation)."""
    # Implement actual extension retrieval logic
    cep_extension = {"name": name}  # Placeholder
    return jsonify(cep_extension)


@app.route("/cep/extension", methods=["GET"])
@handle_errors
def get_extension_metadata():
    """Get metadata for all loaded extensions (placeholder implementation)."""
    # Implement actual metadata retrieval logic
    cep_extension_metadata = []  # Placeholder
    return jsonify(cep_extension_metadata)


@app.route("/cep/id", methods=["GET"])
@handle_errors
def get_cep_operationobject_id():
    """Get the managedObject that represents the CEP ctrl microservice.

    Returns:
        JSON object containing the CEP operation object id, or 404.
    """
    result = agent.get_cep_operationobject_id(request)
    if result is None:
        return create_error_response("CEP operation object not found", 404)
    return jsonify(result)


@app.route("/cep/status", methods=["GET"])
@handle_errors
def get_cep_ctrl_status():
    """Get the status of the CEP ctrl microservice.

    (Fixed: the patch's docstring was a copy-paste of /cep/id.)

    Returns:
        JSON status document, or 404 when unavailable.
    """
    result = agent.get_cep_ctrl_status(request)
    if result is None:
        # NOTE(review): tail of this handler hidden by patch context.
        return create_error_response("CEP control status not found", 404)
    return jsonify(result)
+ + Returns: + Response: JSON object containing the CEP operation object ID """ result = agent.get_cep_ctrl_status(request) if result is None: @@ -258,7 +390,7 @@ def get_cep_ctrl_status(): class ExtensionBuilder: """Helper class for building extensions""" - + def __init__(self, work_dir: str, extension_name: str): self.work_dir = work_dir self.extension_name = extension_name @@ -267,32 +399,39 @@ def __init__(self, work_dir: str, extension_name: str): def build(self) -> None: """Build the extension using analytics_builder""" - subprocess.run([ - "/apama_work/apama-analytics-builder-block-sdk/analytics_builder", - "build", - "extension", - "--input", - self.work_dir, - "--output", - self.extension_path - ], check=True) + subprocess.run( + [ + "/apama_work/apama-analytics-builder-block-sdk/analytics_builder", + "build", + "extension", + "--input", + self.work_dir, + "--output", + self.extension_path, + ], + check=True, + ) def get_file_path(self) -> str: """Get the path to the built extension file""" return self.extension_path + class MonitorDownloader: """Helper class for downloading monitors""" - + @staticmethod - async def download_monitor(url: str, headers: Dict[str, str], target_path: str) -> None: + async def download_monitor( + url: str, headers: Dict[str, str], target_path: str + ) -> None: """Download a monitor file""" response = requests.get(url, headers=headers, allow_redirects=True) response.raise_for_status() - + with open(target_path, "wb") as f: f.write(response.content) + def parse_boolean(value: Any) -> bool: if isinstance(value, bool): return value @@ -300,8 +439,10 @@ def parse_boolean(value: Any) -> bool: return value.lower() == "true" return False + def extract_raw_path(path: str) -> str: return path.rsplit("/", 1)[-1] + if __name__ == "__main__": - app.run(host="0.0.0.0", port=80, debug=False) \ No newline at end of file + app.run(host="0.0.0.0", port=80, debug=False) diff --git a/analytics-ui/src/shared/repository.service.ts 
b/analytics-ui/src/shared/repository.service.ts index 7649ed3..8ef0183 100644 --- a/analytics-ui/src/shared/repository.service.ts +++ b/analytics-ui/src/shared/repository.service.ts @@ -1,22 +1,22 @@ // repository.service.ts +import { HttpClient, HttpErrorResponse } from '@angular/common/http'; import { Injectable } from '@angular/core'; -import { BehaviorSubject, combineLatest, EMPTY, forkJoin, from, merge, Observable, of } from 'rxjs'; +import { + FetchClient, +} from '@c8y/client'; +import { AlertService, gettext } from '@c8y/ngx-components'; +import * as _ from 'lodash'; +import { BehaviorSubject, EMPTY, forkJoin, from, Observable, of } from 'rxjs'; +import { catchError, combineLatestWith, map, shareReplay, switchMap, take, tap } from 'rxjs/operators'; import { BACKEND_PATH_BASE, CEP_Block, + Repository, REPOSITORY_CONFIGURATION_ENDPOINT, REPOSITORY_CONTENT_ENDPOINT, - REPOSITORY_CONTENT_LIST_ENDPOINT, - Repository + REPOSITORY_CONTENT_LIST_ENDPOINT } from './analytics.model'; -import { - FetchClient, -} from '@c8y/client'; -import { AlertService, gettext } from '@c8y/ngx-components'; -import { HttpClient, HttpErrorResponse } from '@angular/common/http'; -import { catchError, map, switchMap, combineLatestWith, tap, shareReplay, take } from 'rxjs/operators'; -import * as _ from 'lodash'; import { AnalyticsService } from './analytics.service'; import { getFileExtension, removeFileExtension } from './utils';