diff --git a/Dockerfile b/Dockerfile
index 237619d..1e9fb52 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -12,4 +12,4 @@ COPY . .
 
 EXPOSE 5000
 
-CMD [ "flask", "run", "--host=0.0.0.0", "--port=5000"]
\ No newline at end of file
+CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "5000"]
diff --git a/Makefile b/Makefile
index 43e7d3b..56d1e22 100644
--- a/Makefile
+++ b/Makefile
@@ -7,7 +7,7 @@ local-dev:
 	echo "\nDEPLOYMENT_HOST='localhost' # DO NOT EDIT THIS" >> .env
 	bash setup-vault.sh docker-compose-local.yml
 	echo "\nVAULT_BASE_URL='http://localhost:8200'" >> .env
-	mkdir deployments nginx-confs
+	mkdir -p deployments nginx-confs
 	docker-compose -f docker-compose-local.yml up -d nginx
 
 .PHONY: sarthi
diff --git a/app.py b/app.py
index cbca40d..74f0c3e 100644
--- a/app.py
+++ b/app.py
@@ -4,67 +4,85 @@
 import jwt
 from dotenv import load_dotenv
-from flask import Flask, jsonify, request
-from flask_httpauth import HTTPTokenAuth
+from fastapi import Depends, FastAPI, HTTPException, Request
+from fastapi.responses import JSONResponse
+from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
 
+import server.constants as constants
 from server.deployer import Deployer, DeploymentConfig
 
 load_dotenv()
 
-if (os.environ.get("ENV") or "local").lower() == "local":
-    logging.basicConfig(level=logging.NOTSET)
+app = FastAPI()
+security = HTTPBearer()
+app.config = {"SECRET_TEXT": os.environ.get("SECRET_TEXT")}
+is_local = (os.environ.get("ENV") or constants.LOCAL).upper() == constants.LOCAL
+logging.basicConfig(level=logging.DEBUG if is_local else logging.INFO)
 
-app = Flask(__name__)
-auth = HTTPTokenAuth("Bearer")
-app.config["SECRET_TEXT"] = os.environ.get("SECRET_TEXT")
-
 
-@auth.verify_token
-def verify_token(token):
+async def verify_token(credentials: HTTPAuthorizationCredentials = Depends(security)):
+    token = credentials.credentials
     try:
         data = jwt.decode(token, app.config["SECRET_TEXT"], algorithms=["HS256"])
         logging.debug(f"Authenticated successfully {data}")
-    except Exception as e:  # noqa: E722
-        logging.debug(f"Error while authenticating {e}")
-        return False
-    return True
-
-
-# Your deployment endpoint
-@app.route("/deploy", methods=["POST", "DELETE"])
-@auth.login_required
-def deploy():
-    data = request.get_json()
-
-    # Create DeploymentConfig object
-    project_name = urlparse(data.get("project_git_url")).path[
-        :-4
-    ]  # remove .git from the end
+    except Exception as e:
+        logging.info(f"Error while authenticating {e}")
+        raise HTTPException(status_code=401, detail="Invalid token")
+    return data
+
+
+@app.post("/deploy")
+@app.delete("/deploy")
+async def deploy(request: Request, token: dict = Depends(verify_token)):
+    data = await request.json()
+
+    try:
+        project_git_url = urlparse(data.get("project_git_url")).path
+    except Exception as e:
+        logging.error(e)
+        return JSONResponse(
+            status_code=400,
+            content={"message": f"Bad Project Git URL: {str(e)}"},
+        )
+
+    if not project_git_url or not project_git_url.endswith(".git"):
+        return JSONResponse(
+            status_code=400,
+            content={"message": "Project URL must not be empty and must end with .git"},
+        )
+
+    project_name = project_git_url[:-4]  # remove .git from the end
    config = DeploymentConfig(
         project_name=project_name,
         branch_name=data.get("branch"),
         project_git_url=data.get("project_git_url"),
-        compose_file_location=data.get("compose_file_location") or "docker-compose.yml",
+        compose_file_location=data.get("compose_file_location") or constants.COMPOSE_FILE,
         rest_action=request.method,
     )
-    deployer = Deployer(config)
-    if request.method == "POST":
+    deployer = Deployer(config)
+
+    if request.method == 
constants.POST: urls = deployer.deploy_preview_environment() - return jsonify(urls) - elif request.method == "DELETE": + return JSONResponse(content=urls) + elif request.method == constants.DELETE: deployer.delete_preview_environment() - return ( - jsonify({"message": "Removed preview environment"}), - 200, + return JSONResponse( + status_code=200, + content={"message": "Removed preview environment"}, ) else: - return ( - jsonify({"error": "Invalid HTTP method. Supported methods: POST, DELETE"}), - 405, + return JSONResponse( + status_code=405, + content={"error": "Invalid HTTP method. Supported methods: POST, DELETE"}, ) if __name__ == "__main__": - app.run(debug=True, use_reloader=False) + import uvicorn + + uvicorn.run( + app, + host="0.0.0.0", + port=5000, + ) diff --git a/docker-compose-local.yml b/docker-compose-local.yml index 7305be8..d44a400 100644 --- a/docker-compose-local.yml +++ b/docker-compose-local.yml @@ -2,7 +2,7 @@ version: "3" services: nginx: - image: nginx:latest + image: nginx:1.26.0 restart: always container_name: sarthi_nginx ports: @@ -35,7 +35,7 @@ services: - vault vault: - image: vault:1.12.3 + image: hashicorp/vault:1.16 restart: always ports: - "8200:8200" diff --git a/docker-compose.yml b/docker-compose.yml index 9a41e58..1fe022b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,7 +2,7 @@ version: "3" services: nginx: - image: nginx:latest + image: nginx:1.26.0 restart: always container_name: sarthi_nginx ports: @@ -41,7 +41,7 @@ services: - vault loki: - image: grafana/loki:latest + image: grafana/loki:main-8978ecf restart: always ports: - 127.0.0.1:3100:3100 @@ -52,7 +52,7 @@ services: - promtail promtail: - image: grafana/promtail:latest + image: grafana/promtail:main-8978ecf restart: always volumes: - /var/log:/var/log @@ -60,7 +60,7 @@ services: command: -config.file=/etc/promtail/promtail-config.yaml grafana: - image: grafana/grafana + image: grafana/grafana:10.1.10-ubuntu restart: always container_name: grafana volumes: @@ -71,7 +71,7 @@ services: - loki vault: - image: vault:1.12.3 + image: hashicorp/vault:1.16 restart: always volumes: - ./vault/vault.json:/vault/config/vault.json @@ -94,7 +94,7 @@ services: retries: 3 portainer: - image: portainer/portainer-ce:latest + image: portainer/portainer-ce:2.20.2 volumes: - portainer_data:/data - /var/run/docker.sock:/var/run/docker.sock diff --git a/requirements-dev.txt b/requirements-dev.txt index 4d05f8a..2df6576 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,4 +1,4 @@ -pytest==7.4.4 -pytest-mock==3.12.0 -pre-commit==3.6.0 -coverage==7.4.0 \ No newline at end of file +pytest~=7.4.4 +pytest-mock~=3.12.0 +pre-commit~=3.6.0 +coverage~=7.4.0 \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index bcf54e4..a306cf1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,7 +1,7 @@ -pyyaml==6.0.1 -flask==3.0.0 -pyjwt==2.8.0 -Flask-HTTPAuth==4.8.0 -python-dotenv==1.0.0 -requests==2.31.0 -filelock==3.13.1 \ No newline at end of file +pyyaml~=6.0.1 +pyjwt~=2.8.0 +python-dotenv~=1.0.0 +requests>=2.32.0 +filelock~=3.13.1 +fastapi~=0.111.0 +uvicorn~=0.29.0 \ No newline at end of file diff --git a/server/constants.py b/server/constants.py new file mode 100644 index 0000000..530424c --- /dev/null +++ b/server/constants.py @@ -0,0 +1,20 @@ +# Can't be used as branch names for now +DEFAULT_SECRETS_PATH = ["default-dev-secrets"] +COMPOSE_FILE = "docker-compose.yml" + +GET = "GET" +POST = "POST" +DELETE = "DELETE" +PUT = "PUT" + +LOCAL = "LOCAL" +PROD = "PROD" + 
+DOCKER_HOST_NETWORK_DOMAIN = "host.docker.internal"
+
+DEFAULT_DEPLOYMENT_PORT_START = 15000
+DEFAULT_DEPLOYMENT_PORT_END = 25000
+
+LOCALHOST = "localhost"
+
+SAMPLE_ENV_FILENAMES = [".env.sample", "env.sample", "sample.env"]
diff --git a/server/deployer.py b/server/deployer.py
index 14d84bd..1d6ee02 100644
--- a/server/deployer.py
+++ b/server/deployer.py
@@ -5,6 +5,9 @@
 import typing
 
 import filelock
+from fastapi.exceptions import HTTPException
+
+import server.constants as constants
 
 from .utils import ComposeHelper, DeploymentConfig, NginxHelper, SecretsHelper
 
@@ -28,12 +31,12 @@ def __init__(self, config: DeploymentConfig):
         )
 
         with self._lock:
-            if config.rest_action != "DELETE":
+            if config.rest_action != constants.DELETE:
                 self._setup_project()
 
             self._compose_helper = ComposeHelper(
                 os.path.join(self._project_path, config.compose_file_location),
-                config.rest_action != "DELETE",
+                config.rest_action != constants.DELETE,
             )
             self._secrets_helper = SecretsHelper(
                 self._config.project_name, self._config.branch_name, self._project_path
@@ -62,14 +65,17 @@ def _clone_project(self):
             stdout, stderr = process.communicate()
         except subprocess.TimeoutExpired as e:
             logger.error(f"Error cloning the repo {self._config} with {e}")
-            raise
+            raise HTTPException(500, str(e))
         if process.returncode == 0:
             logger.info("Git clone successful.")
         else:
             logger.error(f"Git clone failed. Return code: {process.returncode}")
             logger.error(f"Standard Output: {stdout.decode()}")
             logger.error(f"Standard Error: {stderr.decode()}")
-            raise Exception(f"Cloning the Git repo failed {self._config}")
+            raise HTTPException(
+                500,
+                f"Cloning the Git repo failed {self._config.project_git_url}:{self._config.branch_name} {stderr.decode()}",
+            )
 
     def _setup_project(self):
         if os.path.exists(self._project_path):
diff --git a/server/utils.py b/server/utils.py
index cd73bad..af35537 100644
--- a/server/utils.py
+++ b/server/utils.py
@@ -12,6 +12,9 @@
 import requests
 import yaml
 from dotenv import dotenv_values
+from fastapi import HTTPException
+
+import server.constants as constants
 
 logger = logging.getLogger(__name__)
 
@@ -21,8 +24,8 @@ class DeploymentConfig:
     project_name: str
     branch_name: str
     project_git_url: str
-    compose_file_location: str = "docker-compose.yml"
-    rest_action: str = "POST"
+    compose_file_location: str = constants.COMPOSE_FILE
+    rest_action: str = constants.POST
 
     def __post_init__(self):
         self.branch_name_raw = self.branch_name
@@ -30,22 +33,25 @@ def __post_init__(self):
 
         self.project_name = re.sub(r"[^a-zA-Z]", "", self.project_name.lower())
         self.project_name = (
-            self.project_name[-10:]
-            if len(self.project_name) > 10
-            else self.project_name
+            self.project_name[:10] if len(self.project_name) > 10 else self.project_name
         )
         self.branch_name = re.sub(r"[^a-zA-Z]", "", self.branch_name.lower())
         self.branch_name = (
             self.branch_name[:20] if len(self.branch_name) > 20 else self.branch_name
         )
+        if self.branch_name in constants.DEFAULT_SECRETS_PATH:
+            raise HTTPException(
+                400,
+                f"{constants.DEFAULT_SECRETS_PATH} is a reserved keyword in Sarthi. Please use a different branch name",
+            )
 
     def get_project_hash(self):
         return get_random_stub(f"{self.project_name}:{self.branch_name}", 10)
 
     def __repr__(self):
         return (
-            f"DeploymentConfig({self.project_name_raw!r}, {self.branch_name_raw!r}, {self.project_git_url!r}, "
-            f"{self.compose_file_location!r}, {self.rest_action!r})"
+            f"DeploymentConfig({self.project_name_raw=!r}, {self.branch_name_raw=!r}, {self.project_git_url=!r}, "
+            f"{self.compose_file_location=!r}, {self.rest_action=!r})"
         )
 
 
@@ -74,14 +80,24 @@ def __init__(self, compose_file_location: str, load_compose_file=True):
     def start_services(
         self, nginx_port: str, conf_file_path: str, deployment_namespace: str
     ):
-        self._generate_processed_compose_file(
-            nginx_port, conf_file_path, deployment_namespace
-        )
+        try:
+            self._generate_processed_compose_file(
+                nginx_port, conf_file_path, deployment_namespace
+            )
+        except Exception as e:
+            logger.error(f"Error generating processed compose file: {e}")
+            raise HTTPException(500, str(e))
 
         command = ["docker-compose", "up", "-d", "--build"]
         project_dir = pathlib.Path(self._compose_file_location).parent
-        subprocess.run(command, check=True, cwd=project_dir)
-        logger.info("Docker Compose up -d --build executed successfully.")
+
+        try:
+            subprocess.run(command, check=True, cwd=project_dir)
+            logger.info("Docker Compose up -d --build executed successfully.")
+        except Exception as e:
+            msg = f"An unexpected error occurred while starting services: {e}"
+            logger.error(msg)
+            raise HTTPException(500, msg)
 
     def remove_services(self):
         if not os.path.exists(pathlib.Path(self._compose_file_location).parent):
@@ -89,8 +105,13 @@ def remove_services(self):
             return
         command = ["docker-compose", "down", "-v"]
         project_dir = pathlib.Path(self._compose_file_location).parent
-        subprocess.run(command, check=True, cwd=project_dir)
-        logger.info("Docker Compose down -v executed successfully.")
+        try:
+            subprocess.run(command, check=True, cwd=project_dir)
+            logger.info("Docker Compose down -v executed successfully.")
+        except Exception as e:
+            msg = f"An unexpected error occurred while stopping services: {e}"
+            logger.error(msg)
+            raise HTTPException(500, msg)
 
     def _generate_processed_compose_file(
         self, nginx_port: str, conf_file_path: str, deployment_namespace: str
@@ -203,11 +224,21 @@ def __init__(
         self._branch_name = config.branch_name
         self._project_hash = config.get_project_hash()
         self._port = None
-        self._host_name = os.environ.get("DEPLOYMENT_HOST") or "host.docker.internal"
-        self._start_port = os.environ.get("DEPLOYMENT_PORT_START") or 15000
-        self._end_port = os.environ.get("DEPLOYMENT_PORT_END") or 20000
-        self._DOMAIN_NAME = os.environ.get("DOMAIN_NAME") or "localhost"
-        self._DOCKER_INTERNAL_HOSTNAME: typing.Final[str] = "host.docker.internal"
+        self._host_name = (
+            os.environ.get("DEPLOYMENT_HOST") or constants.DOCKER_HOST_NETWORK_DOMAIN
+        )
+        self._start_port = (
+            os.environ.get("DEPLOYMENT_PORT_START")
+            or constants.DEFAULT_DEPLOYMENT_PORT_START
+        )
+        self._end_port = (
+            os.environ.get("DEPLOYMENT_PORT_END")
+            or constants.DEFAULT_DEPLOYMENT_PORT_END
+        )
+        self._DOMAIN_NAME = os.environ.get("DOMAIN_NAME") or constants.LOCALHOST
+        self._DOCKER_INTERNAL_HOSTNAME: typing.Final[
+            str
+        ] = constants.DOCKER_HOST_NETWORK_DOMAIN
         self._outer_conf_base_path = outer_conf_base_path
         self._deployment_project_path = deployment_project_path
         self._conf_file_name = f"{self._project_name}-{self._project_hash}.conf"
@@ -231,7 +262,7 @@ def find_free_port(self) -> str:
                     self._port = current_port
                     return str(current_port)
-        raise RuntimeError("Could not find a free port in the specified range")
+        raise HTTPException(500, "Could not find a free port in the specified range")
 
     def generate_outer_proxy_conf_file(self, port: str) -> str:
         port = port or self._port
@@ -293,10 +324,14 @@ def _test_nginx_config(self):
 
     def reload_nginx(self):
         self._test_nginx_config()
-        subprocess.run(
-            ["docker", "exec", "sarthi_nginx", "nginx", "-s", "reload"],
-            check=True,
-        )
+        try:
+            subprocess.run(
+                ["docker", "exec", "sarthi_nginx", "nginx", "-s", "reload"],
+                check=True,
+            )
+        except Exception as e:
+            logger.error(e)
+            raise HTTPException(500, "Failed to reload Nginx for the deployment")
         logger.info("Nginx reloaded successfully.")
 
     def remove_outer_proxy(self):
@@ -313,57 +348,89 @@ class SecretsHelper:
     def __init__(self, project_name, branch_name, project_path):
         vault_url = os.environ.get("VAULT_BASE_URL")
         vault_token = os.environ.get("VAULT_TOKEN")
+        if not vault_url or not vault_token:
+            raise HTTPException(500, "Vault is down or not configured correctly.")
         self._project_path = project_path
         self._secrets_namespace = f"{project_name}/{branch_name}"
         self._secret_url = f"{vault_url}/v1/kv/data/{self._secrets_namespace}"
+        self._default_secret_url = (
+            f"{vault_url}/v1/kv/data/{project_name}/default-dev-secrets"
+        )
         self._secret_metadata_url = (
             f"{vault_url}/v1/kv/metadata/{self._secrets_namespace}"
         )
         self._headers = {"X-Vault-Token": vault_token}
 
+    def _write_secrets_to_vault(self, secret_path_url, secrets: typing.Dict):
+        try:
+            response = requests.post(
+                url=secret_path_url,
+                headers=self._headers,
+                data=json.dumps(
+                    {"data": {key: value for key, value in secrets.items()}}
+                ),
+            )
+            response.raise_for_status()
+        except requests.HTTPError as e:
+            logger.error(f"Error writing secrets to {secret_path_url}: {e}")
+            raise HTTPException(500, str(e))
+
+    def _read_secrets_from_vault(self, secret_path_url):
+        try:
+            response = requests.get(url=secret_path_url, headers=self._headers)
+        except requests.RequestException:
+            logger.error(f"Cannot read secrets from vault for {secret_path_url}")
+            raise HTTPException(
+                500, f"Cannot read secrets from vault for {secret_path_url}"
+            )
+        if response.status_code != 200:
+            return None
+        return response.json()
+
     def _create_env_placeholder(self):
-        sample_envs = {"key": "secret-value"}
-        # check for .env.sample in folder and load those sample .env vars in vault
-        sample_env_path = os.path.join(self._project_path, ".env.sample")
-        if os.path.exists(sample_env_path):
-            sample_envs = dotenv_values(sample_env_path)
-
-        sample_env_path = os.path.join(self._project_path, "sample.env")
-        if os.path.exists(sample_env_path):
-            sample_envs = dotenv_values(sample_env_path)
-
-        response = requests.post(
-            url=self._secret_url,
-            headers=self._headers,
-            data=json.dumps(
-                {"data": {key: value for key, value in sample_envs.items()}}
-            ),
-        )
-        response.raise_for_status()
-        logger.debug(f"Successfully loaded sample env vars in value {response.json()}")
+        # check whether we can copy env vars from the default-dev-secrets path
+        default_response = self._read_secrets_from_vault(self._default_secret_url)
+
+        if not default_response:
+            logger.debug(
+                "Default secrets not found, ingesting secrets from sample envs"
+            )
+            # check for .env.sample in folder and load those sample .env vars in vault
+            sample_envs = {"key": "secret-value"}
+            for sample_env_filename in constants.SAMPLE_ENV_FILENAMES:
+                sample_env_path = os.path.join(self._project_path, sample_env_filename)
+                if os.path.exists(sample_env_path):
+                    sample_envs = dotenv_values(sample_env_path)
+                    break
+            self._write_secrets_to_vault(self._default_secret_url, sample_envs)
+            self._write_secrets_to_vault(self._secret_url, sample_envs)
+        else:
+            self._write_secrets_to_vault(self._secret_url, default_response["data"]["data"])
 
     def inject_env_variables(self, project_path):
-        response = requests.get(url=self._secret_url, headers=self._headers)
-        if response.status_code != 200:
-            logger.debug(f"No secrets found in vault for {self._secrets_namespace}")
+        secret_data = self._read_secrets_from_vault(self._secret_url)
+
+        if not secret_data:
+            logger.info(f"No secrets found in vault for {self._secrets_namespace}")
             self._create_env_placeholder()
             return
-        logger.debug(f"Found secrets for {self._secrets_namespace}")
-        secret_data = response.json()
+
+        logger.info(f"Found secrets for {self._secrets_namespace}")
         with open(os.path.join(project_path, ".env"), "w") as file:
             for key, value in secret_data["data"]["data"].items():
                 file.write(f'{key}="{value}"\n')
 
     def cleanup_deployment_variables(self):
-        response = requests.delete(url=self._secret_metadata_url, headers=self._headers)
-        logger.debug(
-            f"Tried Removing Deployment variables from Vault {response.status_code}"
-        )
         try:
+            response = requests.delete(
+                url=self._secret_metadata_url, headers=self._headers
+            )
+            logger.debug(
+                f"Tried Removing Deployment variables from Vault {response.status_code}"
+            )
             response.raise_for_status()
         except requests.HTTPError as e:
-            logger.debug(f"Error removing deployment secrets {e}")
-        return response
+            logger.error(f"Error removing deployment secrets {e}")
 
 
 def get_random_stub(project_name: str, length: int = 64) -> str:
@@ -372,5 +439,18 @@ def get_random_stub(project_name: str, length: int = 64) -> str:
 
 
 def load_yaml_file(filename: str):
-    with open(filename) as file:
-        return yaml.safe_load(file)
+    try:
+        with open(filename) as file:
+            return yaml.safe_load(file)
+    except FileNotFoundError as e:
+        logging.error(f"File not found: {filename}. Error: {str(e)}")
+        raise HTTPException(
+            status_code=400,
+            detail=f"File not found: {filename}. Please provide a valid compose file location in the config",
+        )
+    except yaml.YAMLError as e:
+        logging.error(f"Error parsing YAML file: {filename}. Error: {str(e)}")
+        raise HTTPException(
+            status_code=400,
+            detail=f"Error parsing YAML file: {filename}",
+        )
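
Reviewer note: below is a minimal smoke-test sketch for exercising the migrated endpoint, assuming the service is reachable on http://localhost:5000 (the port exposed in the Dockerfile) and SECRET_TEXT matches the server's environment. The repository URL, branch name, script name, and timeout are placeholder values and not part of this change; verify_token only checks that the Bearer token is an HS256 JWT signed with SECRET_TEXT, so any signed payload is accepted.

# smoke_test_deploy.py -- hypothetical helper, not included in this PR
import os

import jwt       # pyjwt, already a dependency in requirements.txt
import requests  # already a dependency in requirements.txt

SECRET_TEXT = os.environ["SECRET_TEXT"]
BASE_URL = "http://localhost:5000"  # assumed local Sarthi instance

# verify_token() decodes with HS256 and SECRET_TEXT, so any payload signed this way passes.
token = jwt.encode({"sub": "sarthi-smoke-test"}, SECRET_TEXT, algorithm="HS256")
headers = {"Authorization": f"Bearer {token}"}

payload = {
    # placeholder repo/branch; the URL must end with .git per the new validation
    "project_git_url": "https://github.com/example/example-service.git",
    "branch": "feature-x",
    "compose_file_location": "docker-compose.yml",
}

# POST creates the preview environment and returns the generated service URLs.
created = requests.post(f"{BASE_URL}/deploy", json=payload, headers=headers, timeout=600)
print(created.status_code, created.json())

# DELETE with the same body tears the preview environment down.
removed = requests.delete(f"{BASE_URL}/deploy", json=payload, headers=headers, timeout=600)
print(removed.status_code, removed.json())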