diff --git a/build/ax/build/devops.py b/build/ax/build/devops.py index 62f948cd2573..f84578c6e0ec 100644 --- a/build/ax/build/devops.py +++ b/build/ax/build/devops.py @@ -13,7 +13,7 @@ DEVOPS_CONTAINERS_PATH = os.path.join(SRC_PATH, "devops/builds") -DEVOPS_BUILDER_IMAGE = '{}/argobase/axdevopsbuilder:v8'.format(ARGO_BASE_REGISTRY) +DEVOPS_BUILDER_IMAGE = '{}/argobase/axdevopsbuilder:v9'.format(ARGO_BASE_REGISTRY) DEBIAN_BUILDER_IMAGE = '{}/argobase/axplatbuilder-debian:v15'.format(ARGO_BASE_REGISTRY) class DevOpsModules(object): diff --git a/devops/src/ax/devops/apps/gateway/axjira/__init__.py b/devops/__init__.py similarity index 100% rename from devops/src/ax/devops/apps/gateway/axjira/__init__.py rename to devops/__init__.py diff --git a/devops/builds/infrastructure/axeventtrigger/Dockerfile-debug.in b/devops/builds/infrastructure/axeventtrigger/Dockerfile-debug.in deleted file mode 100644 index 1041eb4bb7d8..000000000000 --- a/devops/builds/infrastructure/axeventtrigger/Dockerfile-debug.in +++ /dev/null @@ -1,12 +0,0 @@ -# Dockerfile for event trigger - -FROM %%BUILDER_IMAGE_ID%% - -ENV PYTHONPATH=/ax/src -COPY docker_build/src /ax/src - -COPY axeventtrigger.py /ax/bin/axeventtrigger -COPY run.sh /ax/bin/run.sh -RUN chmod +x /ax/bin/run.sh - -CMD /ax/bin/run.sh \ No newline at end of file diff --git a/devops/builds/infrastructure/axeventtrigger/Dockerfile.in b/devops/builds/infrastructure/axeventtrigger/Dockerfile.in deleted file mode 100644 index 99288a4a10d3..000000000000 --- a/devops/builds/infrastructure/axeventtrigger/Dockerfile.in +++ /dev/null @@ -1,10 +0,0 @@ -FROM %%BUILDER_IMAGE_ID%% - -ENV PYTHONPATH=/ax/src -COPY docker_build/src /ax/src - -COPY axeventtrigger.py /ax/bin/axeventtrigger -COPY run.sh /ax/bin/run.sh -RUN chmod +x /ax/bin/run.sh - -CMD /ax/bin/run.sh \ No newline at end of file diff --git a/devops/builds/infrastructure/axeventtrigger/axeventtrigger.py b/devops/builds/infrastructure/axeventtrigger/axeventtrigger.py deleted file mode 100755 index 74627860548b..000000000000 --- a/devops/builds/infrastructure/axeventtrigger/axeventtrigger.py +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env python3 - -from ax.util.az_patch import az_patch - -az_patch() - -from ax.devops.apps.workers.event_trigger import main - -if __name__ == '__main__': - main() diff --git a/devops/builds/infrastructure/axeventtrigger/axeventtrigger.spec b/devops/builds/infrastructure/axeventtrigger/axeventtrigger.spec deleted file mode 100644 index fbb977b3d503..000000000000 --- a/devops/builds/infrastructure/axeventtrigger/axeventtrigger.spec +++ /dev/null @@ -1,30 +0,0 @@ -# -*- mode: python -*- - -block_cipher = None - -import os -SRCROOT = os.path.normpath(SPECPATH + '/../../../../') -HOOKSPATH = ['{}/platform/dev/pyinstaller/hooks'.format(SRCROOT)] - -a = Analysis(['{}/axeventtrigger.py'.format(SPECPATH)], - pathex=['{}/common/python'.format(SRCROOT)], - binaries=[], - hiddenimports=[], - hookspath=HOOKSPATH, - runtime_hooks=[], - excludes=[], - win_no_prefer_redirects=False, - win_private_assemblies=False, - cipher=block_cipher) -pyz = PYZ(a.pure, a.zipped_data, - cipher=block_cipher) -exe = EXE(pyz, - a.scripts, - a.binaries, - a.zipfiles, - a.datas, - name='axeventtrigger', - debug=False, - strip=False, - upx=True, - console=True ) diff --git a/devops/builds/infrastructure/axeventtrigger/run.sh b/devops/builds/infrastructure/axeventtrigger/run.sh deleted file mode 100644 index 33f594617893..000000000000 --- a/devops/builds/infrastructure/axeventtrigger/run.sh +++ /dev/null @@ -1,4 +0,0 @@ 
-#!/bin/sh - -python3 /ax/bin/axeventtrigger & -sleep 9223372036854775807 diff --git a/devops/builds/infrastructure/gateway/Dockerfile b/devops/builds/infrastructure/gateway/Dockerfile index 6d3c0e8b6514..43fdc1744cc9 100644 --- a/devops/builds/infrastructure/gateway/Dockerfile +++ b/devops/builds/infrastructure/gateway/Dockerfile @@ -2,31 +2,10 @@ FROM %%BUILDER_IMAGE_ID%% -RUN apk --update add nginx - -ENV AXHOME=/ax -ENV PYTHONPATH=$AXHOME/src -ENV DJANGOHOME $AXHOME/src/ax/devops/apps/gateway - -RUN mkdir -p $AXHOME/db $AXHOME/log -RUN mkdir -p /run/nginx - -COPY run.sh \ - gateway.conf \ - gateway.ini \ - $AXHOME/ -RUN chmod +x $AXHOME/run.sh -RUN touch /run/nginx/nginx.pid -RUN touch /var/log/cron.log -RUN ln -s $AXHOME/gateway.conf /etc/nginx/conf.d/gateway.conf - +ENV PATH $PATH:/ax/bin +ENV PYTHONPATH /ax/src EXPOSE 8889 -COPY docker_build/src $AXHOME/src - -RUN python3 $DJANGOHOME/manage.py check \ - && python3 $DJANGOHOME/manage.py makemigrations \ - && python3 $DJANGOHOME/manage.py migrate \ - && python3 $DJANGOHOME/manage.py crontab add - -CMD $AXHOME/run.sh +COPY gateway.py /ax/bin/gateway +COPY docker_build/src /ax/src +CMD ["gateway"] diff --git a/devops/builds/infrastructure/gateway/gateway.conf b/devops/builds/infrastructure/gateway/gateway.conf deleted file mode 100644 index 1e6d7c904989..000000000000 --- a/devops/builds/infrastructure/gateway/gateway.conf +++ /dev/null @@ -1,13 +0,0 @@ -upstream django { - server unix:///ax/gateway.sock; -} - -server { - listen 8889; - charset utf-8; - - location / { - uwsgi_pass django; - include /etc/nginx/uwsgi_params; - } -} diff --git a/devops/builds/infrastructure/gateway/gateway.ini b/devops/builds/infrastructure/gateway/gateway.ini deleted file mode 100644 index 20e6386f36dc..000000000000 --- a/devops/builds/infrastructure/gateway/gateway.ini +++ /dev/null @@ -1,9 +0,0 @@ -[uwsgi] - -chdir = /ax/src/ax/devops/apps/gateway -module = gateway.wsgi -master = true -processes = 2 -socket = /ax/gateway.sock -chmod-socket = 666 -vacuum = true diff --git a/devops/builds/infrastructure/gateway/gateway.py b/devops/builds/infrastructure/gateway/gateway.py new file mode 100755 index 000000000000..d1d5ad33618e --- /dev/null +++ b/devops/builds/infrastructure/gateway/gateway.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python3 + +from ax.devops.gateway.main import main +if __name__ == '__main__': + main() diff --git a/devops/builds/infrastructure/gateway/run.sh b/devops/builds/infrastructure/gateway/run.sh deleted file mode 100644 index bd61c33efd60..000000000000 --- a/devops/builds/infrastructure/gateway/run.sh +++ /dev/null @@ -1,6 +0,0 @@ -#!/bin/sh - -/usr/bin/uwsgi --ini /ax/gateway.ini & -/usr/sbin/nginx & -/usr/sbin/crond & -sleep 9223372036854775807 diff --git a/devops/builds/infrastructure/repomanager/Dockerfile b/devops/builds/infrastructure/repomanager/Dockerfile deleted file mode 100644 index 4dbe28683b50..000000000000 --- a/devops/builds/infrastructure/repomanager/Dockerfile +++ /dev/null @@ -1,20 +0,0 @@ -# Dockerfile for repomanager - -#FROM %%ARGO_BASE_REGISTRY%%/argobase/alpine:3.5 -# -#RUN apk --no-cache --update add git -# -#ENV PATH $PATH:/ax/bin -#COPY docker_build/dist/repomanager /ax/bin/repomanager -#CMD ["repomanager"] - -FROM %%BUILDER_IMAGE_ID%% - -ENV PYTHONPATH=/ax/src -COPY docker_build/src /ax/src - -COPY repomanager.py /ax/bin/repomanager -COPY run.sh /ax/bin/run.sh -RUN chmod +x /ax/bin/run.sh - -CMD /ax/bin/run.sh diff --git a/devops/builds/infrastructure/repomanager/repomanager.py 
b/devops/builds/infrastructure/repomanager/repomanager.py deleted file mode 100755 index 026e7fa8b43b..000000000000 --- a/devops/builds/infrastructure/repomanager/repomanager.py +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env python3 - -from ax.util.az_patch import az_patch - -az_patch() - -from ax.devops.apps.workers.repo_manager import main - -if __name__ == '__main__': - main() diff --git a/devops/builds/infrastructure/repomanager/repomanager.spec b/devops/builds/infrastructure/repomanager/repomanager.spec deleted file mode 100644 index 2eef67154905..000000000000 --- a/devops/builds/infrastructure/repomanager/repomanager.spec +++ /dev/null @@ -1,30 +0,0 @@ -# -*- mode: python -*- - -block_cipher = None - -import os -SRCROOT = os.path.normpath(SPECPATH + '/../../../../') -HOOKSPATH = ['{}/platform/dev/pyinstaller/hooks'.format(SRCROOT)] - -a = Analysis(['{}/repomanager.py'.format(SPECPATH)], - pathex=['{}/common/python'.format(SRCROOT)], - binaries=[], - hiddenimports=[], - hookspath=HOOKSPATH, - runtime_hooks=[], - excludes=[], - win_no_prefer_redirects=False, - win_private_assemblies=False, - cipher=block_cipher) -pyz = PYZ(a.pure, a.zipped_data, - cipher=block_cipher) -exe = EXE(pyz, - a.scripts, - a.binaries, - a.zipfiles, - a.datas, - name='repomanager', - debug=False, - strip=False, - upx=True, - console=True ) diff --git a/devops/builds/infrastructure/repomanager/run.sh b/devops/builds/infrastructure/repomanager/run.sh deleted file mode 100644 index 35d1d0ef4723..000000000000 --- a/devops/builds/infrastructure/repomanager/run.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/sh - -python3 /ax/bin/repomanager & -sleep 9223372036854775807 diff --git a/devops/builds/templates/axdjango/Dockerfile b/devops/builds/templates/axdjango/Dockerfile deleted file mode 100644 index 1a1b82f0ee83..000000000000 --- a/devops/builds/templates/axdjango/Dockerfile +++ /dev/null @@ -1,8 +0,0 @@ -FROM %%ARGO_BASE_REGISTRY%%/argobase/django:1.9-python3 - -# Install Python dependencies -COPY requirements.txt /tmp/requirements.txt - -RUN pip install --upgrade pip \ - && pip install -r /tmp/requirements.txt \ - && apt-get update && apt-get install -y vim curl git diff --git a/devops/builds/templates/axdjango/requirements.txt b/devops/builds/templates/axdjango/requirements.txt deleted file mode 100644 index 720e6d175cc0..000000000000 --- a/devops/builds/templates/axdjango/requirements.txt +++ /dev/null @@ -1,11 +0,0 @@ -boto3==1.4.0 -django-extensions==1.6.7 -django-filter==0.13.0 -djangorestframework==3.3.3 -GitPython==1.0.2 -pika==0.10.0 -redis==2.10.3 -requests==2.10.0 -retrying==1.3.3 -redlock-py==1.0.8 -PyJWT==1.4.2 \ No newline at end of file diff --git a/devops/requirements/requirements.txt b/devops/requirements/requirements.txt index 1735d8a3ba63..8215bfa26adb 100644 --- a/devops/requirements/requirements.txt +++ b/devops/requirements/requirements.txt @@ -16,10 +16,8 @@ Jinja2==2.8 future==0.15.2 google-cloud==0.26.1 -# axagent -GitPython==1.0.2 +# gateway jira==1.0.7 -flower==0.9.1 # axscheduler APScheduler==3.2.0 @@ -27,18 +25,6 @@ APScheduler==3.2.0 # fixturemanager pymongo==3.4.0 -# gateway -django==1.9 -django-extensions==1.6.7 -django-filter==0.13.0 -djangorestframework==3.3.3 -django-crontab==0.7.1 -requests==2.10.0 -uwsgi==2.0.14 -GitPython==1.0.2 -pika==0.10.0 -redlock-py==1.0.8 - # axworkflowadc transitions==0.4.0 diff --git a/devops/src/ax/devops/apps/gateway/axjira/api.py b/devops/src/ax/devops/apps/gateway/axjira/api.py deleted file mode 100644 index 81dde69b756a..000000000000 --- 
a/devops/src/ax/devops/apps/gateway/axjira/api.py +++ /dev/null @@ -1,494 +0,0 @@ -import copy -import json -import logging - -import jira -import requests - -from concurrent.futures import ThreadPoolExecutor, as_completed -from urllib.parse import urlparse - -from rest_framework.decorators import api_view, detail_route, list_route -from rest_framework.response import Response -from rest_framework.viewsets import GenericViewSet - -from axjira.serializers import AXJiraSerializer -from ax.devops.axdb.axops_client import AxopsClient -from ax.devops.axsys.axsys_client import AxsysClient -from ax.devops.jira.jira_client import JiraClient -from ax.devops.jira.jira_utils import translate_jira_issue_event -from ax.exceptions import AXApiInvalidParam, AXApiAuthFailed, AXApiForbiddenReq, AXApiInternalError -from gateway.settings import LOGGER_NAME - -logger = logging.getLogger('{}.{}'.format(LOGGER_NAME, 'jira')) - -DELETE_EVENT = 'jira:issue_deleted' -UPDATE_EVENT = 'jira:issue_updated' - -STATUS_OK = {'status': 'OK'} - -axsys_client = AxsysClient() -axops_client = AxopsClient() - -def init_jira_client(url=None, username=None, password=None): - """ - :param url: - :param username: - :param password: - :return: - """ - def get_jira_configuration(): - js = axops_client.get_tools(category='issue_management', type='jira') - if js: - return {'url': js[0]['url'], - 'username': js[0]['username'], - 'password': js[0]['password'] - } - else: - return dict() - - if url is None or username is None or password is None: - conf = get_jira_configuration() - if not conf: - raise AXApiInvalidParam('No JIRA configured') - else: - url, username, password = conf['url'], conf['username'], conf['password'] - return JiraClient(url, username, password) - -def _query_match(data, query_dict): - """ - :param data: - :param query_dict: - :return: - """ - for k, v in query_dict.items(): - if data.get(k, None) != v: - return False - return True - - -class JiraUserViewSet(GenericViewSet): - - queryset = None - serializer_class = AXJiraSerializer - filtered_users_keys = ('key', 'active', 'fullname', 'email') - jira_client = None - - def list(self, request): - """ - :param request - :return: - """ - if self.jira_client is None: - self.jira_client = init_jira_client() - query_dict = dict() - for pk in self.filtered_users_keys: - pv = request.query_params.get(pk, None) - if pk == 'active': - if pv == 'true': - pv = True - elif pv == 'false': - pv = False - if pv is not None: - query_dict[pk] = pv - - users = self.jira_client.users() - users = [u for u in users if _query_match(u, query_dict)] - return Response({'data': users}) - - -class JiraProjectViewSet(GenericViewSet): - - queryset = None - serializer_class = AXJiraSerializer - filtered_project_keys = ('id', 'key', 'name', 'projectTypeKey') - jira_client = None - - def _normalize_data(self, proj_dict): - """ - :param proj_dict: - :return: - """ - return dict([(k, proj_dict.get(k, None)) for k in self.filtered_project_keys]) - - def list(self, request): - """ - :param request - :return: - """ - if self.jira_client is None: - self.jira_client = init_jira_client() - query_dict = dict() - for pk in self.filtered_project_keys: - pv = request.query_params.get(pk, None) - if pv is not None: - query_dict[pk] = pv - - ps = self.jira_client.get_projects(json_result=True) - ps = [p for p in ps if _query_match(p, query_dict)] - ps = [self._normalize_data(p) for p in ps] - return Response({'data': ps}) - - def retrieve(self, request, pk=None): - """ - :param request: - :param pk: - :return: - 
""" - if self.jira_client is None: - self.jira_client = init_jira_client() - proj = self.jira_client.get_project(pk, json_result=True) - return Response(self._normalize_data(proj)) - - @list_route(methods=['POST',]) - def test(self, request): - """Test connection to Jira server. - - :param request: - :return: - """ - url = request.data.get('url', '').lower() - username = request.data.get('username', None) - password = request.data.get('password', None) - logger.info('Received request (url: %s, username: %s, password: ******)', url, username) - - assert all([url, username, password]), \ - AXApiInvalidParam('Missing required parameters', detail='Required parameters (username, password, url)') - - try: - init_jira_client(url, username, password) - except requests.exceptions.ConnectionError as exc: - raise AXApiInternalError('Invalid URL', detail=str(exc)) - except jira.exceptions.JIRAError as exc: - raise AXApiInternalError('Invalid authentication', detail=str(exc)) - except Exception as exc: - raise AXApiInternalError('Failed to connect to JIRA', detail=str(exc)) - else: - return Response(STATUS_OK) - - -class JiraIssueViewSet(GenericViewSet): - """View set for JIRA issue.""" - - queryset = None - serializer_class = AXJiraSerializer - default_max_results = 3 - filtered_issue_keys = ('project', 'status', 'component', 'labels', 'issuetype', 'priority', - 'creator', 'assignee', 'reporter', 'fixversion', 'affectedversion') - jira_client = None - - def create(self, request): - """ - :param request: - :return: - """ - if self.jira_client is None: - self.jira_client = init_jira_client() - - logger.info('Received jira issue creation request (%s)', request.data) - project = request.data.get('project', None) - summary = request.data.get('summary', None) - issuetype = request.data.get('issuetype', None) - reporter = request.data.get('reporter', None) - - description = request.data.get('description', None) # optional - - if project is None: - raise AXApiInvalidParam('Missing required parameters: Project', - detail='Missing required parameters, Project') - if summary is None: - raise AXApiInvalidParam('Missing required parameters: Summary', - detail='Missing required parameters, Summary') - if issuetype is None: - raise AXApiInvalidParam('Missing required parameters: Issuetype', - detail='Missing required parameters, Issuetype') - if reporter is None: - raise AXApiInvalidParam('Missing required parameters: Reporter', - detail='Missing required parameters, Reporter') - - try: - issue_obj = self.jira_client.create_issue(project, - summary, - issuetype=issuetype, - reporter=reporter, - description=description) - except jira.exceptions.JIRAError as exc: - raise AXApiInternalError('Invalid Parameters', detail=str(exc)) - else: - issue_dict = copy.deepcopy(issue_obj.raw['fields']) - issue_dict['url'] = issue_obj.self - issue_dict['id'] = issue_obj.id - issue_dict['key'] = issue_obj.key - return Response(issue_dict) - - def list(self, request): - """ - :param request: - :return: - """ - if self.jira_client is None: - self.jira_client = init_jira_client() - - query_ids = request.query_params.get('ids', None) - if query_ids is not None: - issues = [] - with ThreadPoolExecutor(max_workers=5) as executor: - futures = [] - id_list = query_ids.strip().split(',') - logger.info('Query the following Jira issues: %s', id_list) - for id in id_list: - futures.append(executor.submit(self.jira_client.get_issue, id.strip(), json_result=True)) - for future in as_completed(futures): - try: - issues.append(future.result()) - 
except Exception as exc: - logger.warn('Unexpected exception %s', exc) - else: - kwargs = dict() - for key in request.query_params.keys(): - if key.lower() in self.filtered_issue_keys: - kwargs[key.lower()] = request.query_params.get(key) - logger.info('Query kwargs: %s:', kwargs) - issues = self.jira_client.query_issues(json_result=True, **kwargs) - - return Response(issues) - - def retrieve(self, request, pk=None): - """ - :param request: - :param pk: - :return: - """ - if self.jira_client is None: - self.jira_client = init_jira_client() - issue = self.jira_client.get_issue(pk, json_result=True) - return Response(issue) - - @detail_route(methods=['GET',]) - def getcomments(self, request, pk=None): - """ - :param request: - :param pk: - :return: - """ - if self.jira_client is None: - self.jira_client = init_jira_client() - max_results = int(request.query_params.get('max_results', self.default_max_results)) - comments = self.jira_client.get_issue_comments(pk, - latest_num=max_results, - json_result=True) - return Response(comments) - - @detail_route(methods=['POST',]) - def addcomment(self, request, pk=None): - """ - :param request: - :param pk: - :return: - """ - if self.jira_client is None: - self.jira_client = init_jira_client() - comment = request.data.get('comment', None) - user = request.data.get('user', None) - - if not comment: - raise AXApiInvalidParam('Require Comment message info') - if not user: - raise AXApiInvalidParam('Require Commenter info') - - try: - self.jira_client.add_issue_comment(pk, comment, commenter=user) - except Exception as exc: - raise AXApiInternalError('Failed to add comment', detail=str(exc)) - return Response(STATUS_OK) - - -class JiraIssueTypeViewSet(GenericViewSet): - """View set for JIRA issue types.""" - - queryset = None - serializer_class = AXJiraSerializer - jira_client = None - - def list(self, request): - """ - :param request: - :return: - """ - if self.jira_client is None: - self.jira_client = init_jira_client() - issue_types = self.jira_client.get_issue_types(json_result=True) - return Response({'data': issue_types}) - - def retrieve(self, request, pk=None): - """ - :param request: - :param pk: - :return: - """ - if self.jira_client is None: - self.jira_client = init_jira_client() - issue_type = self.jira_client.get_issue_type_by_name(pk, json_result=True) - return Response(issue_type) - - -class JiraWebhookViewSet(GenericViewSet): - """View set for JIRA webhook""" - - queryset = None - serializer_class = AXJiraSerializer - jira_client = None - - def list(self, request): - """ - :param request: - :return: - """ - if self.jira_client is None: - self.jira_client = init_jira_client() - - ax_webhooks = self.jira_client.get_ax_webhooks() - return Response({'data': ax_webhooks}) - - def create(self, request): - """ - :param request: - :return: - """ - logger.info('Received jira webhook creation request') - url = request.data.get('url', None) - username = request.data.get('username', None) - password = request.data.get('password', None) - webhook = request.data.get('webhook', None) - projects = request.data.get('projects', None) - - # Create ingress - try: - dnsname = urlparse(webhook).netloc - logger.info('Creating ingress for Jira webhook %s', dnsname) - axsys_client.create_ingress(dnsname) - except Exception as exc: - logger.error('Failed to create ingress for webhook: %s', str(exc)) - raise AXApiInternalError('Failed to create ingress for webhook', str(exc)) - else: - logger.info('Successfully created ingress for webhook') - - # Create webhook - 
self.jira_client = init_jira_client(url=url, username=username, password=password) - try: - if projects: - logger.info('Filtered projects are: %s', projects) - if type(projects) == str: - projects = json.loads(projects) - else: - logger.info('No project filter') - projects = None - wh = self.jira_client.create_ax_webhook(webhook, projects=projects) - except Exception as exc: - logger.exception(exc) - raise AXApiInternalError('Fail to create jira webhooks', detail=str(exc)) - return Response(wh.json()) - - def put(self, request, pk=None): - """ - :param request: - :param pk: - :return: - """ - if self.jira_client is None: - self.jira_client = init_jira_client() - - projects = request.data.get('projects', None) - logger.info('Received jira webhook update request ...') - # Update webhook - try: - if projects: - logger.info('Filtered projects are: %s', projects) - if type(projects) == str: - projects = json.loads(projects) - else: - logger.info('No project filter') - projects = None - self.jira_client.update_ax_webhook(projects) - except Exception as exc: - logger.exception(exc) - raise AXApiInternalError('Fail to update jira webhooks', detail=str(exc)) - else: - logger.info('Successfully updated Jira webhook') - return Response(STATUS_OK) - - def delete(self, request): - """ - :param request: - :return: - """ - if self.jira_client is None: - try: - self.jira_client = init_jira_client() - except Exception as exc: - logger.warn('Could not log into Jira, skip it') - return Response(STATUS_OK) - - wh = self.jira_client.get_ax_webhook() - if not wh: - logger.warn('No webhook on Jira server, ignore it') - return Response(STATUS_OK) - - # Delete ingress - try: - logger.info('Deleting ingress for Jira webhook %s', wh['url']) - axsys_client.delete_ingress(urlparse(wh['url']).netloc) - except Exception as exc: - logger.error('Failed to delete ingress for webhook: %s', str(exc)) - raise AXApiInternalError('Failed to delete ingress for webhook', str(exc)) - else: - logger.info('Successfully deleted ingress for webhook') - # Delete webhook - try: - self.jira_client.delete_ax_webhook() - except Exception as exc: - logger.exception(exc) - raise AXApiInternalError('Fail to delete jira webhooks', detail=str(exc)) - return Response(STATUS_OK) - - -@api_view(['POST']) -def events(request): - """Create a JIRA webhook event. 
- - :param request: - :return: - """ - checked_fields =('description', 'project', 'status', 'summary', 'Key') - - payload = request.data - try: - logger.info('Translating JIRA event ...') - event = translate_jira_issue_event(payload) - except Exception as exc: - logger.error('Failed to translate event: %s', exc) - raise AXApiInternalError('Failed to translate event', detail=str(exc)) - else: - logger.info('Successfully translated event: %s', event) - - try: - if event['type'] == UPDATE_EVENT: - logger.info('The following Jira field(s) get updated: %s', event['changed_fields']) - if event['status_category_id'] == 3: - logger.info('Jira issue %s is closed', event['id']) - logger.info('Delete Jira on AXDB %s', event['id']) - axops_client.delete_jira_issue(event['id']) - elif event['changed_fields'] and any(f in event['changed_fields'] for f in checked_fields): - logger.info('Update Jira content on AXDB ...') - axops_client.update_jira_issue(event['axdb_content']) - else: - logger.info('No Jira content need to be updated') - elif event['type'] == DELETE_EVENT: - logger.info('Delete Jira on AXDB %s', event['id']) - axops_client.delete_jira_issue(event['id']) - else: - logger.warn('Not supported event: (%s), ignore it', event['type']) - except Exception as exc: - raise AXApiInternalError('Failed to update JIRA content on AXDB', detail=str(exc)) - else: - return Response(STATUS_OK) diff --git a/devops/src/ax/devops/apps/gateway/axjira/apps.py b/devops/src/ax/devops/apps/gateway/axjira/apps.py deleted file mode 100644 index 7b3859a87486..000000000000 --- a/devops/src/ax/devops/apps/gateway/axjira/apps.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2015-2017 Applatix, Inc. All rights reserved. -# -# -*- coding: utf-8 -*- -# -from django.apps import AppConfig - - -class AXJiraConfig(AppConfig): - name = 'axjira' diff --git a/devops/src/ax/devops/apps/gateway/axjira/cron.py b/devops/src/ax/devops/apps/gateway/axjira/cron.py deleted file mode 100644 index 6b2449100bff..000000000000 --- a/devops/src/ax/devops/apps/gateway/axjira/cron.py +++ /dev/null @@ -1,88 +0,0 @@ -import logging -import json -import os - -from ax.devops.axdb.axops_client import AxopsClient -from ax.devops.axsys.axsys_client import AxsysClient -from ax.devops.scm_rest.github_client import GitHubClient -from ax.notification_center import CODE_JOB_CI_ELB_CREATION_FAILURE -from gateway.settings import LOGGER_NAME -from gateway.kafka import event_notification_client - -axops_client = AxopsClient() -axsys_client = AxsysClient() -github_client = GitHubClient() -cache_file = '/tmp/github_webhook_whitelist' - -logger = logging.getLogger('{}.{}'.format(LOGGER_NAME, 'jira_cron')) - - -def check_github_whitelist(): - """ - :return: - """ - if not is_github_webhook_enabled(): - logger.info('No GitHub webhook configured') - return - - configured = get_from_cache() - logger.info('The configured GitHub webhook whitelist is %s', configured) - advertised = github_client.get_webhook_whitelist() - logger.info('The GitHub webhook whitelist is %s', advertised) - if set(configured) == set(advertised): - logger.info('No update needed') - else: - # Create ELB - payload = {'ip_range': advertised, 'external_port': 8443, 'internal_port': 8087} - try: - logger.info('Creating ELB for webhook ...') - axsys_client.create_webhook(**payload) - except Exception as exc: - logger.error('Failed to create ELB for webhook: %s', str(exc)) - event_notification_client.send_message_to_notification_center(CODE_JOB_CI_ELB_CREATION_FAILURE, 
detail=payload) - else: - # Update cache - write_to_cache(advertised) - logger.info('Successfully updated ELB for webhook') - - -def is_github_webhook_enabled(): - """ Check whether the webhook is configured or not - :return: - """ - github_data = axops_client.get_tools(type='github') - use_webhook = [each for each in github_data if each['use_webhook']] - return bool(use_webhook) - - -def write_to_cache(ip_range): - """ Store the webhook whitelist info - :param ip_range: - :return: - """ - with open(cache_file, 'w+') as f: - f.write(json.dumps((ip_range))) - - -def get_from_cache(): - """ Get cached webhook whitelist info, otherwise get from axmon - :return: - """ - if os.path.exists(cache_file): - with open(cache_file, 'r+') as f: - data = f.readlines() - ip_range = json.loads(data[0]) - else: - logger.debug('No cache file') - try: - data = axsys_client.get_webhook() - except Exception as exc: - logger.warn(exc) - else: - logger.info('Write whitelist info to cache file') - ip_range = data['ip_ranges'] - write_to_cache(ip_range) - return ip_range - - - diff --git a/devops/src/ax/devops/apps/gateway/axjira/models.py b/devops/src/ax/devops/apps/gateway/axjira/models.py deleted file mode 100644 index 4388faa37fe6..000000000000 --- a/devops/src/ax/devops/apps/gateway/axjira/models.py +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2015-2017 Applatix, Inc. All rights reserved. -# -# -*- coding: utf-8 -*- -# -from django.db import models - - -class AXJira(models.Model): - """DevOps jira. - - Currently, this model is only used for enabling REST API. - """ diff --git a/devops/src/ax/devops/apps/gateway/axjira/serializers.py b/devops/src/ax/devops/apps/gateway/axjira/serializers.py deleted file mode 100644 index 2f59f463f5a3..000000000000 --- a/devops/src/ax/devops/apps/gateway/axjira/serializers.py +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2015-2017 Applatix, Inc. All rights reserved. -# -# -*- coding: utf-8 -*- -# -from axjira.models import AXJira - -from rest_framework import serializers - - -class AXJiraSerializer(serializers.ModelSerializer): - """Serializer for jira.""" - - class Meta: - model = AXJira diff --git a/devops/src/ax/devops/apps/gateway/gateway/__init__.py b/devops/src/ax/devops/apps/gateway/gateway/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/devops/src/ax/devops/apps/gateway/gateway/exceptions.py b/devops/src/ax/devops/apps/gateway/gateway/exceptions.py deleted file mode 100644 index c35b6684d1fb..000000000000 --- a/devops/src/ax/devops/apps/gateway/gateway/exceptions.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2015-2017 Applatix, Inc. All rights reserved. 
-# -# -*- coding: utf-8 -*- -# -from rest_framework import exceptions as rest_exceptions -from rest_framework.response import Response - -from ax import exceptions as ax_exceptions - -exception_mapping = { - rest_exceptions.APIException: ax_exceptions.AXApiInternalError, - rest_exceptions.AuthenticationFailed: ax_exceptions.AXApiAuthFailed, - rest_exceptions.MethodNotAllowed: ax_exceptions.AXApiResourceNotFound, # Method not allowed is casted to resource not found - rest_exceptions.NotAuthenticated: ax_exceptions.AXApiAuthFailed, - rest_exceptions.NotFound: ax_exceptions.AXApiResourceNotFound, - rest_exceptions.ParseError: ax_exceptions.AXApiInvalidParam, - rest_exceptions.PermissionDenied: ax_exceptions.AXApiForbiddenReq, - rest_exceptions.ValidationError: ax_exceptions.AXApiInvalidParam -} - - -def ax_exception_handler(e, context): - """Customize return error code. - - :param e: - :param context: - :return: - """ - if not isinstance(e, ax_exceptions.AXException): - exception_class = exception_mapping.get(e.__class__, ax_exceptions.AXApiInternalError) - try: - e = exception_class(e.detail) - except AttributeError: - e = exception_class(str(e)) - return Response(e.json(), status=e.status_code) diff --git a/devops/src/ax/devops/apps/gateway/gateway/kafka.py b/devops/src/ax/devops/apps/gateway/gateway/kafka.py deleted file mode 100644 index 0a3926bfe5db..000000000000 --- a/devops/src/ax/devops/apps/gateway/gateway/kafka.py +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2015-2017 Applatix, Inc. All rights reserved. -# -# -*- coding: utf-8 -*- -# -from ax.devops.kafka.kafka_client import EventNotificationClient -from ax.notification_center import FACILITY_GATEWAY - -event_notification_client = EventNotificationClient(FACILITY_GATEWAY) diff --git a/devops/src/ax/devops/apps/gateway/gateway/settings.py b/devops/src/ax/devops/apps/gateway/gateway/settings.py deleted file mode 100644 index 265468a0d93f..000000000000 --- a/devops/src/ax/devops/apps/gateway/gateway/settings.py +++ /dev/null @@ -1,185 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2015-2017 Applatix, Inc. All rights reserved. 
-# -# -*- coding: utf-8 -*- -# -import os -import sys - -# Standard default Django settings - -BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) - -SECRET_KEY = 'y&h50xpbsgujool%k2w7w-7c$i8dhqub3dr$l@h&3$^cx!uvzi' - -MIDDLEWARE_CLASSES = [ - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.auth.middleware.SessionAuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', -] - -TEMPLATES = [ - { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - ], - }, - }, -] - -AUTH_PASSWORD_VALIDATORS = [ - { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', - }, -] - -WSGI_APPLICATION = 'gateway.wsgi.application' - -LANGUAGE_CODE = 'en-us' - -USE_I18N = True - -USE_L10N = True - -USE_TZ = True - -# AX customized settings - -DEBUG = False - -ALLOWED_HOSTS = ['*'] - -# AX home -AXHOME = os.environ['AXHOME'] - -# Database settings - -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.sqlite3', - 'NAME': os.path.join(AXHOME, 'db', 'db.sqlite3'), - } -} - -# Root URL configuration - -ROOT_URLCONF = 'gateway.urls' - -# Time zone - -TIME_ZONE = 'UTC' - -# Static file URL -STATIC_URL = '/static/' - -# Applications - -INSTALLED_APPS = [ - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'django_crontab', - 'django_extensions', - 'django_filters', - 'axjira', - 'rest_framework', - 'result', - 'scm', -] - -# Date / time format - -DATETIME_FORMAT = 'c' - -DATE_FORMAT = 'Y-m-d' - -TIME_FORMAT = 'H:i:s' - -# Logging - -LOG_ROOT = os.path.join(AXHOME, 'log') - -LOG_LEVEL = os.environ.get('DJANGO_LOGLEVEL') or ('DEBUG' if DEBUG else 'INFO') - -LOGGER_NAME = 'ax.devops.gateway' - -LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'formatters': { - 'verbose': { - 'format': "[%(asctime)s::%(levelname)s::%(name)s::%(lineno)s] %(message)s", - 'datefmt': "%Y-%m-%dT%H:%M:%S" - } - }, - 'handlers': { - 'console': { - 'level': LOG_LEVEL, - 'class': 'logging.StreamHandler', - 'stream': sys.stdout, - 'formatter': 'verbose' - }, - }, - 'loggers': { - LOGGER_NAME: { - 'handlers': ['console'], - 'level': LOG_LEVEL, - 'propagate': True, - }, - 'django': { - 'handlers': ['console'], - 'level': LOG_LEVEL, - 'propagate': True, - } - } -} - -# REST framework settings - -REST_FRAMEWORK = { - 'DEFAULT_AUTHENTICATION_CLASSES': ( - 'rest_framework.authentication.SessionAuthentication', - ), - 'DEFAULT_PERMISSION_CLASSES': ( - 'rest_framework.permissions.AllowAny', - ), - 'DEFAULT_FILTER_BACKENDS': ( - 'rest_framework.filters.DjangoFilterBackend', - 'rest_framework.filters.OrderingFilter', 
- ), - 'DEFAULT_RENDERER_CLASSES': ( - 'rest_framework.renderers.JSONRenderer', - ), - 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination', - 'DEFAULT_VERSIONING_CLASS': 'rest_framework.versioning.NamespaceVersioning', - 'EXCEPTION_HANDLER': 'gateway.exceptions.ax_exception_handler' -} - -CRONJOBS = [ - ('*/15 * * * *', 'axjira.cron.check_github_whitelist', '>> /var/log/cron.log 2>&1') -] \ No newline at end of file diff --git a/devops/src/ax/devops/apps/gateway/gateway/urls.py b/devops/src/ax/devops/apps/gateway/gateway/urls.py deleted file mode 100644 index 7444771c3a62..000000000000 --- a/devops/src/ax/devops/apps/gateway/gateway/urls.py +++ /dev/null @@ -1,38 +0,0 @@ -from django.conf.urls import url, include - -from rest_framework import routers - -from gateway.views import hello_world, resource_not_found -from axjira.api import events, JiraIssueViewSet, JiraIssueTypeViewSet, JiraProjectViewSet, JiraUserViewSet, JiraWebhookViewSet -from result.api import ResultViewSet -from scm.api import branches, commit, commits, files, SCMViewSet - - -class AXRouter(routers.DefaultRouter): - def get_urls(self): - urls_static = [ - url(r'^$', hello_world), - url(r'^jira/events$', events), - url(r'^scm/branches$', branches), - url(r'^scm/commits$', commits), - url(r'^scm/commits/(?P[a-z0-9]+)$', commit), - url(r'^scm/files$', files) - ] - urls_dynamic = super(AXRouter, self).get_urls() - return urls_static + urls_dynamic - - -router_v1 = AXRouter(trailing_slash=False) -router_v1.register(r'jira/issues', JiraIssueViewSet, base_name='jira-issue') -router_v1.register(r'jira/issuetypes', JiraIssueTypeViewSet, base_name='jira-issuetype') -router_v1.register(r'jira/projects', JiraProjectViewSet, base_name='jira-project') -router_v1.register(r'jira/users', JiraUserViewSet, base_name='jira-user') -router_v1.register(r'jira/webhooks', JiraWebhookViewSet, base_name='jira-webhook') -router_v1.register(r'results', ResultViewSet) -router_v1.register(r'scm', SCMViewSet) - -urlpatterns = [ - url(r'^$', hello_world), - url(r'^v1/', include(router_v1.urls, namespace='v1')), - url(r'^.*$', resource_not_found) -] diff --git a/devops/src/ax/devops/apps/gateway/gateway/views.py b/devops/src/ax/devops/apps/gateway/gateway/views.py deleted file mode 100644 index 62d110c4abdb..000000000000 --- a/devops/src/ax/devops/apps/gateway/gateway/views.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2015-2016 Applatix, Inc. All rights reserved. -# -# -*- coding: utf-8 -*- -# -import random - -from django.shortcuts import redirect - -from rest_framework.decorators import api_view -from rest_framework.exceptions import NotFound -from rest_framework.response import Response - - -@api_view(['GET']) -def hello_world(request): - """A hello world API for user to test if gateway is up. - - :param request: - :return: - """ - messages = [ - 'Hello!', - 'Greetings!', - 'Nice to see you!', - 'Welcome!', - ] - return Response({'message': random.choice(messages)}) - - -@api_view(['GET', 'POST', 'PUT', 'DELETE', 'HEAD', 'OPTIONS', 'PATCH']) -def resource_not_found(request): - """An API for all requests not matching any URLs. 
- - :param request: - :return: - """ - if not request.path.endswith('/'): - return redirect(request.path + '/') - else: - raise NotFound('Requested resource ({}) not found'.format(request.path)) diff --git a/devops/src/ax/devops/apps/gateway/gateway/wsgi.py b/devops/src/ax/devops/apps/gateway/gateway/wsgi.py deleted file mode 100644 index ea426b2ff793..000000000000 --- a/devops/src/ax/devops/apps/gateway/gateway/wsgi.py +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2015-2016 Applatix, Inc. All rights reserved. -# -# -*- coding: utf-8 -*- -# -import os - -from django.core.wsgi import get_wsgi_application - -os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gateway.settings") - -application = get_wsgi_application() diff --git a/devops/src/ax/devops/apps/gateway/manage.py b/devops/src/ax/devops/apps/gateway/manage.py deleted file mode 100755 index 0e04f5f58c76..000000000000 --- a/devops/src/ax/devops/apps/gateway/manage.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2015-2016 Applatix, Inc. All rights reserved. -# -# -*- coding: utf-8 -*- -# - -from ax.util.az_patch import az_patch -az_patch() - -import os -import sys - - -if __name__ == "__main__": - os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gateway.settings") - - from django.core.management import execute_from_command_line - - execute_from_command_line(sys.argv) diff --git a/devops/src/ax/devops/apps/gateway/result/__init__.py b/devops/src/ax/devops/apps/gateway/result/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/devops/src/ax/devops/apps/gateway/result/api.py b/devops/src/ax/devops/apps/gateway/result/api.py deleted file mode 100644 index 340a4f541fec..000000000000 --- a/devops/src/ax/devops/apps/gateway/result/api.py +++ /dev/null @@ -1,109 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2015-2016 Applatix, Inc. All rights reserved. 
-# -# -*- coding: utf-8 -*- -# -import json -import jwt -import logging -import requests -import time - -from django.shortcuts import redirect -from rest_framework.mixins import ListModelMixin, CreateModelMixin, RetrieveModelMixin, DestroyModelMixin -from rest_framework.viewsets import GenericViewSet -from rest_framework.decorators import detail_route, list_route -from rest_framework.response import Response - -from gateway.settings import LOGGER_NAME -from result.models import Result -from result.serializers import ResultSerializer - -from ax.devops.axdb.axdb_client import AxdbClient -from ax.devops.redis.redis_client import RedisClient, DB_RESULT -from ax.devops.kafka.kafka_client import EventNotificationClient -from ax.exceptions import AXApiInvalidParam -from ax.notification_center import FACILITY_PLATFORM, CODE_PLATFORM_ERROR - -logger = logging.getLogger('{}.{}'.format(LOGGER_NAME, 'result')) - -axdb_client = AxdbClient() -redis_client = RedisClient(host='redis.axsys', db=DB_RESULT, retry_max_attempt=10, retry_wait_fixed=5000) -event_notification_client = EventNotificationClient(FACILITY_PLATFORM) - - -class ResultViewSet(ListModelMixin, CreateModelMixin, RetrieveModelMixin, DestroyModelMixin, GenericViewSet): - """View set for result.""" - - queryset = Result.objects.all() - serializer_class = ResultSerializer - - @detail_route(methods=['GET', ]) - def approval(self, request, *args, **kwargs): - """Save an approval result in redis.""" - token = request.query_params.get('token', None) - result = jwt.decode(token, 'ax', algorithms=['HS256']) - result['timestamp'] = int(time.time()) - - logger.info("Decode token {}, \n to {}".format(token, json.dumps(result, indent=2))) - - # differentiate key for approval result from the task result - uuid = result['leaf_id'] + '-axapproval' - try: - logger.info("Setting approval result (%s) to Redis ...", uuid) - try: - state = axdb_client.get_approval_info(root_id=result['root_id'], leaf_id=result['leaf_id']) - if state and state[0]['result'] != 'WAITING': - return redirect("https://{}/error/404/type/ERR_AX_ILLEGAL_OPERATION;msg=The%20link%20is%20no%20longer%20valid.".format(result['dns'])) - - if axdb_client.get_approval_results(leaf_id=result['leaf_id'], user=result['user']): - return redirect("https://{}/error/404/type/ERR_AX_ILLEGAL_OPERATION;msg=Response%20has%20already%20been%20submitted.".format(result['dns'])) - - # push result to redis (brpop) - redis_client.rpush(uuid, value=result, encoder=json.dumps) - except Exception as exc: - logger.exception(exc) - pass - # save result to axdb - axdb_client.create_approval_results(leaf_id=result['leaf_id'], - root_id=result['root_id'], - result=result['result'], - user=result['user'], - timestamp=result['timestamp']) - except Exception as e: - msg = 'Failed to save approval result to Redis: {}'.format(e) - logger.error(msg) - raise - else: - logger.info('Successfully saved result to Redis') - return redirect("https://{}/success/201;msg=Response%20has%20been%20submitted%20successfully.".format(result['dns'])) - - @list_route(methods=['PUT', ]) - def test_nexus_credential(self, request): - logger.info('Received testing request (payload: %s)', request.data) - username = request.data.get('username', "") - password = request.data.get('password', "") - port = request.data.get('port', 8081) - hostname = request.data.get('hostname', None) - - if not hostname: - raise AXApiInvalidParam('Missing required parameters: Hostname', detail='Missing required parameters, hostname') - - response = 
requests.get('{}:{}/nexus/service/local/users'.format(hostname, port), auth=(username, password), timeout=10) - - if response.ok: - return Response({}) - else: - response.raise_for_status() - - @list_route(methods=['POST', ]) - def redirect_notification_center(self, request): - logger.info('Received redirecting nc request (payload: %s)', request.data) - detail = request.data.get('detail', "") - try: - event_notification_client.send_message_to_notification_center(CODE_PLATFORM_ERROR, detail={'message': detail}) - except Exception: - logger.exception("Failed to send event to notification center") - raise - return Response({}) diff --git a/devops/src/ax/devops/apps/gateway/result/apps.py b/devops/src/ax/devops/apps/gateway/result/apps.py deleted file mode 100644 index ac9c9c599e19..000000000000 --- a/devops/src/ax/devops/apps/gateway/result/apps.py +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2015-2016 Applatix, Inc. All rights reserved. -# -# -*- coding: utf-8 -*- -# -from django.apps import AppConfig - - -class ResultConfig(AppConfig): - name = 'result' diff --git a/devops/src/ax/devops/apps/gateway/result/models.py b/devops/src/ax/devops/apps/gateway/result/models.py deleted file mode 100644 index d19cd5fe5b2e..000000000000 --- a/devops/src/ax/devops/apps/gateway/result/models.py +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2015-2016 Applatix, Inc. All rights reserved. -# -# -*- coding: utf-8 -*- -# -from django.db import models - - -class Result(models.Model): - """DevOps result. - - Currently, this model is only used for enabling REST API. - """ diff --git a/devops/src/ax/devops/apps/gateway/result/serializers.py b/devops/src/ax/devops/apps/gateway/result/serializers.py deleted file mode 100644 index 78be1a4c51fb..000000000000 --- a/devops/src/ax/devops/apps/gateway/result/serializers.py +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2015-2016 Applatix, Inc. All rights reserved. 
-# -# -*- coding: utf-8 -*- -# -from result.models import Result - -from rest_framework import serializers - - -class ResultSerializer(serializers.ModelSerializer): - """Serializer for result.""" - - class Meta: - model = Result diff --git a/devops/src/ax/devops/apps/gateway/scm/__init__.py b/devops/src/ax/devops/apps/gateway/scm/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/devops/src/ax/devops/apps/gateway/scm/api.py b/devops/src/ax/devops/apps/gateway/scm/api.py deleted file mode 100644 index 7e5bc333c724..000000000000 --- a/devops/src/ax/devops/apps/gateway/scm/api.py +++ /dev/null @@ -1,1018 +0,0 @@ -import boto3 -import copy -import datetime -import json -import logging -import os -import re -import shutil -import socket -from concurrent.futures import ThreadPoolExecutor, as_completed -from retrying import retry -from urllib.parse import unquote - -from rest_framework.decorators import list_route, api_view -from rest_framework.response import Response -from rest_framework.viewsets import GenericViewSet - -from gateway.kafka import event_notification_client -from gateway.settings import LOGGER_NAME -from scm.models import SCM -from scm.serializers import SCMSerializer - -from ax.devops.apps.workers.repo_manager import BASE_DIR -from ax.devops.axdb.axdb_client import AxdbClient -from ax.devops.axdb.axops_client import AxopsClient -from ax.devops.axsys.axsys_client import AxsysClient -from ax.devops.ci.constants import AxEventTypes, ScmVendors -from ax.devops.ci.event_translators import EventTranslator -from ax.devops.kafka.kafka_client import ProducerClient -from ax.devops.redis.redis_client import RedisClient, DB_REPORTING -from ax.devops.scm.scm import GitClient -from ax.devops.scm_rest.bitbucket_client import BitBucketClient -from ax.devops.scm_rest.github_client import GitHubClient -from ax.devops.scm_rest.gitlab_client import GitLabClient -from ax.devops.settings import AxSettings -from ax.devops.utility.utilities import AxPrettyPrinter, parse_repo, top_k, sort_str_dictionaries -from ax.exceptions import AXApiInvalidParam, AXApiAuthFailed, AXApiForbiddenReq, AXApiInternalError -from ax.notification_center import CODE_JOB_CI_STATUS_REPORTING_FAILURE, CODE_JOB_CI_ELB_CREATION_FAILURE, \ - CODE_JOB_CI_ELB_VERIFICATION_TIMEOUT, CODE_JOB_CI_WEBHOOK_CREATION_FAILURE, CODE_JOB_CI_ELB_DELETION_FAILURE, \ - CODE_JOB_CI_WEBHOOK_DELETION_FAILURE, CODE_JOB_CI_EVENT_CREATION_FAILURE, CODE_JOB_CI_EVENT_TRANSLATE_FAILURE, \ - CODE_JOB_CI_YAML_UPDATE_FAILURE, CODE_CONFIGURATION_SCM_CONNECTION_ERROR - -logger = logging.getLogger('{}.{}'.format(LOGGER_NAME, 'scm')) - -TYPE_BITBUCKET = ScmVendors.BITBUCKET -TYPE_GITHUB = ScmVendors.GITHUB -TYPE_GITLAB = ScmVendors.GITLAB -TYPE_GIT = ScmVendors.GIT -TYPE_CODECOMMIT = ScmVendors.CODECOMMIT -SUPPORTED_TYPES = { - TYPE_BITBUCKET, - TYPE_GITHUB, - TYPE_GITLAB, - TYPE_GIT, - TYPE_CODECOMMIT -} -NAMESPACE = 'gateway' -BRANCH_CACHE_TTL = 5 * 60 # 5 minutes TTL as we expect we won't finish upgrade within 5 minutes - -CLUSTER_NAME_ID = os.environ.get('AX_CLUSTER') -CUSTOMER_ID = os.environ.get('AX_CUSTOMER_ID') -S3_BUCKET_NAME = 'applatix-cluster-{account}-{seq}'.format(account=CUSTOMER_ID, seq=0) -s3_bucket = boto3.resource('s3').Bucket(S3_BUCKET_NAME) - -axdb_client = AxdbClient() -axops_client = AxopsClient() -axsys_client = AxsysClient() -redis_client = RedisClient('redis', db=DB_REPORTING) - - -class SCMViewSet(GenericViewSet): - """View set for SCM.""" - - queryset = SCM.objects.all() - serializer_class = 
SCMSerializer - - scm_clients = { - ScmVendors.BITBUCKET: BitBucketClient(), - ScmVendors.GITHUB: GitHubClient(), - ScmVendors.GITLAB: GitLabClient() - } - supported_protocols = {'https'} - - @list_route(methods=['POST', ]) - def test(self, request): - """Test connection to SCM server. - - :param request: - :return: - """ - scm_type = request.data.get('type', '').lower() - url = request.data.get('url', '').lower() - username = request.data.get('username', None) - password = request.data.get('password', None) - logger.info('Received request (type: %s, url: %s, username: %s, password: ******)', scm_type, url, username) - if not scm_type: - raise AXApiInvalidParam('Missing required parameters', detail='Required parameters (type)') - if scm_type not in SUPPORTED_TYPES: - raise AXApiInvalidParam('Invalid parameter values', detail='Unsupported type ({})'.format(scm_type)) - if scm_type == TYPE_GIT: - assert url, AXApiInvalidParam('Missing required parameters', - detail='Require parameter (url) when type is {}'.format(TYPE_GIT)) - else: - assert all([username, password]), AXApiInvalidParam('Missing required parameters', - detail='Required parameters (username, password, url)') - try: - repos = self.get_repos(scm_type, url, username, password) - except Exception as e: - logger.warning('Failed to get repositories: %s', e) - raise AXApiInternalError('Failed to get repositories', detail=str(e)) - else: - return Response({'repos': repos}) - - def get_repos(self, scm_type, url, username, password): - """Get all repos owned by the user. - - :param scm_type: - :param url: - :param username: - :param password: - :return: - """ - if scm_type in {TYPE_BITBUCKET, TYPE_GITHUB, TYPE_GITLAB}: - try: - repos = self.scm_clients[scm_type].get_repos(username, password) - except Exception as e: - logger.warning('Unable to connect to %s: %s', scm_type, e) - detail = { - 'type': scm_type, - 'username': username, - 'error': str(e.detail) - } - event_notification_client.send_message_to_notification_center(CODE_CONFIGURATION_SCM_CONNECTION_ERROR, - detail=detail) - raise AXApiInvalidParam('Cannot connect to %s server' % scm_type) - else: - return repos - elif scm_type == TYPE_GIT: - _, vendor, repo_owner, repo_name = parse_repo(url) - path = '/tmp/{}/{}/{}'.format(vendor, repo_owner, repo_name) - if os.path.isfile(path): - os.remove(path) - if os.path.isdir(path): - shutil.rmtree(path) - os.makedirs(path) - client = GitClient(path=path, repo=url, username=username, password=password) - try: - client.list_remote() - except Exception as e: - logger.warning('Unable to connect to git server (%s): %s', url, e) - detail = { - 'type': scm_type, - 'url': url, - 'username': username, - 'error': str(e) - } - event_notification_client.send_message_to_notification_center(CODE_CONFIGURATION_SCM_CONNECTION_ERROR, - detail=detail) - raise AXApiInvalidParam('Cannot connect to git server') - else: - return {url: url} - elif scm_type == TYPE_CODECOMMIT: - repos = {} - region = 'us-east-1' - default_url_format = 'https://git-codecommit.{}.amazonaws.com/v1/repos/{}' - client = boto3.client('codecommit', aws_access_key_id=username, aws_secret_access_key=password, - region_name=region) - try: - response = client.list_repositories().get('repositories', []) - for r in response: - repo_url = default_url_format.format(region, r['repositoryName']) - repos[repo_url] = repo_url - except Exception as exc: - detail = { - 'type': scm_type, - 'region': region, - 'url': default_url_format.format(region, ''), - 'username': username, - 'error': 'Cannot 
connect to CodeCommit' + str(exc) - } - event_notification_client.send_message_to_notification_center(CODE_CONFIGURATION_SCM_CONNECTION_ERROR, - detail=detail) - raise AXApiInvalidParam('Cannot connect to CodeCommit: %s' % exc) - else: - return repos - else: - return {} - - @list_route(methods=['POST', ]) - def events(self, request): - """Create a DevOps event. - - :param request: - :return: - """ - payload, headers = request.data, request.META - try: - logger.info('Translating SCM event ...') - events = EventTranslator.translate(payload, headers) - except Exception as e: - logger.error('Failed to translate event: %s', e) - # Todo Tianhe Issue: #330 comment out for now because it is distracting - # event_notification_client.send_message_to_notification_center(CODE_JOB_CI_EVENT_TRANSLATE_FAILURE, - # detail={ - # 'payload': payload, - # 'error': str(e) - # }) - raise AXApiInternalError('Failed to translate event', detail=str(e)) - else: - logger.info('Successfully translated event') - - kafka_client = ProducerClient() - successful_events = [] - for event in events: - if event['type'] == AxEventTypes.PING: - logger.info('Received a PING event, skipping service creation ...') - continue - else: - try: - logger.info('Creating AX event ...\n%s', AxPrettyPrinter().pformat(event)) - key = '{}_{}_{}'.format(event['repo'], event['branch'], event['commit']) - kafka_client.send(AxSettings.TOPIC_DEVOPS_CI_EVENT, key=key, value=event, timeout=120) - except Exception as e: - event_notification_client.send_message_to_notification_center(CODE_JOB_CI_EVENT_CREATION_FAILURE, - detail={ - 'event_type': event.get('type', 'UNKNOWN'), - 'error': str(e) - }) - logger.warning('Failed to create AX event: %s', e) - else: - logger.info('Successfully created AX event') - successful_events.append(event) - kafka_client.close() - return Response(successful_events) - - @list_route(methods=['POST']) - def reports(self, request): - """Report build/test status to source control tool. - - :param request: - :return: - """ - logger.info('Received reporting request (payload: %s)', request.data) - id = request.data.get('id') - repo = request.data.get('repo') - if not id: - raise AXApiInvalidParam('Missing required parameters', detail='Required parameters (id)') - - try: - if not repo: - cache = redis_client.get(request.data['id'], decoder=json.loads) - repo = cache['repo'] - vendor = axops_client.get_tool(repo)['type'] - if vendor not in self.scm_clients.keys(): - raise AXApiInvalidParam('Invalid parameter values', detail='Unsupported type ({})'.format(vendor)) - result = self.scm_clients[vendor].upload_job_result(request.data) - if result == -1: - logger.info('GitHub does not support status report for the non-sha commits. Skip.') - except Exception as e: - logger.error('Failed to report status: %s', e) - event_notification_client.send_message_to_notification_center(CODE_JOB_CI_STATUS_REPORTING_FAILURE, - detail=request.data) - raise AXApiInternalError('Failed to report status', detail=str(e)) - else: - logger.info('Successfully reported status') - return Response(result) - - @staticmethod - def _has_webhook(repo): - """Test if there is any repo which uses webhook. 
- - :param repo: - :return: - """ - tools = axops_client.get_tools(category='scm') - for i in range(len(tools)): - use_webhook = tools[i].get('use_webhook', False) - repos = set(tools[i].get('repos', [])) - repos -= {repo} - if use_webhook and repos: - return True - return False - - def _get_webhook(self, vendor, repo): - """Get webhook - - :param vendor: - :param repo: - :returns: - """ - logger.info('Retrieving webhook (repo: %s) ...', repo) - return self.scm_clients[vendor].get_webhook(repo) - - def _create_webhook(self, vendor, repo): - """Create webhook - - :param vendor: - :param repo: - :returns: - """ - - @retry(wait_fixed=5000, stop_max_delay=20 * 60 * 1000) - def _verify_elb(hostname): - try: - logger.info('Verifying ELB (%s) ...', hostname) - ip = socket.gethostbyname(hostname) - logger.info('Successfully resolved ELB (%s) to IP (%s)', hostname, ip) - except Exception as e: - logger.error('ELB not ready: %s', str(e)) - raise AXApiInternalError('ELB not ready', str(e)) - - ip_range = self.scm_clients[vendor].get_webhook_whitelist() - - # Create ELB - payload = {'ip_range': ip_range, 'external_port': 8443, 'internal_port': 8087} - try: - logger.info('Creating ELB for webhook ...') - result = axsys_client.create_webhook(**payload) - except Exception as e: - logger.error('Failed to create ELB for webhook: %s', str(e)) - event_notification_client.send_message_to_notification_center(CODE_JOB_CI_ELB_CREATION_FAILURE, - detail=payload) - raise AXApiInternalError('Failed to create ELB for webhook', str(e)) - else: - logger.info('Successfully created ELB for webhook') - - # Verify ELB - hostname = result['hostname'] - try: - _verify_elb(hostname) - except Exception as e: - logger.error('Timed out on waiting for ELB to be available: %s', str(e)) - event_notification_client.send_message_to_notification_center(CODE_JOB_CI_ELB_VERIFICATION_TIMEOUT, - detail={'hostname': hostname}) - raise AXApiInternalError('Timed out on waiting for ELB to be available: %s' % str(e)) - - # Create webhook - try: - logger.info('Creating webhook (repo: %s) ...', repo) - self.scm_clients[vendor].create_webhook(repo) - except AXApiAuthFailed as e: - logger.error('Invalid credential supplied') - detail = { - 'repo': repo, - 'error': 'Invalid credential supplied:' + str(e) - } - event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_CREATION_FAILURE, - detail=detail) - raise AXApiInvalidParam('User authentication failed', detail=str(e)) - except AXApiForbiddenReq as e: - logger.error('Supplied credential is valid but having insufficient permission') - detail = { - 'repo': repo, - 'error': 'Supplied credential is valid but having insufficient permission:' + str(e) - } - event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_CREATION_FAILURE, - detail=detail) - raise AXApiInvalidParam('User has insufficient permission', detail=str(e)) - except Exception as e: - logger.error('Failed to configure webhook: %s', e) - detail = { - 'repo': repo, - 'error': 'Failed to configure webhook:' + str(e) - } - event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_CREATION_FAILURE, - detail=detail) - raise AXApiInternalError('Failed to configure webhook', str(e)) - else: - logger.info('Successfully created webhook (repo: %s)', repo) - return {} - - def _delete_webhook(self, vendor, repo): - """Delete webhook - - :param vendor: - :param repo: - :returns: - """ - # Delete webhook - try: - logger.info('Deleting webhook (repo: %s) ...', repo) - 
self.scm_clients[vendor].delete_webhook(repo) - except AXApiAuthFailed as e: - logger.error('Invalid credential supplied') - detail = { - 'repo': repo, - 'error': 'Invalid credential supplied:' + str(e) - } - event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_DELETION_FAILURE, - detail=detail) - raise AXApiInvalidParam('User authentication failed', detail=str(e)) - except AXApiForbiddenReq as e: - logger.error('Supplied credential is valid but having insufficient permission') - detail = { - 'repo': repo, - 'error': 'Supplied credential is valid but having insufficient permission:' + str(e) - } - event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_DELETION_FAILURE, - detail=detail) - raise AXApiInvalidParam('User has insufficient permission', detail=str(e)) - except Exception as e: - logger.error('Failed to delete webhook: %s', e) - detail = { - 'repo': repo, - 'error': 'Failed to delete webhook:' + str(e) - } - event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_DELETION_FAILURE, - detail=detail) - raise AXApiInternalError('Failed to delete webhook', str(e)) - else: - logger.info('Successfully deleted webhook (repo: %s)', repo) - - # Delete ELB - try: - if not self._has_webhook(repo): - logger.info('Deleting ELB for webhook ...') - axsys_client.delete_webhook() - except Exception as e: - logger.error('Failed to delete ELB for webhook: %s', str(e)) - detail = {'repo': repo, - 'error': 'Failed to delete ELB for webhook' + str(e) - } - event_notification_client.send_message_to_notification_center(CODE_JOB_CI_ELB_DELETION_FAILURE, - detail=detail) - raise AXApiInternalError('Failed to delete ELB for webhook', str(e)) - else: - logger.info('Successfully deleted ELB for webhook') - return {} - - @list_route(methods=['GET', 'POST', 'DELETE']) - def webhooks(self, request): - """Create / delete a webhook. - - :param request:Translating - :return: - """ - repo = request.data.get('repo') - vendor = request.data.get('type') - username = request.data.get('username') - password = request.data.get('password') - if not all([repo, vendor]): - raise AXApiInvalidParam('Missing required parameters', detail='Required parameters (repo, type)') - if vendor not in self.scm_clients.keys(): - raise AXApiInvalidParam('Invalid parameter values', detail='Unsupported type ({})'.format(vendor)) - - if username and password: - self.scm_clients[vendor].update_repo_info(repo, vendor, username, password) - if request.method == 'GET': - result = self._get_webhook(vendor, repo) - elif request.method == 'POST': - result = self._create_webhook(vendor, repo) - else: - result = self._delete_webhook(vendor, repo) - return Response(result) - - @list_route(methods=['POST']) - def yamls(self, request): - """Update YAML contents (i.e. policy, template). - - :param request: - :return: - """ - vendor = request.data.get('type') - repo = request.data.get('repo') - branch = request.data.get('branch') - if not all([vendor, repo, branch]): - raise AXApiInvalidParam('Missing required parameters', detail='Required parameters (type, repo, branch)') - if vendor not in self.scm_clients.keys(): - raise AXApiInvalidParam('Invalid parameter values', detail='Unsupported type ({})'.format(vendor)) - - try: - # The arrival of events may not always be in the natural order of commits. For - # example, the user may resent an old event from UI of source control tool. In - # this case, we may update the YAML contents to an older version. 
To avoid this, - # we guarantee that every YAML update will only update the content to the latest - # version on a branch. More specifically, whenever we receive an event, we extract - # the repo and branch information, and find the HEAD of the branch. Then, we use - # the commit of HEAD to retrieve the YAML content, and update policies/templates - # correspondingly. - scm_client = self.scm_clients[vendor] - commit = scm_client.get_branch_head(repo, branch) - yaml_files = scm_client.get_yamls(repo, commit) - logger.info('Updating YAML contents (policy/template) ...') - axops_client.update_yamls(repo, branch, commit, yaml_files) - except Exception as e: - if 'Branch not found' in e.message: - logger.info('No need to update YAML contents') - return Response() - else: - logger.error('Failed to update YAML contents: %s', e) - event_notification_client.send_message_to_notification_center( - CODE_JOB_CI_YAML_UPDATE_FAILURE, detail={'vendor': vendor, - 'repo': repo, - 'branch': branch, - 'error': str(e)}) - raise AXApiInternalError('Failed to update YAML contents', str(e)) - else: - logger.info('Successfully updated YAML contents') - return Response() - - -def purge_branches(repo, branch=None): - """Purge branch heads. - - :param repo: - :param branch: - :return: - """ - if not repo: - raise AXApiInvalidParam('Missing required parameter', 'Missing required parameter (repo)') - logger.info('Purging branch heads (repo: %s, branch: %s) ...', repo, branch) - - try: - if not branch: - axdb_client.purge_branch_heads(repo) - else: - axdb_client.purge_branch_head(repo, branch) - except Exception as e: - message = 'Unable to purge branch heads' - detail = 'Unable to purge branch heads (repo: {}, branch: {}): {}'.format(repo, branch, str(e)) - logger.error(detail) - raise AXApiInternalError(message, detail) - else: - logger.info('Successfully purged branch heads') - - -def get_branches(repo=None, branch=None, order_by=None, limit=None): - """Get branches. - - :param repo: - :param branch: - :param order_by: - :param limit: - :return: - """ - - def _get_branches(workspace): - """Retrieve list of remote branches in the workspace. - - :param workspace: - :return: a list of dictionaries. 
- """ - try: - key = '{}:{}'.format(NAMESPACE, workspace) - if redis_client.exists(key): - logger.info('Loading cache (workspace: %s) ...', workspace) - results = redis_client.get(key, decoder=json.loads) - return results - else: - logger.info('Scanning workspace (%s) ...', workspace) - git_client = GitClient(path=workspace, read_only=True) - repo = git_client.get_remote() - branches = git_client.get_remote_heads() - results = [] - for i in range(len(branches)): - results.append({ - 'repo': repo, - 'name': branches[i]['reference'], - 'revision': branches[i]['commit'], - 'commit_date': branches[i]['commit_date'] - }) - logger.info('Saving cache (workspace: %s) ...', workspace) - redis_client.set(key, results, expire=BRANCH_CACHE_TTL, encoder=json.dumps) - return results - except Exception as e: - logger.warning('Failed to scan workspace (%s): %s', workspace, e) - return [] - - logger.info('Retrieving branches (repo: %s, branch: %s) ...', repo, branch) - if repo: - repo = unquote(repo) - _, vendor, repo_owner, repo_name = parse_repo(repo) - workspaces = ['{}/{}/{}/{}'.format(BASE_DIR, vendor, repo_owner, repo_name)] - else: - dirs = [dir[0] for dir in os.walk(BASE_DIR) if dir[0].endswith('/.git')] - workspaces = list(map(lambda v: v[:-5], dirs)) - - branches = [] - with ThreadPoolExecutor(max_workers=20) as executor: - futures = [] - for i in range(len(workspaces)): - futures.append(executor.submit(_get_branches, workspaces[i])) - for future in as_completed(futures): - try: - data = future.result() - except Exception as e: - logger.warning('Unexpected exception occurred during processing: %s', e) - else: - for i in range(len(data)): - branches.append(data[i]) - if branch: - pattern = '.*{}.*'.format(branch.replace('*', '.*')) - branches = [branches[i] for i in range(len(branches)) if re.match(pattern, branches[i]['name'])] - if order_by == 'commit_date': - branches = sorted(branches, key=lambda v: v['commit_date']) - elif order_by == '-commit_date': - branches = sorted(branches, key=lambda v: v['commit_date'], reverse=True) - elif order_by == '-native': - branches = sorted(branches, key=lambda v: (v['repo'], v['name']), reverse=True) - else: - branches = sorted(branches, key=lambda v: (v['repo'], v['name'])) - if limit: - branches = branches[:limit] - logger.info('Successfully retrieved %s branches', len(branches)) - return branches - - -@api_view(['GET', 'DELETE']) -def branches(request): - """Query branches. - - :param request: - :return: - """ - repo = request.query_params.get('repo') - branch = request.query_params.get('branch') or request.query_params.get('name') - if request.method == 'DELETE': - purge_branches(repo, branch) - return Response({}) - else: - if branch and branch.startswith('~'): - branch = branch[1:] - order_by = request.query_params.get('order_by') - limit = request.query_params.get('limit') - if limit: - limit = int(limit) - branches = get_branches(repo, branch, order_by, limit) - return Response({'data': branches}) - - -def _get_commits(workspace, branch=None, since=None, until=None, commit=None, author=None, committer=None, - description=None, limit=None): - """Search for commits in a workspace. - - :param workspace: - :param branch: - :param since: - :param until: - :param commit: - :param author: - :param committer: - :param description: - :param limit: - :return: a list of generators. 
- """ - try: - logger.info('Scanning workspace (%s) for commits ...', workspace) - git_client = GitClient(path=workspace, read_only=True) - if commit and commit.startswith('~'): - commit = commit[1:] - if author and author.startswith('~'): - author = author[1:] - if committer and committer.startswith('~'): - committer = committer[1:] - if description and description.startswith('~'): - description = description[1:] - return git_client.get_commits(branch=branch, commit=commit, since=since, until=until, author=author, - committer=committer, description=description, limit=limit) - except Exception as e: - logger.warning('Failed to scan workspace (%s): %s', workspace, e) - - -def _get_commit(workspace, commit): - """Get a commit from a workspace. - - :param workspace: - :param commit: - :return: - """ - try: - logger.info('Scanning workspace (%s) for commit (%s) ...', workspace, commit) - git_client = GitClient(path=workspace, read_only=True) - return git_client.get_commit(commit) - except Exception as e: - logger.warning('Failed to scan workspace (%s): %s', workspace, e) - - -def _parse_repo_branch(repo, branch, repo_branch): - """Parse repo / branch / repo_branch. - - :param repo: - :param branch: - :param repo_branch: - :return: - """ - if repo: - try: - repo = unquote(repo) - _, vendor, repo_owner, repo_name = parse_repo(repo) - except Exception as e: - msg = 'Unable to parse repo: %s', e - logger.error(msg) - raise AXApiInvalidParam('Unable to parse repo', msg) - else: - dir = '{}/{}/{}/{}'.format(BASE_DIR, vendor, repo_owner, repo_name) - workspaces = {dir: [branch] if branch else []} - elif repo_branch: - try: - repo_branch = json.loads(repo_branch) - workspaces = {} - for repo in repo_branch.keys(): - repo = unquote(repo) - _, vendor, repo_owner, repo_name = parse_repo(repo) - dir = '{}/{}/{}/{}'.format(BASE_DIR, vendor, repo_owner, repo_name) - if dir not in workspaces: - workspaces[dir] = set() - for branch in repo_branch[repo]: - workspaces[dir].add(branch) - except Exception as e: - msg = 'Unable to parse repo_branch: %s' % str(e) - logger.error(msg) - raise AXApiInvalidParam('Unable to parse repo_branch', msg) - else: - dirs = [dir[0] for dir in os.walk(BASE_DIR) if dir[0].endswith('/.git')] - workspaces = list(map(lambda v: v[:-5], dirs)) - workspaces = dict([(k, [branch] if branch else []) for k in workspaces]) - return workspaces - - -@api_view(['GET']) -def commits(request): - """Query commits. - - :param request: - :return: - """ - # Repo and branch are optional parameters that can always be used to reduce - # search scope. Repo is used to construct the path to the workspace so that - # the number of commands we issue can be significantly reduced. Branch can - # be used in every command to filter commits by reference (branch). - repo = request.query_params.get('repo') - branch = request.query_params.get('branch') - repo_branch = request.query_params.get('repo_branch') - if repo_branch and (repo or branch): - raise AXApiInvalidParam('Ambiguous query condition', 'It is ambiguous to us to supply both repo_branch and repo/branch') - workspaces = _parse_repo_branch(repo, branch, repo_branch) - - # If commit / revision is supplied, we will disrespect all other parameters. - # Also, we no longer use `git log` to issue query but use `git show` to directly - # show the commit information. - commit = request.query_params.get('commit') or request.query_params.get('revision') - - # Full-text search can be performed against 3 fields: author, committer, and description. 
- # To perform narrow search, specify `author=~&committer=~&description=~`. - # To perform broad search, specify `search=~`. - # Note that, in git, all queries are full-text search already, so we will strip off `~`. - search = request.query_params.get('search') - author = request.query_params.get('author', None) - committer = request.query_params.get('committer', None) - description = request.query_params.get('description', None) - if search: - use_broad_search = True - else: - use_broad_search = False - if author: - author = author.split(',') - else: - author = [None] - if committer: - committer = committer.split(',') - else: - committer = [None] - author_committer = [] - for i in range(len(author)): - for j in range(len(committer)): - author_committer.append([author[i], committer[j]]) - - # We use time-based pagination. min_time is converted to since and max_time is - # converted to until. Also, the time format seconds since epoch (UTC). - since = request.query_params.get('min_time') - until = request.query_params.get('max_time') - if since: - since = datetime.datetime.utcfromtimestamp(int(since)).strftime('%Y-%m-%dT%H:%M:%S') - if until: - until = datetime.datetime.utcfromtimestamp(int(until)).strftime('%Y-%m-%dT%H:%M:%S') - - # Limit specify the maximal records that we return. Fields specify the fields - # that we return. Sort allows the sorting of final results. - limit = request.query_params.get('limit') - fields = request.query_params.get('fields') - sorter = request.query_params.get('sort') - if limit: - limit = int(limit) - if fields: - fields = set(fields.split(',')) - if sorter: - sorters = sorter.split(',') - valid_keys = {'repo', 'revision', 'author', 'author_date', 'committer', 'commit_date', 'date', 'description'} - valid_sorters = [] - for i in range(len(sorters)): - key = sorters[i][1:] if sorters[i].startswith('-') else sorters[i] - if key in valid_keys: - valid_sorters.append(sorters[i]) - sorter = valid_sorters - - logger.info('Retrieving commits (repo: %s, branch: %s, commit: %s, limit: %s) ...', repo, branch, commit, limit) - - # Prepare arguments for workspace scanning - search_conditions = [] - for key in workspaces.keys(): - if not os.path.isdir(key): # If the workspace does not exist, we should skip scanning it - continue - elif commit: - search_conditions.append({'workspace': key, 'commit': commit}) - elif use_broad_search: - for j in range(len(author_committer)): - _author, _committer = author_committer[j][0], author_committer[j][1] - _search_dict = {'workspace': key, - 'branch': list(workspaces[key]), - 'since': since, - 'until': until, - 'limit': limit, - 'author': _author, - 'committer': _committer, - 'description': description, - } - for field in {'author', 'committer', 'description'}: - new_dict = copy.deepcopy(_search_dict) - new_dict[field] = search - search_conditions.append(new_dict) - else: - for j in range(len(author_committer)): - _author, _committer = author_committer[j][0], author_committer[j][1] - search_conditions.append({'workspace': key, 'branch': list(workspaces[key]), - 'author': _author, 'committer': _committer, 'description': description, - 'since': since, 'until': until, 'limit': limit}) - - # Scan workspaces - commits_list = [] - with ThreadPoolExecutor(max_workers=20) as executor: - futures = [] - for i in range(len(search_conditions)): - if commit: - futures.append(executor.submit(_get_commit, **search_conditions[i])) - else: - futures.append(executor.submit(_get_commits, **search_conditions[i])) - for future in as_completed(futures): - 
try: - data = future.result() - if data: - commits_list.append(data) - except Exception as e: - logger.warning('Unexpected exception occurred during processing: %s', e) - - if commit: - # If commit is supplied in the query, the return list is a list of commits, so we do not need to run top_k algorithm - top_commits = sorted(commits_list, key=lambda v: -v['date']) - else: - # Retrieve top k commits - top_commits = top_k(commits_list, limit, key=lambda v: -v['date']) - - # Sort commits - if sorter: - top_commits = sort_str_dictionaries(top_commits, sorter) - else: - top_commits = sorted(top_commits, key=lambda v: -v['date']) - - # Filter fields - for i in range(len(top_commits)): - for k in list(top_commits[i].keys()): - if fields is not None and k not in fields: - del top_commits[i][k] - logger.info('Successfully retrieved commits') - - return Response({'data': top_commits}) - - -@api_view(['GET']) -def commit(request, pk=None): - """Get a single commit. - - :param request: - :param pk: - :return: - """ - - def get_commits(commit, repo=None): - """Get commit(s) by commit hash. - - Normally, this function should return only 1 commit object. However, if a repo and its forked repo - both appear in our workspace, there could be multiple commit objects. - - :param commit: - :param repo: - :return: - """ - # If repo is not supplied, we need to scan all workspaces - if repo: - _, vendor, repo_owner, repo_name = parse_repo(repo) - workspaces = ['{}/{}/{}/{}'.format(BASE_DIR, vendor, repo_owner, repo_name)] - else: - dirs = [dir[0] for dir in os.walk(BASE_DIR) if dir[0].endswith('/.git')] - workspaces = list(map(lambda v: v[:-5], dirs)) - - commits = [] - with ThreadPoolExecutor(max_workers=20) as executor: - futures = [] - for i in range(len(workspaces)): - futures.append(executor.submit(_get_commit, workspaces[i], commit=commit)) - for future in as_completed(futures): - try: - data = future.result() - if data: - commits.append(data) - except Exception as e: - logger.warning('Unexpected exception occurred during processing: %s', e) - - return commits - - repo = request.query_params.get('repo') - if repo: - repo = unquote(repo) - logger.info('Retrieving commit (repo: %s, commit: %s) ...', repo, pk) - commits = get_commits(pk, repo) - if not commits: - logger.warning('Failed to retrieve commit') - raise AXApiInvalidParam('Invalid revision', detail='Invalid revision ({})'.format(pk)) - else: - if len(commits) > 1: - logger.warning('Found multiple commits with given sha, returning the first one ...') - logger.info('Successfully retrieved commit') - return Response(commits[0]) - - -@api_view(['PUT', 'DELETE']) -def files(request): - """Get a single file content and upload to s3. - - :param request: - :return: - """ - repo = request.query_params.get('repo') - branch = request.query_params.get('branch') - path = request.query_params.get('path') - if not all([repo, branch, path]): - raise AXApiInvalidParam('Missing required parameters', 'Missing required parameters (repo, branch, path)') - if path.startswith('/'): - path = path[1:] - - if request.method == 'PUT': - resp = _put_file(repo, branch, path) - else: - resp = _delete_file(repo, branch, path) - return Response(resp) - - -def _put_file(repo, branch, path): - """Put a file in s3. 
- - :param repo: - :param branch: - :param path: - :return: - """ - _, vendor, repo_owner, repo_name = parse_repo(repo) - workspace = '{}/{}/{}/{}'.format(BASE_DIR, vendor, repo_owner, repo_name) - if not os.path.isdir(workspace): - raise AXApiInvalidParam('Invalid repository', 'Invalid repository ({})'.format(repo)) - try: - logger.info('Extracting file content from repository (repo: %s, branch: %s, path: %s) ...', repo, branch, path) - git_client = GitClient(path=workspace, read_only=True) - files = git_client.get_files(branch=branch, subdir=path, binary_mode=True) - except Exception as e: - message = 'Failed to extract file content' - detail = '{}: {}'.format(message, str(e)) - logger.error(detail) - raise AXApiInternalError(message, detail) - else: - if len(files) == 0: - raise AXApiInvalidParam('Unable to locate file with given information') - file_content = files[0]['content'] - logger.info('Successfully extracted file content') - - try: - # Cluster name id always has the form -<36_bytes_long_cluster_id> - cluster_name, cluster_id = CLUSTER_NAME_ID[:-37], CLUSTER_NAME_ID[-36:] - key = '{cluster_name}/{cluster_id}/{vendor}/{repo_owner}/{repo_name}/{branch}/{path}'.format( - cluster_name=cluster_name, cluster_id=cluster_id, vendor=vendor, - repo_owner=repo_owner, repo_name=repo_name, branch=branch, path=path) - logger.info('Uploading file content to s3 (bucket: %s, key: %s) ...', S3_BUCKET_NAME, key) - response = s3_bucket.Object(key).put(Body=file_content) - etag = response.get('ETag') - if etag: - etag = json.loads(etag) - except Exception as e: - message = 'Failed to upload file content' - detail = '{}: {}'.format(message, str(e)) - logger.error(detail) - raise AXApiInternalError(message, detail) - else: - logger.info('Successfully uploaded file content') - return {'bucket': S3_BUCKET_NAME, 'key': key, 'etag': etag} - - -def _delete_file(repo, branch, path): - """Delete a file from s3. - - :param repo: - :param branch: - :param path: - :return: - """ - _, vendor, repo_owner, repo_name = parse_repo(repo) - try: - cluster_name, cluster_id = CLUSTER_NAME_ID[:-37], CLUSTER_NAME_ID[-36:] - key = '{cluster_name}/{cluster_id}/{vendor}/{repo_owner}/{repo_name}/{branch}/{path}'.format( - cluster_name=cluster_name, cluster_id=cluster_id, vendor=vendor, - repo_owner=repo_owner, repo_name=repo_name, branch=branch, path=path) - logger.info('Deleting file from s3 (bucket: %s, key: %s) ...', S3_BUCKET_NAME, key) - s3_bucket.Object(key).delete() - except Exception as e: - message = 'Failed to delete file' - detail = '{}: {}'.format(message, str(e)) - logger.error(detail) - raise AXApiInternalError(message, detail) - else: - logger.info('Successfully deleted file') - return {'bucket': S3_BUCKET_NAME, 'key': key} diff --git a/devops/src/ax/devops/apps/gateway/scm/apps.py b/devops/src/ax/devops/apps/gateway/scm/apps.py deleted file mode 100644 index da9fd8396d9c..000000000000 --- a/devops/src/ax/devops/apps/gateway/scm/apps.py +++ /dev/null @@ -1,5 +0,0 @@ -from django.apps import AppConfig - - -class ScmConfig(AppConfig): - name = 'scm' diff --git a/devops/src/ax/devops/apps/gateway/scm/models.py b/devops/src/ax/devops/apps/gateway/scm/models.py deleted file mode 100644 index b3b300b89cf9..000000000000 --- a/devops/src/ax/devops/apps/gateway/scm/models.py +++ /dev/null @@ -1,8 +0,0 @@ -from django.db import models - - -class SCM(models.Model): - """SCM model. - - Currently, this model is only used for enabling REST API. 
- """ diff --git a/devops/src/ax/devops/apps/gateway/scm/serializers.py b/devops/src/ax/devops/apps/gateway/scm/serializers.py deleted file mode 100644 index d39cf9db3e7e..000000000000 --- a/devops/src/ax/devops/apps/gateway/scm/serializers.py +++ /dev/null @@ -1,10 +0,0 @@ -from scm.models import SCM - -from rest_framework import serializers - - -class SCMSerializer(serializers.ModelSerializer): - """Serializer for SCM.""" - - class Meta: - model = SCM diff --git a/devops/src/ax/devops/apps/workers/__init__.py b/devops/src/ax/devops/apps/workers/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/devops/src/ax/devops/gateway/__init__.py b/devops/src/ax/devops/gateway/__init__.py new file mode 100644 index 000000000000..fbef2f58a434 --- /dev/null +++ b/devops/src/ax/devops/gateway/__init__.py @@ -0,0 +1 @@ +GATEWAY_DEFAULT_PORT = 8889 diff --git a/devops/src/ax/devops/gateway/constants.py b/devops/src/ax/devops/gateway/constants.py new file mode 100644 index 000000000000..3403c352e011 --- /dev/null +++ b/devops/src/ax/devops/gateway/constants.py @@ -0,0 +1,79 @@ +from ax.devops.utility.utilities import AxEnum + + +class ScmTypes(AxEnum): + """SCM types.""" + + GIT = 'git' + + +class ScmVendors(AxEnum): + """SCM Vendors.""" + + BITBUCKET = 'bitbucket' + GITHUB = 'github' + GITLAB = 'gitlab' + GIT = 'git' + CODECOMMIT = 'codecommit' + + +class AxEventTypes(AxEnum): + """Event types in AX.""" + + CREATE = 'create' + PING = 'ping' + PUSH = 'push' + PULL_REQUEST = 'pull_request' + PULL_REQUEST_MERGE = 'pull_request_merge' + + +class BitBucketEventTypes(AxEnum): + """Event types of BitBucket.""" + + REPO_PUSH = 'repo:push' + PULL_REQUEST_CREATED = 'pullrequest:created' + PULL_REQUEST_UPDATED = 'pullrequest:updated' + PULL_REQUEST_FULFILLED = 'pullrequest:fulfilled' + PULL_REQUEST_COMMENT_CREATED = 'pullrequest:comment_created' + + +class GitHubEventTypes(AxEnum): + """Event types of GitHub.""" + + CREATE = 'create' + PING = 'ping' + PUSH = 'push' + PULL_REQUEST = 'pull_request' + ISSUE_COMMENT = 'issue_comment' + + +class GitLabEventTypes(AxEnum): + """Event types of GitLab.""" + + PUSH = 'Push Hook' + MERGE_REQUEST = 'Merge Request Hook' + NOTES = 'Note Hook' + + +class AxCommands(AxEnum): + """Commands supported by AX.""" + + RERUN = 'rerun' + RUN = 'run' + + +TYPE_BITBUCKET = ScmVendors.BITBUCKET +TYPE_GITHUB = ScmVendors.GITHUB +TYPE_GITLAB = ScmVendors.GITLAB +TYPE_GIT = ScmVendors.GIT +TYPE_CODECOMMIT = ScmVendors.CODECOMMIT +SUPPORTED_TYPES = { + ScmVendors.BITBUCKET, + ScmVendors.GITHUB, + ScmVendors.GITLAB, + ScmVendors.GIT, + ScmVendors.CODECOMMIT +} + +DEFAULT_CONCURRENCY = 20 +DEFAULT_INTERVAL = 30 \ No newline at end of file diff --git a/devops/src/ax/devops/ci/event_translators/__init__.py b/devops/src/ax/devops/gateway/event_translators/__init__.py similarity index 85% rename from devops/src/ax/devops/ci/event_translators/__init__.py rename to devops/src/ax/devops/gateway/event_translators/__init__.py index 74b9e3c570c2..125eceb1a0ee 100644 --- a/devops/src/ax/devops/ci/event_translators/__init__.py +++ b/devops/src/ax/devops/gateway/event_translators/__init__.py @@ -1,9 +1,9 @@ import logging from ax.devops.ci.constants import ScmVendors -from ax.devops.ci.event_translators.bitbucket import BitBucketEventTranslator -from ax.devops.ci.event_translators.github import GitHubEventTranslator -from ax.devops.ci.event_translators.gitlab import GitLabEventTranslator +from .bitbucket import BitBucketEventTranslator +from .github import GitHubEventTranslator 
+from .gitlab import GitLabEventTranslator from ax.devops.exceptions import UnrecognizableVendor logger = logging.getLogger(__name__) diff --git a/devops/src/ax/devops/ci/event_translators/base.py b/devops/src/ax/devops/gateway/event_translators/base.py similarity index 100% rename from devops/src/ax/devops/ci/event_translators/base.py rename to devops/src/ax/devops/gateway/event_translators/base.py diff --git a/devops/src/ax/devops/ci/event_translators/bitbucket.py b/devops/src/ax/devops/gateway/event_translators/bitbucket.py similarity index 97% rename from devops/src/ax/devops/ci/event_translators/bitbucket.py rename to devops/src/ax/devops/gateway/event_translators/bitbucket.py index a0a5bd8f252e..6b4987337130 100644 --- a/devops/src/ax/devops/ci/event_translators/bitbucket.py +++ b/devops/src/ax/devops/gateway/event_translators/bitbucket.py @@ -2,9 +2,9 @@ import logging from ax.devops.ci.constants import AxEventTypes, BitBucketEventTypes, ScmTypes, ScmVendors -from ax.devops.ci.event_translators.base import BaseEventTranslator +from .base import BaseEventTranslator from ax.devops.exceptions import UnrecognizableEventType, UnsupportedSCMType -from ax.devops.scm_rest.bitbucket_client import BitBucketClient +from ax.devops.gateway.scm_rest.bitbucket_client import BitBucketClient from ax.devops.utility.utilities import utc from ax.notification_center import CODE_JOB_CI_INVALID_EVENT_TYPE, CODE_JOB_CI_INVALID_SCM_TYPE diff --git a/devops/src/ax/devops/ci/event_translators/github.py b/devops/src/ax/devops/gateway/event_translators/github.py similarity index 98% rename from devops/src/ax/devops/ci/event_translators/github.py rename to devops/src/ax/devops/gateway/event_translators/github.py index 9bfcda8172ee..ee928ec32051 100644 --- a/devops/src/ax/devops/ci/event_translators/github.py +++ b/devops/src/ax/devops/gateway/event_translators/github.py @@ -3,9 +3,9 @@ import random from ax.devops.ci.constants import AxEventTypes, GitHubEventTypes, ScmVendors -from ax.devops.ci.event_translators.base import BaseEventTranslator +from .base import BaseEventTranslator from ax.devops.exceptions import UnrecognizableEventType -from ax.devops.scm_rest.github_client import GitHubClient +from ax.devops.gateway.scm_rest.github_client import GitHubClient from ax.devops.utility.utilities import utc logger = logging.getLogger(__name__) diff --git a/devops/src/ax/devops/ci/event_translators/gitlab.py b/devops/src/ax/devops/gateway/event_translators/gitlab.py similarity index 97% rename from devops/src/ax/devops/ci/event_translators/gitlab.py rename to devops/src/ax/devops/gateway/event_translators/gitlab.py index 43a38c6b76bd..2bb831d6be9e 100644 --- a/devops/src/ax/devops/ci/event_translators/gitlab.py +++ b/devops/src/ax/devops/gateway/event_translators/gitlab.py @@ -2,9 +2,9 @@ import logging from ax.devops.ci.constants import AxEventTypes, GitLabEventTypes, ScmVendors -from ax.devops.ci.event_translators.base import BaseEventTranslator +from .base import BaseEventTranslator from ax.devops.exceptions import UnrecognizableEventType -from ax.devops.scm_rest.gitlab_client import GitLabClient +from ax.devops.gateway.scm_rest.gitlab_client import GitLabClient from ax.devops.utility.utilities import utc logger = logging.getLogger(__name__) diff --git a/devops/src/ax/devops/apps/workers/event_trigger.py b/devops/src/ax/devops/gateway/event_trigger.py similarity index 69% rename from devops/src/ax/devops/apps/workers/event_trigger.py rename to devops/src/ax/devops/gateway/event_trigger.py index 
e09b01fc7203..8d6e9a0a2202 100644 --- a/devops/src/ax/devops/apps/workers/event_trigger.py +++ b/devops/src/ax/devops/gateway/event_trigger.py @@ -1,20 +1,17 @@ -import argparse import logging import pprint import requests -import sys from retrying import retry from ax.devops.ci.event_trigger import EventTrigger as _EventTrigger -from ax.devops.kafka.kafka_client import ConsumerClient, ProducerClient +from ax.devops.kafka.kafka_client import ConsumerClient from ax.devops.settings import AxSettings -from ax.version import __version__ logger = logging.getLogger(__name__) event_trigger = _EventTrigger() -class EventTrigger(): +class EventTrigger(object): def __init__(self): """ @@ -75,20 +72,3 @@ def _report_status(service, message): resp.raise_for_status() except Exception as e: logger.warning('Failed to upload job result: %s', e) - - -def main(): - parser = argparse.ArgumentParser() - parser.add_argument('-l', '--log-level', dest='log_level', type=int, default=logging.INFO, help="specify log level") - parser.add_argument('--version', action='version', version="%(prog)s {}".format(__version__)) - args = parser.parse_args() - - logging.basicConfig(stream=sys.stdout, level=args.log_level, - format="%(asctime)s %(levelname)s %(lineno)d %(name)s: %(message)s", - datefmt="%Y-%m-%dT%H:%M:%S") - logging.getLogger('ax.devops.scm').setLevel(logging.WARNING) - logging.getLogger('ax.devops.kafka.kafka_client').setLevel(logging.WARNING) - logging.getLogger('kafka.conn').setLevel(logging.WARNING) - logging.getLogger('kafka.producer.kafka').setLevel(logging.WARNING) - event_trigger = EventTrigger() - event_trigger.run() diff --git a/devops/src/ax/devops/gateway/gateway.py b/devops/src/ax/devops/gateway/gateway.py new file mode 100644 index 000000000000..6ec12405fe09 --- /dev/null +++ b/devops/src/ax/devops/gateway/gateway.py @@ -0,0 +1,635 @@ +import boto3 +import json +import logging +import os +import re +import shutil +import socket + +from concurrent.futures import ThreadPoolExecutor, as_completed +from retrying import retry +try: + from urlparse import urlparse +except ImportError: + from urllib.parse import urlparse +from urllib.parse import unquote + +from .constants import AxEventTypes, ScmVendors, SUPPORTED_TYPES, DEFAULT_CONCURRENCY, DEFAULT_INTERVAL +from .scm_rest.bitbucket_client import BitBucketClient +from .scm_rest.github_client import GitHubClient +from .scm_rest.gitlab_client import GitLabClient +from .repo_manager import RepoManager +from .event_trigger import EventTrigger + +from ax.exceptions import AXApiInvalidParam, AXApiAuthFailed, AXApiForbiddenReq, AXApiInternalError +from ax.devops.axdb.axdb_client import AxdbClient +from ax.devops.axdb.axops_client import AxopsClient +from ax.devops.axsys.axsys_client import AxsysClient +from ax.devops.exceptions import AXScmException +from ax.devops.jira.jira_client import JiraClient +from ax.devops.kafka.kafka_client import EventNotificationClient +from ax.notification_center import FACILITY_GATEWAY +from ax.notification_center import CODE_JOB_CI_ELB_CREATION_FAILURE, CODE_JOB_CI_ELB_VERIFICATION_TIMEOUT, \ + CODE_JOB_CI_WEBHOOK_CREATION_FAILURE, CODE_JOB_CI_ELB_DELETION_FAILURE, CODE_JOB_CI_WEBHOOK_DELETION_FAILURE, \ + CODE_CONFIGURATION_SCM_CONNECTION_ERROR +from ax.devops.redis.redis_client import RedisClient, DB_REPORTING +from ax.devops.scm.scm import GitClient + + +logger = logging.getLogger(__name__) + + +class Gateway(object): + """Repo Controller""" + BASE_DIR = '/ax/data/repos' + BRANCH_CACHE_TTL = 5 * 60 # 5 minutes TTL as we 
expect we won't finish upgrade within 5 minutes
+    NAMESPACE = 'gateway'
+
+    CLUSTER_NAME_ID = os.environ.get('AX_CLUSTER')
+    CUSTOMER_ID = os.environ.get('AX_CUSTOMER_ID')
+    S3_BUCKET_NAME = 'applatix-cluster-{account}-{seq}'.format(account=CUSTOMER_ID, seq=0)
+    s3_bucket = boto3.resource('s3').Bucket(S3_BUCKET_NAME)
+
+    def __init__(self):
+        self.axdb_client = AxdbClient()
+        self.axops_client = AxopsClient()
+        self.axsys_client = AxsysClient()
+        self.redis_client = RedisClient('redis', db=DB_REPORTING)
+        self.event_notification_client = EventNotificationClient(FACILITY_GATEWAY)
+        self.scm_clients = {
+            ScmVendors.BITBUCKET: BitBucketClient(),
+            ScmVendors.GITHUB: GitHubClient(),
+            ScmVendors.GITLAB: GitLabClient()
+        }
+        self.repo_manager = RepoManager(DEFAULT_CONCURRENCY, DEFAULT_INTERVAL)
+        self.event_trigger = EventTrigger()
+
+    def get_repos(self, scm_type, url, username, password):
+        """Get all repos owned by the user."""
+        if scm_type in {ScmVendors.BITBUCKET, ScmVendors.GITHUB, ScmVendors.GITLAB}:
+            try:
+                repos = self.scm_clients[scm_type].get_repos(username, password)
+            except Exception as e:
+                logger.warning('Unable to connect to %s: %s', scm_type, e)
+                detail = {
+                    'type': scm_type,
+                    'username': username,
+                    'error': str(getattr(e, 'detail', e))
+                }
+                self.event_notification_client.send_message_to_notification_center(CODE_CONFIGURATION_SCM_CONNECTION_ERROR,
+                                                                                   detail=detail)
+                raise AXApiInvalidParam('Cannot connect to %s server' % scm_type)
+            else:
+                return repos
+        elif scm_type == ScmVendors.GIT:
+            _, vendor, repo_owner, repo_name = Gateway.parse_repo(url)
+            path = '/tmp/{}/{}/{}'.format(vendor, repo_owner, repo_name)
+            if os.path.isfile(path):
+                os.remove(path)
+            if os.path.isdir(path):
+                shutil.rmtree(path)
+            os.makedirs(path)
+            client = GitClient(path=path, repo=url, username=username, password=password)
+            try:
+                client.list_remote()
+            except Exception as e:
+                logger.warning('Unable to connect to git server (%s): %s', url, e)
+                detail = {
+                    'type': scm_type,
+                    'url': url,
+                    'username': username,
+                    'error': str(e)
+                }
+                self.event_notification_client.send_message_to_notification_center(CODE_CONFIGURATION_SCM_CONNECTION_ERROR,
+                                                                                   detail=detail)
+                raise AXApiInvalidParam('Cannot connect to git server')
+            else:
+                return {url: url}
+        elif scm_type == ScmVendors.CODECOMMIT:
+            repos = {}
+            region = 'us-east-1'
+            default_url_format = 'https://git-codecommit.{}.amazonaws.com/v1/repos/{}'
+            client = boto3.client('codecommit', aws_access_key_id=username, aws_secret_access_key=password,
+                                  region_name=region)
+            try:
+                response = client.list_repositories().get('repositories', [])
+                for r in response:
+                    repo_url = default_url_format.format(region, r['repositoryName'])
+                    repos[repo_url] = repo_url
+            except Exception as exc:
+                detail = {
+                    'type': scm_type,
+                    'region': region,
+                    'url': default_url_format.format(region, ''),
+                    'username': username,
+                    'error': 'Cannot connect to CodeCommit: ' + str(exc)
+                }
+                self.event_notification_client.send_message_to_notification_center(CODE_CONFIGURATION_SCM_CONNECTION_ERROR,
+                                                                                   detail=detail)
+                raise AXApiInvalidParam('Cannot connect to CodeCommit: %s' % exc)
+            else:
+                return repos
+        else:
+            return {}
+
+    @staticmethod
+    def parse_repo(repo):
+        """Parse repo url into 4-tuple (protocol, vendor, repo_owner, repo_name).
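+
+        For illustration, a hypothetical GitHub-style URL would split as
+        follows (doctest-style sketch; the URL is a made-up example):
+
+            >>> Gateway.parse_repo('https://github.com/example-org/example-repo')
+            ('https', 'github.com', 'example-org', 'example-repo')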
+ + :param repo: + :return: + """ + parsed_url = urlparse(repo) + protocol, vendor = parsed_url.scheme, parsed_url.hostname + m = re.match(r'/([a-zA-Z0-9\-]+)/([a-zA-Z0-9_.\-/]+)', parsed_url.path) + if not m: + raise AXScmException('Illegal repo URL', detail='Illegal repo URL ({})'.format(repo)) + repo_owner, repo_name = m.groups() + return protocol, vendor, repo_owner, repo_name + + def has_webhook(self, repo): + """Test if there is any repo which uses webhook. + + :param repo: + :return: + """ + tools = self.axops_client.get_tools(category='scm') + for i in range(len(tools)): + use_webhook = tools[i].get('use_webhook', False) + repos = set(tools[i].get('repos', [])) + repos -= {repo} + if use_webhook and repos: + return True + return False + + def get_webhook(self, vendor, repo): + """Get webhook + + :param vendor: + :param repo: + :returns: + """ + logger.info('Retrieving webhook (repo: %s) ...', repo) + return self.scm_clients[vendor].get_webhook(repo) + + def create_webhook(self, vendor, repo): + """Create webhook + + :param vendor: + :param repo: + :returns: + """ + + @retry(wait_fixed=5000, stop_max_delay=20 * 60 * 1000) + def _verify_elb(hostname): + try: + logger.info('Verifying ELB (%s) ...', hostname) + ip = socket.gethostbyname(hostname) + logger.info('Successfully resolved ELB (%s) to IP (%s)', hostname, ip) + except Exception as e: + logger.error('ELB not ready: %s', str(e)) + raise AXApiInternalError('ELB not ready', str(e)) + + ip_range = self.scm_clients[vendor].get_webhook_whitelist() + + # Create ELB + payload = {'ip_range': ip_range, 'external_port': 8443, 'internal_port': 8087} + try: + logger.info('Creating ELB for webhook ...') + result = self.axsys_client.create_webhook(**payload) + except Exception as e: + logger.error('Failed to create ELB for webhook: %s', str(e)) + self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_ELB_CREATION_FAILURE, + detail=payload) + raise AXApiInternalError('Failed to create ELB for webhook', str(e)) + else: + logger.info('Successfully created ELB for webhook') + + # Verify ELB + hostname = result['hostname'] + try: + _verify_elb(hostname) + except Exception as e: + logger.error('Timed out on waiting for ELB to be available: %s', str(e)) + self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_ELB_VERIFICATION_TIMEOUT, + detail={'hostname': hostname}) + raise AXApiInternalError('Timed out on waiting for ELB to be available: %s' % str(e)) + + # Create webhook + try: + logger.info('Creating webhook (repo: %s) ...', repo) + self.scm_clients[vendor].create_webhook(repo) + except AXApiAuthFailed as e: + logger.error('Invalid credential supplied') + detail = { + 'repo': repo, + 'error': 'Invalid credential supplied:' + str(e) + } + self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_CREATION_FAILURE, + detail=detail) + raise AXApiInvalidParam('User authentication failed', detail=str(e)) + except AXApiForbiddenReq as e: + logger.error('Supplied credential is valid but having insufficient permission') + detail = { + 'repo': repo, + 'error': 'Supplied credential is valid but having insufficient permission:' + str(e) + } + self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_CREATION_FAILURE, + detail=detail) + raise AXApiInvalidParam('User has insufficient permission', detail=str(e)) + except Exception as e: + logger.error('Failed to configure webhook: %s', e) + detail = { + 'repo': repo, + 'error': 'Failed to configure 
webhook:' + str(e) + } + self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_CREATION_FAILURE, + detail=detail) + raise AXApiInternalError('Failed to configure webhook', str(e)) + else: + logger.info('Successfully created webhook (repo: %s)', repo) + return {} + + def delete_webhook(self, vendor, repo): + """Delete webhook + + :param vendor: + :param repo: + :returns: + """ + # Delete webhook + try: + logger.info('Deleting webhook (repo: %s) ...', repo) + self.scm_clients[vendor].delete_webhook(repo) + except AXApiAuthFailed as e: + logger.error('Invalid credential supplied') + detail = { + 'repo': repo, + 'error': 'Invalid credential supplied:' + str(e) + } + self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_DELETION_FAILURE, + detail=detail) + raise AXApiInvalidParam('User authentication failed', detail=str(e)) + except AXApiForbiddenReq as e: + logger.error('Supplied credential is valid but having insufficient permission') + detail = { + 'repo': repo, + 'error': 'Supplied credential is valid but having insufficient permission:' + str(e) + } + self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_DELETION_FAILURE, + detail=detail) + raise AXApiInvalidParam('User has insufficient permission', detail=str(e)) + except Exception as e: + logger.error('Failed to delete webhook: %s', e) + detail = { + 'repo': repo, + 'error': 'Failed to delete webhook:' + str(e) + } + self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_WEBHOOK_DELETION_FAILURE, + detail=detail) + raise AXApiInternalError('Failed to delete webhook', str(e)) + else: + logger.info('Successfully deleted webhook (repo: %s)', repo) + + # Delete ELB + try: + if not self.has_webhook(repo): + logger.info('Deleting ELB for webhook ...') + self.axsys_client.delete_webhook() + except Exception as e: + logger.error('Failed to delete ELB for webhook: %s', str(e)) + detail = {'repo': repo, + 'error': 'Failed to delete ELB for webhook' + str(e) + } + self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_ELB_DELETION_FAILURE, + detail=detail) + raise AXApiInternalError('Failed to delete ELB for webhook', str(e)) + else: + logger.info('Successfully deleted ELB for webhook') + return {} + + def purge_branches(self, repo, branch=None): + """Purge branch heads. + + :param repo: + :param branch: + :return: + """ + if not repo: + raise AXApiInvalidParam('Missing required parameter', 'Missing required parameter (repo)') + logger.info('Purging branch heads (repo: %s, branch: %s) ...', repo, branch) + + try: + if not branch: + self.axdb_client.purge_branch_heads(repo) + else: + self.axdb_client.purge_branch_head(repo, branch) + except Exception as e: + message = 'Unable to purge branch heads' + detail = 'Unable to purge branch heads (repo: {}, branch: {}): {}'.format(repo, branch, str(e)) + logger.error(detail) + raise AXApiInternalError(message, detail) + else: + logger.info('Successfully purged branch heads') + + def get_branches(self, repo=None, branch=None, order_by=None, limit=None): + """Get branches. + + :param repo: + :param branch: + :param order_by: + :param limit: + :return: + """ + + def _get_branches(workspace): + """Retrieve list of remote branches in the workspace. + + :param workspace: + :return: a list of dictionaries. 
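+
+            Each dictionary has the shape assembled below; the values shown
+            here are illustrative placeholders, not real data:
+
+                {'repo': 'https://github.com/example-org/example-repo',
+                 'name': 'master',
+                 'revision': '<40-hex-char sha>',
+                 'commit_date': 1500000000}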
+ """ + try: + key = '{}:{}'.format(Gateway.NAMESPACE, workspace) + if self.redis_client.exists(key): + logger.info('Loading cache (workspace: %s) ...', workspace) + results = self.redis_client.get(key, decoder=json.loads) + return results + else: + logger.info('Scanning workspace (%s) ...', workspace) + git_client = GitClient(path=workspace, read_only=True) + repo = git_client.get_remote() + branches = git_client.get_remote_heads() + results = [] + for i in range(len(branches)): + results.append({ + 'repo': repo, + 'name': branches[i]['reference'], + 'revision': branches[i]['commit'], + 'commit_date': branches[i]['commit_date'] + }) + logger.info('Saving cache (workspace: %s) ...', workspace) + self.redis_client.set(key, results, expire=Gateway.BRANCH_CACHE_TTL, encoder=json.dumps) + return results + except Exception as e: + logger.warning('Failed to scan workspace (%s): %s', workspace, e) + return [] + + logger.info('Retrieving branches (repo: %s, branch: %s) ...', repo, branch) + if repo: + repo = unquote(repo) + _, vendor, repo_owner, repo_name = self.parse_repo(repo) + workspaces = ['{}/{}/{}/{}'.format(Gateway.BASE_DIR, vendor, repo_owner, repo_name)] + else: + dirs = [dir_name[0] for dir_name in os.walk(Gateway.BASE_DIR) if dir_name[0].endswith('/.git')] + workspaces = list(map(lambda v: v[:-5], dirs)) + + branches = [] + with ThreadPoolExecutor(max_workers=20) as executor: + futures = [] + for i in range(len(workspaces)): + futures.append(executor.submit(_get_branches, workspaces[i])) + for future in as_completed(futures): + try: + data = future.result() + except Exception as e: + logger.warning('Unexpected exception occurred during processing: %s', e) + else: + for i in range(len(data)): + branches.append(data[i]) + if branch: + pattern = '.*{}.*'.format(branch.replace('*', '.*')) + branches = [branches[i] for i in range(len(branches)) if re.match(pattern, branches[i]['name'])] + if order_by == 'commit_date': + branches = sorted(branches, key=lambda v: v['commit_date']) + elif order_by == '-commit_date': + branches = sorted(branches, key=lambda v: v['commit_date'], reverse=True) + elif order_by == '-native': + branches = sorted(branches, key=lambda v: (v['repo'], v['name']), reverse=True) + else: + branches = sorted(branches, key=lambda v: (v['repo'], v['name'])) + if limit: + branches = branches[:limit] + logger.info('Successfully retrieved %s branches', len(branches)) + return branches + + @staticmethod + def _get_commits(workspace, branch=None, since=None, until=None, commit=None, author=None, committer=None, + description=None, limit=None): + """Search for commits in a workspace.""" + try: + logger.info('Scanning workspace (%s) for commits ...', workspace) + git_client = GitClient(path=workspace, read_only=True) + if commit and commit.startswith('~'): + commit = commit[1:] + if author and author.startswith('~'): + author = author[1:] + if committer and committer.startswith('~'): + committer = committer[1:] + if description and description.startswith('~'): + description = description[1:] + return git_client.get_commits(branch=branch, commit=commit, since=since, until=until, author=author, + committer=committer, description=description, limit=limit) + except Exception as e: + logger.warning('Failed to scan workspace (%s): %s', workspace, e) + + @staticmethod + def _get_commit(workspace, commit): + """Get a commit from a workspace.""" + try: + logger.info('Scanning workspace (%s) for commit (%s) ...', workspace, commit) + git_client = GitClient(path=workspace, read_only=True) + 
return git_client.get_commit(commit)
+        except Exception as e:
+            logger.warning('Failed to scan workspace (%s): %s', workspace, e)
+
+    @staticmethod
+    def _parse_repo_branch(repo, branch, repo_branch):
+        """Parse repo / branch / repo_branch."""
+        if repo:
+            try:
+                repo = unquote(repo)
+                _, vendor, repo_owner, repo_name = Gateway.parse_repo(repo)
+            except Exception as e:
+                msg = 'Unable to parse repo: %s' % e
+                logger.error(msg)
+                raise AXApiInvalidParam('Unable to parse repo', msg)
+            else:
+                dir = '{}/{}/{}/{}'.format(Gateway.BASE_DIR, vendor, repo_owner, repo_name)
+                workspaces = {dir: [branch] if branch else []}
+        elif repo_branch:
+            try:
+                repo_branch = json.loads(repo_branch)
+                workspaces = {}
+                for repo in repo_branch.keys():
+                    repo = unquote(repo)
+                    _, vendor, repo_owner, repo_name = Gateway.parse_repo(repo)
+                    dir = '{}/{}/{}/{}'.format(Gateway.BASE_DIR, vendor, repo_owner, repo_name)
+                    if dir not in workspaces:
+                        workspaces[dir] = set()
+                    for branch in repo_branch[repo]:
+                        workspaces[dir].add(branch)
+            except Exception as e:
+                msg = 'Unable to parse repo_branch: %s' % str(e)
+                logger.error(msg)
+                raise AXApiInvalidParam('Unable to parse repo_branch', msg)
+        else:
+            dirs = [dir[0] for dir in os.walk(Gateway.BASE_DIR) if dir[0].endswith('/.git')]
+            workspaces = list(map(lambda v: v[:-5], dirs))
+            workspaces = dict([(k, [branch] if branch else []) for k in workspaces])
+        return workspaces
+
+    @staticmethod
+    def _put_file(repo, branch, path):
+        """Put a file in s3.
+
+        :param repo:
+        :param branch:
+        :param path:
+        :return:
+        """
+        _, vendor, repo_owner, repo_name = Gateway.parse_repo(repo)
+        workspace = '{}/{}/{}/{}'.format(Gateway.BASE_DIR, vendor, repo_owner, repo_name)
+        if not os.path.isdir(workspace):
+            raise AXApiInvalidParam('Invalid repository', 'Invalid repository ({})'.format(repo))
+        try:
+            logger.info('Extracting file content from repository (repo: %s, branch: %s, path: %s) ...',
+                        repo, branch, path)
+            git_client = GitClient(path=workspace, read_only=True)
+            files = git_client.get_files(branch=branch, subdir=path, binary_mode=True)
+        except Exception as e:
+            message = 'Failed to extract file content'
+            detail = '{}: {}'.format(message, str(e))
+            logger.error(detail)
+            raise AXApiInternalError(message, detail)
+        else:
+            if len(files) == 0:
+                raise AXApiInvalidParam('Unable to locate file with given information')
+            file_content = files[0]['content']
+            logger.info('Successfully extracted file content')
+
+        try:
+            # Cluster name id always has the form <cluster_name>-<36_bytes_long_cluster_id>
+            cluster_name, cluster_id = Gateway.CLUSTER_NAME_ID[:-37], Gateway.CLUSTER_NAME_ID[-36:]
+            key = '{cluster_name}/{cluster_id}/{vendor}/{repo_owner}/{repo_name}/{branch}/{path}'.format(
+                cluster_name=cluster_name, cluster_id=cluster_id, vendor=vendor,
+                repo_owner=repo_owner, repo_name=repo_name, branch=branch, path=path)
+            logger.info('Uploading file content to s3 (bucket: %s, key: %s) ...', Gateway.S3_BUCKET_NAME, key)
+            response = Gateway.s3_bucket.Object(key).put(Body=file_content)
+            etag = response.get('ETag')
+            if etag:
+                etag = json.loads(etag)
+        except Exception as e:
+            message = 'Failed to upload file content'
+            detail = '{}: {}'.format(message, str(e))
+            logger.error(detail)
+            raise AXApiInternalError(message, detail)
+        else:
+            logger.info('Successfully uploaded file content')
+            return {'bucket': Gateway.S3_BUCKET_NAME, 'key': key, 'etag': etag}
+
+    @staticmethod
+    def _delete_file(repo, branch, path):
+        """Delete a file from s3.
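+
+        The object key follows the same layout as _put_file above; the names
+        in angle brackets are placeholders:
+
+            <cluster_name>/<cluster_id>/<vendor>/<repo_owner>/<repo_name>/<branch>/<path>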
+
+        :param repo:
+        :param branch:
+        :param path:
+        :return:
+        """
+        _, vendor, repo_owner, repo_name = Gateway.parse_repo(repo)
+        try:
+            cluster_name, cluster_id = Gateway.CLUSTER_NAME_ID[:-37], Gateway.CLUSTER_NAME_ID[-36:]
+            key = '{cluster_name}/{cluster_id}/{vendor}/{repo_owner}/{repo_name}/{branch}/{path}'.format(
+                cluster_name=cluster_name, cluster_id=cluster_id, vendor=vendor,
+                repo_owner=repo_owner, repo_name=repo_name, branch=branch, path=path)
+            logger.info('Deleting file from s3 (bucket: %s, key: %s) ...', Gateway.S3_BUCKET_NAME, key)
+            Gateway.s3_bucket.Object(key).delete()
+        except Exception as e:
+            message = 'Failed to delete file'
+            detail = '{}: {}'.format(message, str(e))
+            logger.error(detail)
+            raise AXApiInternalError(message, detail)
+        else:
+            logger.info('Successfully deleted file')
+            return {'bucket': Gateway.S3_BUCKET_NAME, 'key': key}
+
+    @staticmethod
+    def init_jira_client(axops_client, url=None, username=None, password=None):
+        """Initialize a Jira client"""
+
+        def get_jira_configuration():
+            js = axops_client.get_tools(category='issue_management', type='jira')
+            if js:
+                return {'url': js[0]['url'],
+                        'username': js[0]['username'],
+                        'password': js[0]['password']
+                        }
+            else:
+                return dict()
+
+        if url is None or username is None or password is None:
+            conf = get_jira_configuration()
+            if not conf:
+                raise AXApiInvalidParam('No JIRA configured')
+            else:
+                url, username, password = conf['url'], conf['username'], conf['password']
+        return JiraClient(url, username, password)
+
+    # TODO: verify whether this function is still needed
+    def check_github_whitelist(self):
+        if not self.is_github_webhook_enabled():
+            logger.info('No GitHub webhook configured')
+            return
+        configured = self.get_from_cache()
+        logger.info('The configured GitHub webhook whitelist is %s', configured)
+        advertised = self.scm_clients[ScmVendors.GITHUB].get_webhook_whitelist()
+        logger.info('The GitHub webhook whitelist is %s', advertised)
+        if set(configured) == set(advertised):
+            logger.info('No update needed')
+        else:
+            # Create ELB
+            payload = {'ip_range': advertised, 'external_port': 8443, 'internal_port': 8087}
+            try:
+                logger.info('Creating ELB for webhook ...')
+                self.axsys_client.create_webhook(**payload)
+            except Exception as exc:
+                logger.error('Failed to create ELB for webhook: %s', str(exc))
+                self.event_notification_client.send_message_to_notification_center(CODE_JOB_CI_ELB_CREATION_FAILURE,
+                                                                                   detail=payload)
+            else:
+                # Update cache
+                self.write_to_cache(advertised)
+                logger.info('Successfully updated ELB for webhook')
+
+    def is_github_webhook_enabled(self):
+        """Check whether the webhook is configured or not"""
+        github_data = self.axops_client.get_tools(type='github')
+        use_webhook = [each for each in github_data if each['use_webhook']]
+        return bool(use_webhook)
+
+    @staticmethod
+    def write_to_cache(ip_range):
+        """Store the webhook whitelist info"""
+        cache_file = '/tmp/github_webhook_whitelist'
+        with open(cache_file, 'w+') as f:
+            f.write(json.dumps(ip_range))
+
+    def get_from_cache(self):
+        """Get cached webhook whitelist info, otherwise get from axmon"""
+        cache_file = '/tmp/github_webhook_whitelist'
+        ip_range = list()
+        if os.path.exists(cache_file):
+            with open(cache_file, 'r+') as f:
+                data = f.readlines()
+                ip_range = json.loads(data[0])
+        else:
+            logger.debug('No cache file')
+            try:
+                data = self.axsys_client.get_webhook()
+            except Exception as exc:
+                logger.warning(exc)
+            else:
+                logger.info('Write whitelist info to cache file')
+                ip_range = data['ip_ranges']
+
self.write_to_cache(ip_range) + return ip_range diff --git a/devops/src/ax/devops/gateway/main.py b/devops/src/ax/devops/gateway/main.py new file mode 100644 index 000000000000..c6396646d314 --- /dev/null +++ b/devops/src/ax/devops/gateway/main.py @@ -0,0 +1,74 @@ +from gevent import monkey +monkey.patch_all() + +from ax.util.az_patch import az_patch +az_patch() + +import argparse +import logging +import signal +import sys +import threading + +from gevent import pywsgi + +import ax.devops.gateway.rest as rest +from ax.version import __version__ +from ax.devops.gateway.gateway import Gateway +from ax.util.ax_signal import traceback_multithread +from . import GATEWAY_DEFAULT_PORT + +logger = logging.getLogger(__name__) + + +def signal_handler(signalnum, *args): + logger.info("Gateway killed with signal %s", signalnum) + sys.exit(0) + + +def signal_debugger(signal_num, frame): + logger.info("Gateway debugged with signal %s", signal_num) + result = traceback_multithread(signal_num, frame) + logger.info(result) + + +def main(): + parser = argparse.ArgumentParser(description='gateway') + parser.add_argument('--version', action='version', version="%(prog)s {}".format(__version__)) + parser.add_argument('--port', type=int, default=GATEWAY_DEFAULT_PORT, help="Run server on the specified port") + args = parser.parse_args() + + logging.basicConfig(format="%(asctime)s %(levelname)s %(name)s %(lineno)d %(threadName)s: %(message)s", + datefmt="%Y-%m-%dT%H:%M:%S", + stream=sys.stdout) + logging.getLogger("ax").setLevel(logging.DEBUG) + + signal.signal(signal.SIGTERM, signal_handler) + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGUSR1, signal_debugger) + + try: + rest.gateway = Gateway() + # Start repo manager process + logger.info("Starting Gateway repo_manager thread...") + repo_manager_thread = threading.Thread(target=rest.gateway.repo_manager.run, + name="repo_manager", + daemon=True) + repo_manager_thread.start() + # Start event trigger process + logger.info("Starting Gateway event_trigger thread...") + event_trigger_thread = threading.Thread(target=rest.gateway.event_trigger.run, + name="event_trigger", + daemon=True) + event_trigger_thread.start() + + # Start flask server + logger.info("Starting Flask server...") + server = pywsgi.WSGIServer(('', args.port), rest.app) + logger.info("Gateway %s serving on port %s", __version__, args.port) + server.serve_forever() + except SystemExit: + raise + except Exception as err: + logger.exception("Unhandled exception: %s", err) + sys.exit(1) diff --git a/devops/src/ax/devops/apps/workers/repo_manager.py b/devops/src/ax/devops/gateway/repo_manager.py similarity index 91% rename from devops/src/ax/devops/apps/workers/repo_manager.py rename to devops/src/ax/devops/gateway/repo_manager.py index a6cef5cebf95..585fee2503a3 100644 --- a/devops/src/ax/devops/apps/workers/repo_manager.py +++ b/devops/src/ax/devops/gateway/repo_manager.py @@ -1,10 +1,8 @@ -import argparse import datetime import logging import os import re import shutil -import sys import time from concurrent.futures import ThreadPoolExecutor, as_completed from retrying import retry @@ -17,13 +15,10 @@ from ax.devops.redis.redis_client import RedisClient from ax.devops.scm.scm import GitClient, CodeCommitClient from ax.devops.settings import AxSettings -from ax.version import __version__ logger = logging.getLogger(__name__) BASE_DIR = '/ax/data/repos' -DEFAULT_CONCURRENCY = 20 -DEFAULT_INTERVAL = 30 NAMESPACE = 'gateway' TEMPLATE_DIR = '.argo' @@ -335,23 +330,3 @@ def 
update_repo(self, repo_type, vendor, protocol, repo_owner, repo_name, userna
         logger.info('Successfully scanned repository (%s)', url)
 
         return len(heads_to_update) > 0 or len(prev_heads_map) > 0
-
-
-def main():
-    parser = argparse.ArgumentParser()
-    parser.add_argument('-l', '--log-level', dest='log_level', type=int, default=logging.INFO, help="specify log level")
-    parser.add_argument('-c', '--concurrency', dest='concurrency', type=int, default=DEFAULT_CONCURRENCY, help="specify concurrency")
-    parser.add_argument('-i', '--interval', dest='interval', type=int, default=DEFAULT_INTERVAL, help="specify interval")
-    parser.add_argument('--version', action='version', version="%(prog)s {}".format(__version__))
-    args = parser.parse_args()
-
-    logging.basicConfig(stream=sys.stdout, level=args.log_level,
-                        format="%(asctime)s %(levelname)s %(threadName)s %(lineno)d %(name)s: %(message)s",
-                        datefmt="%Y-%m-%dT%H:%M:%S")
-    logging.getLogger('ax.devops.scm').setLevel(logging.WARNING)
-    logging.getLogger('ax.devops.kafka.kafka_client').setLevel(logging.WARNING)
-    logging.getLogger('kafka.conn').setLevel(logging.WARNING)
-    logging.getLogger('kafka.producer.kafka').setLevel(logging.WARNING)
-
-    repos_manager = RepoManager(args.concurrency, args.interval)
-    repos_manager.run()
diff --git a/devops/src/ax/devops/gateway/rest.py b/devops/src/ax/devops/gateway/rest.py
new file mode 100644
index 000000000000..b3fc1313c101
--- /dev/null
+++ b/devops/src/ax/devops/gateway/rest.py
@@ -0,0 +1,881 @@
+import copy
+import datetime
+import jira
+import json
+import jwt
+import logging
+import os
+import requests
+import time
+
+from concurrent.futures import ThreadPoolExecutor, as_completed
+from flask import Flask, request, jsonify, make_response, Response, redirect
+from werkzeug.exceptions import BadRequest
+from urllib.parse import unquote, urlparse
+
+from .gateway import Gateway
+from .constants import AxEventTypes, ScmVendors, SUPPORTED_TYPES
+from .event_translators import EventTranslator
+from ax.devops.jira.jira_utils import translate_jira_issue_event
+from ax.devops.kafka.kafka_client import ProducerClient
+from ax.devops.redis.redis_client import RedisClient, DB_RESULT
+from ax.devops.settings import AxSettings
+from ax.devops.utility.utilities import AxPrettyPrinter, top_k, sort_str_dictionaries
+from ax.exceptions import AXException, AXIllegalArgumentException, AXIllegalOperationException, \
+    AXApiResourceNotFound, AXApiInvalidParam, AXApiInternalError
+from ax.notification_center import CODE_JOB_CI_STATUS_REPORTING_FAILURE, CODE_JOB_CI_EVENT_CREATION_FAILURE, \
+    CODE_JOB_CI_YAML_UPDATE_FAILURE, CODE_PLATFORM_ERROR
+
+
+logger = logging.getLogger(__name__)
+
+app = Flask(__name__)
+gateway = None
+
+
+def get_json():
+    """Helper to retrieve json from the request body, or raise AXApiInvalidParam if invalid"""
+    try:
+        return request.get_json(force=True)
+    except Exception:
+        raise AXApiInvalidParam("Invalid json supplied")
+
+
+# For some reason, BadRequest is not handled by the generic Exception error handler, so both decorators are needed
+@app.errorhandler(400)
+@app.errorhandler(Exception)
+def error_handler(error):
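+    # maps AX exceptions onto 400/404/500 responses; anything else is wrapped as a generic internal error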
+    # Our exceptions and error handling are a complete mess. Need to clean this up and standardize across teams
+    if isinstance(error, AXException):
+        data = error.json()
+        if isinstance(error, (AXIllegalArgumentException, AXApiInvalidParam, AXIllegalOperationException)):
+            status_code = 400
+        elif isinstance(error, AXApiResourceNotFound):
+            status_code = 404
+        else:
+            logger.exception("Internal error")
+            status_code = 500
+    else:
+        if isinstance(error, BadRequest):
+            logger.exception("Bad request")
+            code = AXApiInvalidParam.code
+            status_code = error.code
+        else:
+            logger.exception("Internal error")
+            code = "ERR_AX_INTERNAL"
+            status_code = 500
+        data = {"code": code,
+                "message": str(error),
+                "detail": ""}
+    logger.warning('%s (status_code: %s): %s', error, status_code, data)
+    return make_response(jsonify(data), status_code)
+
+
+@app.route('/v1/ping', methods=['GET'])
+def ping():
+    return Response('"pong"', mimetype='application/json')
+
+
+@app.route('/v1/scm/test', methods=['POST'])
+def test():
+    """Test connection to SCM server."""
+    payload = get_json()
+    scm_type = payload.get('type', '').lower()
+    url = payload.get('url', '').lower()
+    username = payload.get('username', None)
+    password = payload.get('password', None)
+    logger.info('Received request (type: %s, url: %s, username: %s, password: ******)', scm_type, url, username)
+    if not scm_type:
+        raise AXApiInvalidParam('Missing required parameters', detail='Required parameters (type)')
+    if scm_type not in SUPPORTED_TYPES:
+        raise AXApiInvalidParam('Invalid parameter values', detail='Unsupported type ({})'.format(scm_type))
+    if scm_type == ScmVendors.GIT:
+        if not url:
+            raise AXApiInvalidParam('Missing required parameters',
+                                    detail='Require parameter (url) when type is {}'.format(ScmVendors.GIT))
+    elif not all([username, password]):
+        raise AXApiInvalidParam('Missing required parameters',
+                                detail='Required parameters (username, password, url)')
+    try:
+        repos = gateway.get_repos(scm_type=scm_type, url=url, username=username, password=password)
+    except Exception as e:
+        logger.warning('Failed to get repositories: %s', e)
+        raise AXApiInternalError('Failed to get repositories', detail=str(e))
+    else:
+        return jsonify({'repos': repos})
+
+
+@app.route('/v1/scm/events', methods=['POST'])
+def events():
+    """Create a DevOps event."""
+    payload, headers = get_json(), request.headers
+    try:
+        logger.info('Translating SCM event ...')
+        event_list = EventTranslator.translate(payload, headers)
+    except Exception as e:
+        logger.error('Failed to translate event: %s', e)
+        # Todo Tianhe Issue: #330 comment out for now because it is distracting
+        # gateway.event_notification_client.send_message_to_notification_center(
+        #     CODE_JOB_CI_EVENT_TRANSLATE_FAILURE,
+        #     detail={'payload': payload, 'error': str(e)})
+        raise AXApiInternalError('Failed to translate event', detail=str(e))
+    else:
+        logger.info('Successfully translated event')
+
+    kafka_client = ProducerClient()
+    successful_events = []
+    for event in event_list:
+        if event['type'] == AxEventTypes.PING:
+            logger.info('Received a PING event, skipping service creation ...')
+            continue
+        else:
+            try:
+                logger.info('Creating AX event ...\n%s', AxPrettyPrinter().pformat(event))
+                key = '{}_{}_{}'.format(event['repo'], event['branch'], event['commit'])
+                kafka_client.send(AxSettings.TOPIC_DEVOPS_CI_EVENT, key=key, value=event, timeout=120)
+            except Exception as e:
+                gateway.event_notification_client.send_message_to_notification_center(
+                    CODE_JOB_CI_EVENT_CREATION_FAILURE,
+                    detail={'event_type': event.get('type', 'UNKNOWN'),
+                            'error': str(e)})
+                logger.warning('Failed to create AX event: %s', e)
+            else:
+                logger.info('Successfully created AX event')
+                successful_events.append(event)
+    kafka_client.close()
+    return jsonify(successful_events)
+
+
+@app.route('/v1/scm/reports', methods=['POST'])
+def reports():
+    """Report build/test status to source control tool."""
+    payload = get_json()
+    logger.info('Received reporting request (payload: %s)', payload)
+    report_id = payload.get('id')
+    repo = payload.get('repo')
+    if not report_id:
+        raise AXApiInvalidParam('Missing required parameters', detail='Required parameters (id)')
+
+    try:
+        if not repo:
+            cache = gateway.redis_client.get(report_id, decoder=json.loads)
+            repo = cache['repo']
+        vendor = gateway.axops_client.get_tool(repo)['type']
+        if vendor not in gateway.scm_clients.keys():
+            raise AXApiInvalidParam('Invalid parameter values', detail='Unsupported type ({})'.format(vendor))
+        result = gateway.scm_clients[vendor].upload_job_result(payload)
+        if result == -1:
+            logger.info('GitHub does not support status report for the non-sha commits. Skip.')
+    except Exception as e:
+        logger.error('Failed to report status: %s', e)
+        gateway.event_notification_client.send_message_to_notification_center(
+            CODE_JOB_CI_STATUS_REPORTING_FAILURE, detail=payload)
+        raise AXApiInternalError('Failed to report status', detail=str(e))
+    else:
+        logger.info('Successfully reported status')
+        return jsonify(result)
+
+
+@app.route('/v1/scm/webhooks', methods=['GET', 'POST', 'DELETE'])
+def get_create_delete_webhooks():
+    """Get, create, or delete a webhook."""
+    payload = get_json()
+
+    repo = payload.get('repo')
+    vendor = payload.get('type')
+    username = payload.get('username')
+    password = payload.get('password')
+    if not all([repo, vendor]):
+        raise AXApiInvalidParam('Missing required parameters', detail='Required parameters (repo, type)')
+    if vendor not in gateway.scm_clients.keys():
+        raise AXApiInvalidParam('Invalid parameter values', detail='Unsupported type ({})'.format(vendor))
+
+    if username and password:
+        gateway.scm_clients[vendor].update_repo_info(repo, vendor, username, password)
+    if request.method == 'GET':
+        result = gateway.get_webhook(vendor, repo)
+    elif request.method == 'POST':
+        result = gateway.create_webhook(vendor, repo)
+    else:
+        result = gateway.delete_webhook(vendor, repo)
+    return jsonify(result)
+
+
+@app.route('/v1/scm/yamls', methods=['POST'])
+def post_yamls():
+    """Update YAML contents (i.e. policy, template)."""
+    payload = get_json()
+
+    vendor = payload.get('type')
+    repo = payload.get('repo')
+    branch = payload.get('branch')
+    if not all([vendor, repo, branch]):
+        raise AXApiInvalidParam('Missing required parameters', detail='Required parameters (type, repo, branch)')
+    if vendor not in gateway.scm_clients.keys():
+        raise AXApiInvalidParam('Invalid parameter values', detail='Unsupported type ({})'.format(vendor))
+
+    try:
+        # The arrival of events may not always be in the natural order of commits. For
+        # example, the user may resend an old event from the UI of the source control
+        # tool. In this case, we may update the YAML contents to an older version. To
+        # avoid this, we guarantee that every YAML update will only update the content
+        # to the latest version on a branch. More specifically, whenever we receive an
+        # event, we extract the repo and branch information, and find the HEAD of the
+        # branch. Then, we use the commit of HEAD to retrieve the YAML content, and
+        # update policies/templates correspondingly.
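+        # For example, if events for commits C1 -> C2 arrive out of order as
+        # (C2, C1), both updates resolve HEAD first, so both write C2's content
+        # and the stale C1 payload can never win.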
+        scm_client = gateway.scm_clients[vendor]
+        commit = scm_client.get_branch_head(repo, branch)
+        yaml_files = scm_client.get_yamls(repo, commit)
+        logger.info('Updating YAML contents (policy/template) ...')
+        gateway.axops_client.update_yamls(repo, branch, commit, yaml_files)
+    except Exception as e:
+        logger.error('Failed to update YAML contents: %s', e)
+        gateway.event_notification_client.send_message_to_notification_center(
+            CODE_JOB_CI_YAML_UPDATE_FAILURE, detail={'vendor': vendor,
+                                                     'repo': repo,
+                                                     'branch': branch,
+                                                     'error': str(e)})
+        raise AXApiInternalError('Failed to update YAML contents', str(e))
+    else:
+        logger.info('Successfully updated YAML contents')
+        return jsonify({})
+
+
+@app.route('/v1/scm/branches', methods=['GET', 'DELETE'])
+def get_delete_branches():
+    """Query branches."""
+    repo = request.args.get('repo')
+    branch = request.args.get('branch', "") or request.args.get('name', "")
+    if request.method == 'DELETE':
+        gateway.purge_branches(repo, branch)
+        return jsonify({})
+    else:
+        if branch and branch.startswith('~'):
+            branch = branch[1:]
+        order_by = request.args.get('order_by', "")
+        limit = request.args.get('limit', "")
+        if limit:
+            limit = int(limit)
+        branches = gateway.get_branches(repo, branch, order_by, limit)
+        return jsonify({'data': branches})
+
+
+@app.route('/v1/scm/commits', methods=['GET', 'DELETE'])
+def get_commits():
+    """Query commits."""
+    # Repo and branch are optional parameters that can always be used to reduce
+    # search scope. Repo is used to construct the path to the workspace so that
+    # the number of commands we issue can be significantly reduced. Branch can
+    # be used in every command to filter commits by reference (branch).
+    repo = request.args.get('repo')
+    branch = request.args.get('branch')
+    repo_branch = request.args.get('repo_branch')
+    if repo_branch and (repo or branch):
+        raise AXApiInvalidParam('Ambiguous query condition',
+                                'Supply either repo_branch or repo/branch, but not both')
+    workspaces = gateway._parse_repo_branch(repo, branch, repo_branch)
+
+    # If commit / revision is supplied, we will disregard all other parameters.
+    # Also, we no longer use `git log` to issue the query but use `git show` to
+    # directly show the commit information.
+    commit = request.args.get('commit') or request.args.get('revision')
+
+    # Full-text search can be performed against 3 fields: author, committer, and description.
+    # To perform narrow search, specify `author=~&committer=~&description=~`.
+    # To perform broad search, specify `search=~`.
+    # Note that, in git, all queries are full-text search already, so we will strip off `~`.
+    search = request.args.get('search')
+    author = request.args.get('author', None)
+    committer = request.args.get('committer', None)
+    description = request.args.get('description', None)
+    use_broad_search = bool(search)
+    if author:
+        author = author.split(',')
+    else:
+        author = [None]
+    if committer:
+        committer = committer.split(',')
+    else:
+        committer = [None]
+    author_committer = []
+    for i in range(len(author)):
+        for j in range(len(committer)):
+            author_committer.append([author[i], committer[j]])
+
+    # We use time-based pagination. min_time is converted to since and max_time is
+    # converted to until. The time format is seconds since epoch (UTC).
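+    # e.g. (hypothetical request) ?min_time=1500000000 becomes since='2017-07-14T02:40:00' below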
+    since = request.args.get('min_time')
+    until = request.args.get('max_time')
+    if since:
+        since = datetime.datetime.utcfromtimestamp(int(since)).strftime('%Y-%m-%dT%H:%M:%S')
+    if until:
+        until = datetime.datetime.utcfromtimestamp(int(until)).strftime('%Y-%m-%dT%H:%M:%S')
+
+    # Limit specifies the maximal number of records that we return. Fields specifies
+    # the fields that we return. Sort allows the sorting of final results.
+    limit = request.args.get('limit')
+    fields = request.args.get('fields')
+    sorter = request.args.get('sort')
+    if limit:
+        limit = int(limit)
+    if fields:
+        fields = set(fields.split(','))
+    if sorter:
+        sorters = sorter.split(',')
+        valid_keys = {'repo', 'revision', 'author', 'author_date', 'committer', 'commit_date', 'date', 'description'}
+        valid_sorters = []
+        for i in range(len(sorters)):
+            key = sorters[i][1:] if sorters[i].startswith('-') else sorters[i]
+            if key in valid_keys:
+                valid_sorters.append(sorters[i])
+        sorter = valid_sorters
+
+    logger.info('Retrieving commits (repo: %s, branch: %s, commit: %s, limit: %s) ...', repo, branch, commit, limit)
+
+    # Prepare arguments for workspace scanning
+    search_conditions = []
+    for key in workspaces.keys():
+        if not os.path.isdir(key):  # If the workspace does not exist, we should skip scanning it
+            continue
+        elif commit:
+            search_conditions.append({'workspace': key, 'commit': commit})
+        elif use_broad_search:
+            for j in range(len(author_committer)):
+                _author, _committer = author_committer[j][0], author_committer[j][1]
+                _search_dict = {'workspace': key,
+                                'branch': list(workspaces[key]),
+                                'since': since,
+                                'until': until,
+                                'limit': limit,
+                                'author': _author,
+                                'committer': _committer,
+                                'description': description,
+                                }
+                for field in {'author', 'committer', 'description'}:
+                    new_dict = copy.deepcopy(_search_dict)
+                    new_dict[field] = search
+                    search_conditions.append(new_dict)
+        else:
+            for j in range(len(author_committer)):
+                _author, _committer = author_committer[j][0], author_committer[j][1]
+                search_conditions.append({'workspace': key, 'branch': list(workspaces[key]),
+                                          'author': _author, 'committer': _committer, 'description': description,
+                                          'since': since, 'until': until, 'limit': limit})
+
+    # Scan workspaces
+    commits_list = []
+    with ThreadPoolExecutor(max_workers=20) as executor:
+        futures = []
+        for i in range(len(search_conditions)):
+            if commit:
+                futures.append(executor.submit(Gateway._get_commit, **search_conditions[i]))
+            else:
+                futures.append(executor.submit(Gateway._get_commits, **search_conditions[i]))
+        for future in as_completed(futures):
+            try:
+                data = future.result()
+                if data:
+                    commits_list.append(data)
+            except Exception as e:
+                logger.warning('Unexpected exception occurred during processing: %s', e)
+
+    if commit:
+        # If commit is supplied in the query, the returned list is already a list
+        # of commits, so we do not need to run the top_k algorithm
+        top_commits = sorted(commits_list, key=lambda v: -v['date'])
+    else:
+        # Retrieve top k commits
+        top_commits = top_k(commits_list, limit, key=lambda v: -v['date'])
+
+    # Sort commits
+    if sorter:
+        top_commits = sort_str_dictionaries(top_commits, sorter)
+    else:
+        top_commits = sorted(top_commits, key=lambda v: -v['date'])
+
+    # Filter fields
+    for i in range(len(top_commits)):
+        for k in list(top_commits[i].keys()):
+            if fields is not None and k not in fields:
+                del top_commits[i][k]
+    logger.info('Successfully retrieved commits')
+
+    return jsonify({'data': top_commits})
+
+
+@app.route('/v1/scm/commit/<pk>', methods=['GET'])
+def get_commit(pk):
+    """Get a single commit."""
+
+    def get_commits_internal(commit_arg, repo_arg=None):
+        """Get commit(s) by commit hash."""
+        # Normally, this function should return only 1 commit object. However, if a repo and its forked repo
+        # both appear in our workspace, there could be multiple commit objects.
+
+        # If repo is not supplied, we need to scan all workspaces
+        if repo_arg:
+            _, vendor, repo_owner, repo_name = Gateway.parse_repo(repo_arg)
+            workspaces = ['{}/{}/{}/{}'.format(Gateway.BASE_DIR, vendor, repo_owner, repo_name)]
+        else:
+            dirs = [dir[0] for dir in os.walk(Gateway.BASE_DIR) if dir[0].endswith('/.git')]
+            workspaces = list(map(lambda v: v[:-5], dirs))
+
+        commits = []
+        with ThreadPoolExecutor(max_workers=20) as executor:
+            futures = []
+            for i in range(len(workspaces)):
+                futures.append(executor.submit(Gateway._get_commit, workspaces[i], commit=commit_arg))
+            for future in as_completed(futures):
+                try:
+                    data = future.result()
+                    if data:
+                        commits.append(data)
+                except Exception as e:
+                    logger.warning('Unexpected exception occurred during processing: %s', e)
+
+        return commits
+
+    repo = request.args.get('repo', "")
+    if repo:
+        repo = unquote(repo)
+    logger.info('Retrieving commit (repo: %s, commit: %s) ...', repo, pk)
+    commits_res = get_commits_internal(pk, repo)
+    if not commits_res:
+        logger.warning('Failed to retrieve commit')
+        raise AXApiInvalidParam('Invalid revision', detail='Invalid revision ({})'.format(pk))
+    else:
+        if len(commits_res) > 1:
+            logger.warning('Found multiple commits with given sha, returning the first one ...')
+        logger.info('Successfully retrieved commit')
+        return jsonify(commits_res[0])
+
+
+@app.route('/v1/scm/files', methods=['PUT', 'DELETE'])
+def files():
+    """Upload a file to S3 (PUT) or delete it from S3 (DELETE)."""
+    repo = request.args.get('repo')
+    branch = request.args.get('branch')
+    path = request.args.get('path')
+    if not all([repo, branch, path]):
+        raise AXApiInvalidParam('Missing required parameters', 'Missing required parameters (repo, branch, path)')
+    if path.startswith('/'):
+        path = path[1:]
+
+    if request.method == 'PUT':
+        resp = Gateway._put_file(repo, branch, path)
+    else:
+        resp = Gateway._delete_file(repo, branch, path)
+    return jsonify(resp)
+
+
+# API for Argo Approval
+@app.route('/v1/results/<id>/approval', methods=['GET', ])
+def approval(id):
+    """Save an approval result in redis."""
+    token = request.args.get('token', None)
+    result = jwt.decode(token, 'ax', algorithms=['HS256'])
+    redis_client = RedisClient('redis', db=DB_RESULT)
+    result['timestamp'] = int(time.time())
+
+    logger.info("Decode token {}, \n to {}".format(token, json.dumps(result, indent=2)))
+
+    # differentiate key for approval result from the task result
+    uuid = result['leaf_id'] + '-axapproval'
+    try:
+        logger.info("Setting approval result (%s) to Redis ...", uuid)
+        try:
+            state = gateway.axdb_client.get_approval_info(root_id=result['root_id'], leaf_id=result['leaf_id'])
+            if state and state[0]['result'] != 'WAITING':
+                return redirect("https://{}/error/404/type/ERR_AX_ILLEGAL_OPERATION;msg=The%20link%20is%20no%20longer%20valid.".format(result['dns']))
+
+            if gateway.axdb_client.get_approval_results(leaf_id=result['leaf_id'], user=result['user']):
+                return redirect("https://{}/error/404/type/ERR_AX_ILLEGAL_OPERATION;msg=Response%20has%20already%20been%20submitted.".format(result['dns']))
+
+            # push result to redis (brpop)
+            redis_client.rpush(uuid, value=result, encoder=json.dumps)
+        except Exception as exc:
+            logger.exception(exc)
+        # save result to axdb
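+        # (the Redis push above unblocks any worker waiting via brpop; the AXDB
+        # record below is the durable copy consulted by the duplicate checks)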
+        gateway.axdb_client.create_approval_results(leaf_id=result['leaf_id'],
+                                                    root_id=result['root_id'],
+                                                    result=result['result'],
+                                                    user=result['user'],
+                                                    timestamp=result['timestamp'])
+    except Exception as e:
+        msg = 'Failed to save approval result to Redis: {}'.format(e)
+        logger.error(msg)
+        raise
+    else:
+        logger.info('Successfully saved result to Redis')
+        return redirect("https://{}/success/201;msg=Response%20has%20been%20submitted%20successfully.".format(result['dns']))
+
+
+# API for Nexus
+@app.route('/v1/results/test_nexus_credential', methods=['PUT', ])
+def test_nexus_credential():
+    payload = get_json()
+    logger.info('Received testing request (payload: %s)', payload)
+    username = payload.get('username', "")
+    password = payload.get('password', "")
+    port = payload.get('port', 8081)
+    hostname = payload.get('hostname', None)
+
+    if not hostname:
+        raise AXApiInvalidParam('Missing required parameters: Hostname', detail='Missing required parameters, hostname')
+
+    response = requests.get('{}:{}/nexus/service/local/users'.format(hostname, port),
+                            auth=(username, password), timeout=10)
+
+    if response.ok:
+        return jsonify({})
+    else:
+        response.raise_for_status()
+
+
+# API for reporting platform notification
+@app.route('/v1/results/redirect_notification_center', methods=['POST', ])
+def redirect_notification_center():
+    payload = get_json()
+    logger.info('Received redirecting nc request (payload: %s)', payload)
+    detail = payload.get('detail', "")
+    try:
+        gateway.event_notification_client.send_message_to_notification_center(
+            CODE_PLATFORM_ERROR, detail={'message': detail})
+    except Exception:
+        logger.exception("Failed to send event to notification center")
+        raise
+    return jsonify({})
+
+
+def _query_match(data, query_dict):
+    for k, v in query_dict.items():
+        if data.get(k, None) != v:
+            return False
+    return True
+
+
+def _normalize_data(proj_dict):
+    filtered_project_keys = ('id', 'key', 'name', 'projectTypeKey')
+    return dict([(k, proj_dict.get(k, None)) for k in filtered_project_keys])
+
+
+# APIs for JIRA
+@app.route('/v1/jira/users', methods=['GET'])
+def get_users():
+    query_dict = dict()
+    jira_client = Gateway.init_jira_client(gateway.axops_client)
+    filtered_users_keys = ('key', 'active', 'fullname', 'email')
+    for pk in filtered_users_keys:
+        pv = request.args.get(pk, None)
+        if pk == 'active':
+            if pv == 'true':
+                pv = True
+            elif pv == 'false':
+                pv = False
+        if pv is not None:
+            query_dict[pk] = pv
+
+    users = jira_client.users()
+    users = [u for u in users if _query_match(u, query_dict)]
+    return jsonify({'data': users})
+
+
+@app.route('/v1/jira/projects', methods=['GET'])
+def get_projects():
+    query_dict = dict()
+    jira_client = Gateway.init_jira_client(gateway.axops_client)
+    filtered_project_keys = ('id', 'key', 'name', 'projectTypeKey')
+    for pk in filtered_project_keys:
+        pv = request.args.get(pk, None)
+        if pv is not None:
+            query_dict[pk] = pv
+
+    ps = jira_client.get_projects(json_result=True)
+    ps = [p for p in ps if _query_match(p, query_dict)]
+    ps = [_normalize_data(p) for p in ps]
+    return jsonify({'data': ps})
+
+
+@app.route('/v1/jira/projects/<name>', methods=['GET'])
+def get_project(name):
+    jira_client = Gateway.init_jira_client(gateway.axops_client)
+    proj = jira_client.get_project(name, json_result=True)
+    return jsonify(_normalize_data(proj))
+
+
+@app.route('/v1/jira/projects/test', methods=['POST'])
+def test_project():
+    payload = get_json()
+    url = payload.get('url', '').lower()
+    username = payload.get('username', None)
+    password = payload.get('password', None)
+    logger.info('Received request (url: %s, username: %s, password: ******)', url, username)
+
+    if not all([url, username, password]):
+        raise AXApiInvalidParam('Missing required parameters', detail='Required parameters (username, password, url)')
+
+    try:
+        Gateway.init_jira_client(None, url=url, username=username, password=password)
+    except requests.exceptions.ConnectionError as exc:
+        raise AXApiInternalError('Invalid URL', detail=str(exc))
+    except jira.exceptions.JIRAError as exc:
+        raise AXApiInternalError('Invalid authentication', detail=str(exc))
+    except Exception as exc:
+        raise AXApiInternalError('Failed to connect to JIRA', detail=str(exc))
+    else:
+        return jsonify({})
+
+
+@app.route('/v1/jira/issues', methods=['POST'])
+def create_issue():
+    jira_client = Gateway.init_jira_client(gateway.axops_client)
+    payload = get_json()
+    logger.info('Received Jira issue creation request (%s)', payload)
+    project = payload.get('project', None)
+    summary = payload.get('summary', None)
+    issuetype = payload.get('issuetype', None)
+    reporter = payload.get('reporter', None)
+
+    description = payload.get('description', None)  # optional
+
+    if project is None:
+        raise AXApiInvalidParam('Missing required parameters: Project',
+                                detail='Missing required parameters, Project')
+    if summary is None:
+        raise AXApiInvalidParam('Missing required parameters: Summary',
+                                detail='Missing required parameters, Summary')
+    if issuetype is None:
+        raise AXApiInvalidParam('Missing required parameters: Issuetype',
+                                detail='Missing required parameters, Issuetype')
+    if reporter is None:
+        raise AXApiInvalidParam('Missing required parameters: Reporter',
+                                detail='Missing required parameters, Reporter')
+
+    try:
+        issue_obj = jira_client.create_issue(project, summary,
+                                             issuetype=issuetype, reporter=reporter, description=description)
+    except jira.exceptions.JIRAError as exc:
+        raise AXApiInternalError('Invalid Parameters', detail=str(exc))
+    else:
+        issue_dict = copy.deepcopy(issue_obj.raw['fields'])
+        issue_dict['url'] = issue_obj.self
+        issue_dict['id'] = issue_obj.id
+        issue_dict['key'] = issue_obj.key
+        return jsonify(issue_dict)
+
+
+@app.route('/v1/jira/issues', methods=['GET'])
+def get_issues():
+    jira_client = Gateway.init_jira_client(gateway.axops_client)
+    filtered_issue_keys = ('project', 'status', 'component', 'labels', 'issuetype', 'priority',
+                           'creator', 'assignee', 'reporter', 'fixversion', 'affectedversion')
+    query_ids = request.args.get('ids', None)
+    if query_ids is not None:
+        issues = []
+        with ThreadPoolExecutor(max_workers=5) as executor:
+            futures = []
+            id_list = query_ids.strip().split(',')
+            logger.info('Query the following Jira issues: %s', id_list)
+            for issue_id in id_list:
+                futures.append(executor.submit(jira_client.get_issue, issue_id.strip(), json_result=True))
+            for future in as_completed(futures):
+                try:
+                    issues.append(future.result())
+                except Exception as exc:
+                    logger.warning('Unexpected exception %s', exc)
+    else:
+        kwargs = dict()
+        for key in request.args.keys():
+            if key.lower() in filtered_issue_keys:
+                kwargs[key.lower()] = request.args.get(key)
+        logger.info('Query kwargs: %s', kwargs)
+        issues = jira_client.query_issues(json_result=True, **kwargs)
+
+    return jsonify(issues)
+
+
+@app.route('/v1/jira/issues/<issue_id>', methods=['GET'])
+def get_issue(issue_id):
+    jira_client = Gateway.init_jira_client(gateway.axops_client)
+    issue = jira_client.get_issue(issue_id, json_result=True)
+    return jsonify(issue)
+
+
+@app.route('/v1/jira/issues/<issue_id>/getcomments', methods=['GET'])
+def get_comments(issue_id):
+    jira_client = Gateway.init_jira_client(gateway.axops_client)
+    default_max_results = 3
+    max_results = int(request.args.get('max_results', default_max_results))
+    comments = jira_client.get_issue_comments(issue_id, latest_num=max_results, json_result=True)
+    return jsonify(comments)
+
+
+@app.route('/v1/jira/issues/<issue_id>/addcomments', methods=['POST'])
+def add_comment(issue_id):
+    jira_client = Gateway.init_jira_client(gateway.axops_client)
+    payload = get_json()
+    comment = payload.get('comment', None)
+    user = payload.get('user', None)
+
+    if not comment:
+        raise AXApiInvalidParam('Require Comment message info')
+    if not user:
+        raise AXApiInvalidParam('Require Commenter info')
+
+    try:
+        jira_client.add_issue_comment(issue_id, comment, commenter=user)
+    except Exception as exc:
+        raise AXApiInternalError('Failed to add comment', detail=str(exc))
+    return jsonify({})
+
+
+@app.route('/v1/jira/issuetypes', methods=['GET'])
+def get_issue_types():
+    jira_client = Gateway.init_jira_client(gateway.axops_client)
+    issue_types = jira_client.get_issue_types(json_result=True)
+    return jsonify({'data': issue_types})
+
+
+@app.route('/v1/jira/issuetypes/<issue_type>', methods=['GET'])
+def get_issue_type(issue_type):
+    jira_client = Gateway.init_jira_client(gateway.axops_client)
+    issue_type = jira_client.get_issue_type_by_name(issue_type, json_result=True)
+    return jsonify(issue_type)
+
+
+@app.route('/v1/jira/webhooks', methods=['GET'])
+def get_webhooks():
+    jira_client = Gateway.init_jira_client(gateway.axops_client)
+    ax_webhooks = jira_client.get_ax_webhooks()
+    return jsonify({'data': ax_webhooks})
+
+
+@app.route('/v1/jira/webhooks', methods=['POST'])
+def create_webhook():
+    payload = get_json()
+    logger.info('Received jira webhook creation request, %s', payload)
+
+    url = payload.get('url', None)
+    username = payload.get('username', None)
+    password = payload.get('password', None)
+    webhook = payload.get('webhook', None)
+    projects = payload.get('projects', None)
+
+    # Create ingress
+    try:
+        dnsname = urlparse(webhook).netloc
+        logger.info('Creating ingress for Jira webhook %s', dnsname)
+        gateway.axsys_client.create_ingress(dnsname)
+    except Exception as exc:
+        logger.error('Failed to create ingress for webhook: %s', str(exc))
+        raise AXApiInternalError('Failed to create ingress for webhook', str(exc))
+    else:
+        logger.info('Successfully created ingress for webhook')
+
+    # Create webhook
+    jira_client = Gateway.init_jira_client(gateway.axops_client, url=url, username=username, password=password)
+    try:
+        if projects:
+            logger.info('Filtered projects are: %s', projects)
+            if isinstance(projects, str):
+                projects = json.loads(projects)
+        else:
+            logger.info('No project filter')
+            projects = None
+        wh = jira_client.create_ax_webhook(webhook, projects=projects)
+    except Exception as exc:
+        logger.exception(exc)
+        raise AXApiInternalError('Failed to create Jira webhook', detail=str(exc))
+    return jsonify(wh.json())
+
+
+@app.route('/v1/jira/webhooks', methods=['PUT'])
+def modify_webhook():
+    jira_client = Gateway.init_jira_client(gateway.axops_client)
+    payload = get_json()
+    projects = payload.get('projects', None)
+    logger.info('Received jira webhook update request, %s', payload)
+    # Update webhook
+    try:
+        if projects:
+            logger.info('Filtered projects are: %s', projects)
+            if isinstance(projects, str):
+                projects = json.loads(projects)
+        else:
+            logger.info('No project filter')
+            projects = None
+        jira_client.update_ax_webhook(projects)
+    except Exception as exc:
+        logger.exception(exc)
+        raise AXApiInternalError('Failed to update Jira webhook', detail=str(exc))
+    else:
+        logger.info('Successfully updated Jira webhook')
+    return jsonify({})
+
+
+@app.route('/v1/jira/webhooks', methods=['DELETE'])
+def delete_webhook():
+    jira_client = Gateway.init_jira_client(gateway.axops_client)
+    wh = jira_client.get_ax_webhook()
+    if not wh:
+        logger.warning('No webhook on Jira server, ignore it')
+        return jsonify({})
+
+    # Delete ingress
+    try:
+        logger.info('Deleting ingress for Jira webhook %s', wh['url'])
+        gateway.axsys_client.delete_ingress(urlparse(wh['url']).netloc)
+    except Exception as exc:
+        logger.error('Failed to delete ingress for webhook: %s', str(exc))
+        raise AXApiInternalError('Failed to delete ingress for webhook', str(exc))
+    else:
+        logger.info('Successfully deleted ingress for webhook')
+    # Delete webhook
+    try:
+        jira_client.delete_ax_webhook()
+    except Exception as exc:
+        logger.exception(exc)
+        raise AXApiInternalError('Failed to delete Jira webhook', detail=str(exc))
+    return jsonify({})
+
+
+@app.route('/v1/jira/events', methods=['POST'])
+def create_jira_event():
+    checked_fields = ('description', 'project', 'status', 'summary', 'Key')
+    delete_event = 'jira:issue_deleted'
+    update_event = 'jira:issue_updated'
+
+    payload = get_json()
+    try:
+        logger.info('Translating JIRA event ...')
+        event = translate_jira_issue_event(payload)
+    except Exception as exc:
+        logger.error('Failed to translate event: %s', exc)
+        raise AXApiInternalError('Failed to translate event', detail=str(exc))
+    else:
+        logger.info('Successfully translated event: %s', event)
+
+    try:
+        if event['type'] == update_event:
+            logger.info('The following Jira field(s) get updated: %s', event['changed_fields'])
+            if event['status_category_id'] == 3:
+                logger.info('Jira issue %s is closed', event['id'])
+                logger.info('Delete Jira on AXDB %s', event['id'])
+                gateway.axops_client.delete_jira_issue(event['id'])
+            elif event['changed_fields'] and any(f in event['changed_fields'] for f in checked_fields):
+                logger.info('Update Jira content on AXDB ...')
+                gateway.axops_client.update_jira_issue(event['axdb_content'])
+            else:
+                logger.info('No Jira content need to be updated')
+        elif event['type'] == delete_event:
+            logger.info('Delete Jira on AXDB %s', event['id'])
+            gateway.axops_client.delete_jira_issue(event['id'])
+        else:
+            logger.warning('Not supported event: (%s), ignore it', event['type'])
+    except Exception as exc:
+        raise AXApiInternalError('Failed to update JIRA content on AXDB', detail=str(exc))
+    else:
+        return jsonify({})
diff --git a/devops/src/ax/devops/scm_rest/__init__.py b/devops/src/ax/devops/gateway/scm_rest/__init__.py
similarity index 99%
rename from devops/src/ax/devops/scm_rest/__init__.py
rename to devops/src/ax/devops/gateway/scm_rest/__init__.py
index 341e67024bf1..86b111acfad8 100644
--- a/devops/src/ax/devops/scm_rest/__init__.py
+++ b/devops/src/ax/devops/gateway/scm_rest/__init__.py
@@ -13,6 +13,7 @@
 TEMPLATE_DIR = '.argo'
 
+
 class BaseScmRestClient(metaclass=abc.ABCMeta):
     """Base REST API wrapper for SCM."""
 
diff --git a/devops/src/ax/devops/scm_rest/bitbucket_client.py b/devops/src/ax/devops/gateway/scm_rest/bitbucket_client.py
similarity index 99%
rename from devops/src/ax/devops/scm_rest/bitbucket_client.py
rename to devops/src/ax/devops/gateway/scm_rest/bitbucket_client.py
index a69c91f7b666..25067b5ac42e 100644
--- a/devops/src/ax/devops/scm_rest/bitbucket_client.py
+++ b/devops/src/ax/devops/gateway/scm_rest/bitbucket_client.py
@@ -6,7 +6,7 @@
from dateutil.tz import tzlocal from ax.devops.redis.redis_client import RedisClient, DB_REPORTING -from ax.devops.scm_rest import BaseScmRestClient +from . import BaseScmRestClient from . import TEMPLATE_DIR redis_client = RedisClient('redis', db=DB_REPORTING) diff --git a/devops/src/ax/devops/scm_rest/github_client.py b/devops/src/ax/devops/gateway/scm_rest/github_client.py similarity index 99% rename from devops/src/ax/devops/scm_rest/github_client.py rename to devops/src/ax/devops/gateway/scm_rest/github_client.py index b57917bfb8c8..3352877e8c71 100644 --- a/devops/src/ax/devops/scm_rest/github_client.py +++ b/devops/src/ax/devops/gateway/scm_rest/github_client.py @@ -6,7 +6,7 @@ from dateutil.tz import tzlocal from ax.devops.redis.redis_client import RedisClient, DB_REPORTING -from ax.devops.scm_rest import BaseScmRestClient +from . import BaseScmRestClient from . import TEMPLATE_DIR logger = logging.getLogger(__name__) diff --git a/devops/src/ax/devops/scm_rest/gitlab_client.py b/devops/src/ax/devops/gateway/scm_rest/gitlab_client.py similarity index 99% rename from devops/src/ax/devops/scm_rest/gitlab_client.py rename to devops/src/ax/devops/gateway/scm_rest/gitlab_client.py index f200ed7a4935..214cdc83aed6 100644 --- a/devops/src/ax/devops/scm_rest/gitlab_client.py +++ b/devops/src/ax/devops/gateway/scm_rest/gitlab_client.py @@ -6,7 +6,7 @@ from ax.devops.exceptions import UnknownRepository from ax.devops.redis.redis_client import RedisClient, DB_REPORTING -from ax.devops.scm_rest import BaseScmRestClient +from . import BaseScmRestClient from . import TEMPLATE_DIR logger = logging.getLogger(__name__) diff --git a/devops/test/ax/test/devops/e2e/gateway/__init__.py b/devops/test/ax/test/devops/e2e/gateway/__init__.py deleted file mode 100644 index e69de29bb2d1..000000000000 diff --git a/devops/test/ax/test/devops/e2e/gateway/conftest.py b/devops/test/ax/test/devops/e2e/gateway/conftest.py deleted file mode 100644 index bcf971d11772..000000000000 --- a/devops/test/ax/test/devops/e2e/gateway/conftest.py +++ /dev/null @@ -1,34 +0,0 @@ -# Copyright 2015-2017 Applatix, Inc. All rights reserved. - -import logging -import os -import pytest -import sys - -logging.basicConfig(format="%(asctime)s.%(msecs)03d %(levelname)s %(name)s %(threadName)s: %(message)s", - datefmt="%Y-%m-%dT%H:%M:%S", - level=logging.INFO, - stream=sys.stdout) - - -def pytest_addoption(parser): - parser.addoption("--concurrency", action="store", dest='concurrency', type=int, - default=20, help="Number of concurrent requests to be sent") - parser.addoption("--max-request", action="store", dest='max_request', type=int, - default=1000, help="Number of maximal requests to be sent") - - -@pytest.fixture -def gateway(): - hostname = 'gateway.axsys' if os.environ.get('KUBERNETES_SERVICE_HOST') else 'localhost' - return 'http://{}:8889'.format(hostname) - - -@pytest.fixture -def concurrency(request): - return request.config.getoption('--concurrency') - - -@pytest.fixture -def max_request(request): - return request.config.getoption('--max-request') diff --git a/devops/test/ax/test/devops/e2e/gateway/test_gateway.py b/devops/test/ax/test/devops/e2e/gateway/test_gateway.py deleted file mode 100644 index 12453e73ea4a..000000000000 --- a/devops/test/ax/test/devops/e2e/gateway/test_gateway.py +++ /dev/null @@ -1,51 +0,0 @@ -# Copyright 2015-2017 Applatix, Inc. All rights reserved. 
- -import logging -import requests -import time -from concurrent.futures import ThreadPoolExecutor, as_completed - -logger = logging.getLogger(__name__) -logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.ERROR) - - -def test_get_branches(gateway, concurrency, max_request): - """Stress test against branch API on gateway - - :param gateway: - :param concurrency: - :param max_request: - :returns: - """ - - def get_branches(sn): - """Get branches - - :param sn: - :returns: - """ - logger.info('Retrieving branches (sn: %s) ...', sn) - start_time = time.time() - try: - resp = requests.get('{}/v1/scm/branches'.format(gateway)) - except Exception as e: - logger.error('Failed to retrieve branches (sn: %s): %s', sn, str(e)) - else: - branches = resp.json()['data'] - end_time = time.time() - logger.info('Successfully retrieved %s branches (sn: %s) in %s seconds', len(branches), sn, end_time - start_time) - return branches - - start_time = time.time() - - count = 0 - with ThreadPoolExecutor(max_workers=concurrency) as executor: - futures = [] - for i in range(max_request): - futures.append(executor.submit(get_branches, i)) - for future in as_completed(futures): - if future.result(): - count += 1 - - end_time = time.time() - logger.info('Totally spent %s seconds to process %s requests', end_time - start_time, count) diff --git a/platform/config/service/mvc/gateway-svc.yml.in b/platform/config/service/mvc/gateway-svc.yml.in index ced22b881f9a..479b12d365a5 100644 --- a/platform/config/service/mvc/gateway-svc.yml.in +++ b/platform/config/service/mvc/gateway-svc.yml.in @@ -61,38 +61,17 @@ spec: cpu: 200m memory: 300Mi limits: - cpu: 400m + cpu: 800m memory: 600Mi volumeMounts: - mountPath: /ax/data name: gateway-persistent-storage livenessProbe: httpGet: - path: / + path: /v1/ping port: 8889 initialDelaySeconds: 600 periodSeconds: 120 - - name: repomanager - image: ${REGISTRY}/${NAMESPACE}/repomanager:${VERSION} - resources: - requests: - cpu: 150m - memory: 400Mi - limits: - cpu: 200m - memory: 600Mi - volumeMounts: - - mountPath: /ax/data - name: gateway-persistent-storage - - name: axeventtrigger - image: ${REGISTRY}/${NAMESPACE}/axeventtrigger:${VERSION} - resources: - requests: - cpu: 75m - memory: 200Mi - limits: - cpu: 200m - memory: 400Mi nodeSelector: ax.tier: master imagePullSecrets: diff --git a/platform/config/service/standard/gateway-svc.yml.in b/platform/config/service/standard/gateway-svc.yml.in index b2a8957171ab..0233c34cb658 100644 --- a/platform/config/service/standard/gateway-svc.yml.in +++ b/platform/config/service/standard/gateway-svc.yml.in @@ -62,38 +62,17 @@ spec: cpu: 200m memory: 300Mi limits: - cpu: 400m + cpu: 800m memory: 600Mi volumeMounts: - mountPath: /ax/data name: gateway-persistent-storage livenessProbe: httpGet: - path: / + path: /v1/ping port: 8889 initialDelaySeconds: 600 periodSeconds: 120 - - name: repomanager - image: ${REGISTRY}/${NAMESPACE}/repomanager:${VERSION} - resources: - requests: - cpu: 150m - memory: 400Mi - limits: - cpu: 200m - memory: 600Mi - volumeMounts: - - mountPath: /ax/data - name: gateway-persistent-storage - - name: axeventtrigger - image: ${REGISTRY}/${NAMESPACE}/axeventtrigger:${VERSION} - resources: - requests: - cpu: 75m - memory: 200Mi - limits: - cpu: 200m - memory: 400Mi nodeSelector: ax.tier: applatix imagePullSecrets: diff --git a/platform/tests/kubeobject/testdata/gateway-svc.yml.in b/platform/tests/kubeobject/testdata/gateway-svc.yml.in index caa0cb500c17..f473fe35f914 100644 --- 
a/platform/tests/kubeobject/testdata/gateway-svc.yml.in +++ b/platform/tests/kubeobject/testdata/gateway-svc.yml.in @@ -63,19 +63,7 @@ spec: cpu: 200m memory: 300Mi limits: - cpu: 400m - memory: 600Mi - volumeMounts: - - mountPath: /ax/data - name: gateway-persistent-storage - - name: repomanager - image: ${REGISTRY}/${NAMESPACE}/repomanager:${VERSION} - resources: - requests: - cpu: 150m - memory: 400Mi - limits: - cpu: 200m + cpu: 800m memory: 600Mi volumeMounts: - mountPath: /ax/data