From 0cc8910e8f130ca1fdfc3b2330d1da41715461a6 Mon Sep 17 00:00:00 2001 From: Donny Peeters <46660228+Donnype@users.noreply.github.com> Date: Wed, 12 Jul 2023 14:58:35 +0200 Subject: [PATCH 01/18] Use fix multiprocessing bug on macOS where `qsize()` is not implemented (#1374) Co-authored-by: ammar92 Co-authored-by: noamblitz <43830693+noamblitz@users.noreply.github.com> --- boefjes/boefjes/app.py | 38 +++++++++++++++++++------------------- boefjes/tests/conftest.py | 35 +++++++++++++++-------------------- boefjes/tests/test_app.py | 12 ++++-------- 3 files changed, 38 insertions(+), 47 deletions(-) diff --git a/boefjes/boefjes/app.py b/boefjes/boefjes/app.py index 07a71391915..f93f504e696 100644 --- a/boefjes/boefjes/app.py +++ b/boefjes/boefjes/app.py @@ -3,7 +3,7 @@ import os import signal import time -from typing import Callable, Dict, List +from typing import Dict, List, Tuple from pydantic import ValidationError from requests import HTTPError @@ -27,26 +27,27 @@ class SchedulerWorkerManager(WorkerManager): def __init__( self, item_handler: Handler, - client_factory: Callable[[], SchedulerClientInterface], + scheduler_client: SchedulerClientInterface, settings: Settings, log_level: str, # TODO: (re)move? ): self.item_handler = item_handler - self.client_factory = client_factory - self.scheduler_client = client_factory() + self.scheduler_client = scheduler_client self.settings = settings - self.task_queue = mp.Queue() - self.handling_tasks = mp.Manager().dict() + manager = mp.Manager() + + self.task_queue = manager.Queue() # multiprocessing.Queue() will not work on macOS, see mp.Queue.qsize() + self.handling_tasks = manager.dict() + self.workers = [] logger.setLevel(log_level) def run(self, queue_type: WorkerManager.Queue) -> None: logger.info("Created worker pool for queue '%s'", queue_type.value) - self.worker_args = (self.task_queue, self.item_handler, self.client_factory, self.handling_tasks) self.workers = [ - mp.Process(target=_start_working, args=self.worker_args) for _ in range(self.settings.pool_size) + mp.Process(target=_start_working, args=self._worker_args()) for _ in range(self.settings.pool_size) ] for worker in self.workers: worker.start() @@ -132,7 +133,7 @@ def _check_workers(self) -> None: new_workers = [] for worker in self.workers: - if worker.is_alive(): + if not worker._closed and worker.is_alive(): new_workers.append(worker) continue @@ -140,10 +141,11 @@ def _check_workers(self) -> None: "Worker[pid=%s, %s] not alive, creating new worker...", worker.pid, _format_exit_code(worker.exitcode) ) - self._cleanup_pending_worker_task(worker) - worker.close() + if not worker._closed: # Closed workers do not have a pid, so cleaning up would fail + self._cleanup_pending_worker_task(worker) + worker.close() - new_worker = mp.Process(target=_start_working, args=self.worker_args) + new_worker = mp.Process(target=_start_working, args=self._worker_args()) new_worker.start() new_workers.append(new_worker) @@ -170,6 +172,9 @@ def _cleanup_pending_worker_task(self, worker: mp.Process) -> None: except HTTPError: logger.exception("Could not get scheduler task[id=%s]", handling_task_id) + def _worker_args(self) -> Tuple: + return self.task_queue, self.item_handler, self.scheduler_client, self.handling_tasks + def exit(self, queue_type: WorkerManager.Queue): if not self.task_queue.empty(): items: List[QueuePrioritizedItem] = [self.task_queue.get() for _ in range(self.task_queue.qsize())] @@ -200,10 +205,9 @@ def _format_exit_code(exitcode: int) -> str: def _start_working( task_queue: 
mp.Queue, handler: Handler, - client_factory: Callable[[], SchedulerClientInterface], + scheduler_client: SchedulerClientInterface, handling_tasks: Dict[int, str], ): - scheduler_client = client_factory() logger.info("Started listening for tasks from worker[pid=%s]", os.getpid()) while True: @@ -228,10 +232,6 @@ def _start_working( def get_runtime_manager(settings: Settings, queue: WorkerManager.Queue, log_level: str) -> WorkerManager: - # Not a lambda since multiprocessing tries and fails to pickle lambda's - def client_factory(): - return SchedulerAPIClient(settings.scheduler_api) - if queue is WorkerManager.Queue.BOEFJES: item_handler = BoefjeHandler(LocalBoefjeJobRunner(get_local_repository()), get_local_repository()) else: @@ -239,7 +239,7 @@ def client_factory(): return SchedulerWorkerManager( item_handler, - client_factory, # Do not share a session between workers + SchedulerAPIClient(settings.scheduler_api), # Do not share a session between workers settings, log_level, ) diff --git a/boefjes/tests/conftest.py b/boefjes/tests/conftest.py index df5768331c1..c838a9a8b03 100644 --- a/boefjes/tests/conftest.py +++ b/boefjes/tests/conftest.py @@ -1,7 +1,7 @@ import multiprocessing import time from datetime import datetime, timezone -from multiprocessing import Queue as MultiprocessingQueue +from multiprocessing import Manager from pathlib import Path from typing import Dict, List, Optional, Tuple, Union @@ -15,10 +15,6 @@ from boefjes.runtime_interfaces import Handler, WorkerManager from tests.stubs import get_dummy_data -_tasks = multiprocessing.Manager().dict() -_popped_items = multiprocessing.Manager().dict() -_pushed_items = multiprocessing.Manager().dict() - class MockSchedulerClient(SchedulerClientInterface): def __init__( @@ -29,7 +25,7 @@ def __init__( log_path: Path, raise_on_empty_queue: Exception = KeyboardInterrupt, iterations_to_wait_for_exception: int = 0, - sleep_time: int = 0.05, + sleep_time: int = 0.1, ): self.queue_response = queue_response self.boefje_responses = boefje_responses @@ -40,11 +36,12 @@ def __init__( self.sleep_time = sleep_time self._iterations = 0 - self._tasks: Dict[str, Task] = _tasks - self._popped_items: Dict[str, QueuePrioritizedItem] = _popped_items - self._pushed_items: Dict[str, Tuple[str, QueuePrioritizedItem]] = _pushed_items + self._tasks: Dict[str, Task] = multiprocessing.Manager().dict() + self._popped_items: Dict[str, QueuePrioritizedItem] = multiprocessing.Manager().dict() + self._pushed_items: Dict[str, Tuple[str, QueuePrioritizedItem]] = multiprocessing.Manager().dict() def get_queues(self) -> List[Queue]: + time.sleep(self.sleep_time) return parse_raw_as(List[Queue], self.queue_response) def pop_item(self, queue: str) -> Optional[QueuePrioritizedItem]: @@ -97,7 +94,7 @@ def push_item(self, queue_id: str, p_item: QueuePrioritizedItem) -> None: class MockHandler(Handler): def __init__(self, exception=Exception): self.sleep_time = 0 - self.queue = MultiprocessingQueue() + self.queue = Manager().Queue() self.exception = exception def handle(self, item: Union[BoefjeMeta, NormalizerMeta]): @@ -118,13 +115,11 @@ def item_handler(tmp_path: Path): @pytest.fixture def manager(item_handler: MockHandler, tmp_path: Path) -> SchedulerWorkerManager: - def client_factory(): - return MockSchedulerClient( - get_dummy_data("scheduler/queues_response.json"), - 2 * [get_dummy_data("scheduler/pop_response_boefje.json")] - + [get_dummy_data("scheduler/should_crash.json")], - [get_dummy_data("scheduler/pop_response_normalizer.json")], - tmp_path / 
"patch_task_log", - ) - - return SchedulerWorkerManager(item_handler, client_factory, Settings(pool_size=1, poll_interval=0.01), "DEBUG") + scheduler_client = MockSchedulerClient( + get_dummy_data("scheduler/queues_response.json"), + 2 * [get_dummy_data("scheduler/pop_response_boefje.json")] + [get_dummy_data("scheduler/should_crash.json")], + [get_dummy_data("scheduler/pop_response_normalizer.json")], + tmp_path / "patch_task_log", + ) + + return SchedulerWorkerManager(item_handler, scheduler_client, Settings(pool_size=1, poll_interval=0.01), "DEBUG") diff --git a/boefjes/tests/test_app.py b/boefjes/tests/test_app.py index 734cabf9552..df16e7c695a 100644 --- a/boefjes/tests/test_app.py +++ b/boefjes/tests/test_app.py @@ -1,5 +1,5 @@ import json -from multiprocessing import Queue +from multiprocessing import Manager from pathlib import Path import pytest @@ -29,7 +29,7 @@ def test_one_process(manager: SchedulerWorkerManager, item_handler: MockHandler) def test_two_processes(manager: SchedulerWorkerManager, item_handler: MockHandler) -> None: manager.settings.pool_size = 2 - manager.task_queue = Queue(maxsize=2) + manager.task_queue = Manager().Queue() with pytest.raises(KeyboardInterrupt): manager.run(WorkerManager.Queue.BOEFJES) @@ -50,7 +50,6 @@ def test_two_processes_exception(manager: SchedulerWorkerManager, item_handler: [get_dummy_data("scheduler/pop_response_normalizer.json")], tmp_path / "patch_task_log", ) - manager.client_factory = lambda: manager.scheduler_client manager.settings.pool_size = 2 with pytest.raises(KeyboardInterrupt): @@ -67,10 +66,9 @@ def test_two_processes_handler_exception(manager: SchedulerWorkerManager, item_h [get_dummy_data("scheduler/pop_response_normalizer.json")], tmp_path / "patch_task_log", ) - manager.client_factory = lambda: manager.scheduler_client manager.settings.pool_size = 2 - manager.task_queue = Queue(maxsize=2) + manager.task_queue = Manager().Queue() with pytest.raises(KeyboardInterrupt): manager.run(WorkerManager.Queue.BOEFJES) @@ -101,9 +99,8 @@ def test_two_processes_cleanup_unfinished_tasks( [], tmp_path / "patch_task_log", ) - manager.client_factory = lambda: manager.scheduler_client manager.settings.pool_size = 2 - manager.task_queue = Queue(maxsize=2) + manager.task_queue = Manager().Queue() item_handler.sleep_time = 200 @@ -143,7 +140,6 @@ def test_null(manager: SchedulerWorkerManager, tmp_path: Path, item_handler: Moc tmp_path / "patch_task_log", iterations_to_wait_for_exception=2, ) - manager.client_factory = lambda: manager.scheduler_client with pytest.raises(KeyboardInterrupt): manager.run(WorkerManager.Queue.BOEFJES) From 43b51971433dca2b7486d048d08450a25b546dd1 Mon Sep 17 00:00:00 2001 From: Roelof Korporaal Date: Wed, 12 Jul 2023 15:43:57 +0200 Subject: [PATCH 02/18] Add buttons to manual rerun tasks, both boefjes or normalizers (#1339) Co-authored-by: Patrick Co-authored-by: ammar92 --- rocky/katalogus/views/mixins.py | 18 +++++++- rocky/rocky/scheduler.py | 5 +- rocky/rocky/templates/tasks/boefjes.html | 4 ++ rocky/rocky/templates/tasks/normalizers.html | 4 ++ rocky/rocky/views/tasks.py | 48 +++++++++++++++++++- 5 files changed, 73 insertions(+), 6 deletions(-) diff --git a/rocky/katalogus/views/mixins.py b/rocky/katalogus/views/mixins.py index 279a162964b..5be745f5b80 100644 --- a/rocky/katalogus/views/mixins.py +++ b/rocky/katalogus/views/mixins.py @@ -14,7 +14,7 @@ from katalogus.client import KATalogusClientV1, Plugin, get_katalogus from octopoes.models import OOI from rocky.exceptions import 
ClearanceLevelTooLowException, IndemnificationNotPresentException -from rocky.scheduler import Boefje, BoefjeTask, QueuePrioritizedItem, client +from rocky.scheduler import Boefje, BoefjeTask, Normalizer, NormalizerTask, QueuePrioritizedItem, RawData, client from rocky.views.mixins import OctopoesView logger = getLogger(__name__) @@ -60,6 +60,21 @@ def is_required_field(self, field: str) -> bool: return self.plugin_schema and field in self.plugin_schema.get("required", []) +class NormalizerMixin: + """ + When a user wants to run a normalizer on a given set of raw data, + this mixin provides the method to construct the normalizer task for that data and run it. + """ + + def run_normalizer(self, normalizer: Plugin, raw_data: RawData) -> None: + normalizer_task = NormalizerTask( + id=uuid4().hex, normalizer=Normalizer(id=normalizer.id, version=None), raw_data=raw_data + ) + + item = QueuePrioritizedItem(id=normalizer_task.id, priority=1, data=normalizer_task) + client.push_task(f"normalizer-{self.organization.code}", item) + + class BoefjeMixin(OctopoesView): """ When a user wants to scan one or multiple OOI's, @@ -75,7 +90,6 @@ def run_boefje(self, katalogus_boefje: Plugin, ooi: Optional[OOI]) -> None: ) item = QueuePrioritizedItem(id=boefje_task.id, priority=1, data=boefje_task) - logger.info("Item: %s", item.json()) client.push_task(f"boefje-{self.organization.code}", item) def run_boefje_for_oois( diff --git a/rocky/rocky/scheduler.py b/rocky/rocky/scheduler.py index 10331e989c1..13c970d7bc3 100644 --- a/rocky/rocky/scheduler.py +++ b/rocky/rocky/scheduler.py @@ -192,9 +192,10 @@ def get_lazy_task_list( boefje_name=boefje_name, ) - def get_task_details(self, task_id): + def get_task_details(self, task_id) -> Task: res = self.session.get(f"{self._base_uri}/tasks/{task_id}") - return res.json() + res.raise_for_status() + return Task.parse_raw(res.content) def push_task(self, queue_name: str, prioritized_item: QueuePrioritizedItem) -> None: res = self.session.post(f"{self._base_uri}/queues/{queue_name}/push", data=prioritized_item.json()) diff --git a/rocky/rocky/templates/tasks/boefjes.html b/rocky/rocky/templates/tasks/boefjes.html index f7f1bd22fe7..94faf6e7eea 100644 --- a/rocky/rocky/templates/tasks/boefjes.html +++ b/rocky/rocky/templates/tasks/boefjes.html @@ -28,6 +28,7 @@

{% translate "Boefjes" %}

{% translate "Status" %} {% translate "Created date" %} {% translate "Download task details" %} + {% translate "Reschedule" %} @@ -58,6 +59,9 @@

{% translate "Boefjes" %}

{% translate "Download meta data" %} {% endif %} + + {% include "partials/single_action_form.html" with btn_text="Reschedule" action="reschedule_task" key="task_id" value=task.id btn_class="ghost" %} + {% endfor %} diff --git a/rocky/rocky/templates/tasks/normalizers.html b/rocky/rocky/templates/tasks/normalizers.html index 6cdc3890e2c..1637789cffa 100644 --- a/rocky/rocky/templates/tasks/normalizers.html +++ b/rocky/rocky/templates/tasks/normalizers.html @@ -29,6 +29,7 @@

{% translate "Normalizers" %}

{% translate "Status" %} {% translate "Created date" %} {% translate "Download task details" %} + {% translate "Reschedule" %} @@ -60,6 +61,9 @@

{% translate "Normalizers" %}

{% translate "Download meta data" %} {% endif %} + + {% include "partials/single_action_form.html" with btn_text="Reschedule" action="reschedule_task" key="task_id" value=task.id btn_class="ghost" %} + {% endfor %} diff --git a/rocky/rocky/views/tasks.py b/rocky/rocky/views/tasks.py index 0526e07d72c..73b6c5c96df 100644 --- a/rocky/rocky/views/tasks.py +++ b/rocky/rocky/views/tasks.py @@ -1,5 +1,7 @@ import json +import uuid from datetime import datetime +from enum import Enum from account.mixins import OrganizationView from django.contrib import messages @@ -8,6 +10,7 @@ from django.urls import reverse from django.utils.translation import gettext_lazy as _ from django.views.generic.list import ListView +from katalogus.views.mixins import BoefjeMixin, NormalizerMixin from requests import HTTPError from rocky.scheduler import client @@ -15,6 +18,10 @@ TASK_LIMIT = 50 +class PageActions(Enum): + RESCHEDULE_TASK = "reschedule_task" + + class DownloadTaskDetail(OrganizationView): def get(self, request, *args, **kwargs): task_id = kwargs["task_id"] @@ -73,6 +80,43 @@ def get_queryset(self): messages.add_message(self.request, messages.ERROR, error_message) return [] + def post(self, request, *args, **kwargs): + if "action" in self.request.POST: + self.handle_page_action(request.POST["action"]) + if request.POST["action"] == PageActions.RESCHEDULE_TASK.value: + task_id = self.request.POST.get("task_id") + task = client.get_task_details(task_id) + if task.type == "normalizer": + return redirect( + reverse("normalizers_task_list", kwargs={"organization_code": self.organization.code}) + ) + if task.type == "boefje": + return redirect(reverse("boefjes_task_list", kwargs={"organization_code": self.organization.code})) + + return redirect(reverse("task_list", kwargs={"organization_code": self.organization.code})) + return self.get(request, *args, **kwargs) + + def handle_page_action(self, action: str): + if action == PageActions.RESCHEDULE_TASK.value: + task_id = self.request.POST.get("task_id") + task = client.get_task_details(task_id) + + new_id = uuid.uuid4() + + p_item = task.p_item + p_item.id = new_id + p_item.data.id = new_id + + client.push_task(f"{task.type}-{self.organization.code}", task.p_item) + + success_message = ( + "Your task is scheduled and will soon be started in the background. \n " + "Results will be added to the object list when they are in. " + "It may take some time, a refresh of the page may be needed to show the results." 
+ ) + messages.add_message(self.request, messages.SUCCESS, success_message) + return + def get_context_data(self, **kwargs): context = super().get_context_data(**kwargs) context["breadcrumbs"] = [ @@ -81,11 +125,11 @@ def get_context_data(self, **kwargs): return context -class BoefjesTaskListView(TaskListView): +class BoefjesTaskListView(BoefjeMixin, TaskListView): template_name = "tasks/boefjes.html" plugin_type = "boefje" -class NormalizersTaskListView(TaskListView): +class NormalizersTaskListView(NormalizerMixin, TaskListView): template_name = "tasks/normalizers.html" plugin_type = "normalizer" From 9f647b6908d5a1d3c4f5c391611489685b538e2a Mon Sep 17 00:00:00 2001 From: Jeroen Dekkers Date: Wed, 12 Jul 2023 21:10:07 +0200 Subject: [PATCH 03/18] Add Debian build depends for CVE API package (#1384) --- .github/workflows/build-debian-docker-image.yml | 2 +- packaging/debian12/Dockerfile | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build-debian-docker-image.yml b/.github/workflows/build-debian-docker-image.yml index c4757feec71..f7291a219ba 100644 --- a/.github/workflows/build-debian-docker-image.yml +++ b/.github/workflows/build-debian-docker-image.yml @@ -4,7 +4,7 @@ on: workflow_dispatch: {} pull_request: paths: - - "packaging" + - "packaging/**" - ".github/workflows/build-debian-docker-image.yml" env: diff --git a/packaging/debian12/Dockerfile b/packaging/debian12/Dockerfile index d5f8c26ed93..c77d143603a 100644 --- a/packaging/debian12/Dockerfile +++ b/packaging/debian12/Dockerfile @@ -11,6 +11,7 @@ RUN apt-get update && apt-get -y upgrade && \ apt-get install -y --no-install-recommends \ gettext devscripts debhelper equivs \ python3-setuptools python3-pip python3-dev \ + dh-sequence-python3 pybuild-plugin-pyproject python3-poetry \ libssl-dev dh-virtualenv libpq-dev libssl-dev \ build-essential nodejs git openssh-client \ libfreetype-dev zlib1g-dev libjpeg-dev libffi-dev From 7f718965f2bf4c07a56e80e017e550dff2705ccb Mon Sep 17 00:00:00 2001 From: Roelof Korporaal Date: Thu, 13 Jul 2023 14:52:18 +0200 Subject: [PATCH 04/18] Remove hardcoded clearance level in member list for superusers (#1390) --- .../organization_member_list.html | 20 ++++++------------- 1 file changed, 6 insertions(+), 14 deletions(-) diff --git a/rocky/rocky/templates/organizations/organization_member_list.html b/rocky/rocky/templates/organizations/organization_member_list.html index 0137f66e350..ee389b8d7f3 100644 --- a/rocky/rocky/templates/organizations/organization_member_list.html +++ b/rocky/rocky/templates/organizations/organization_member_list.html @@ -70,25 +70,17 @@

{% translate "Members" %}

{{ member.user.date_joined|date:"SHORT_DATE_FORMAT" }} - {% if member.user.is_superuser %} - L4 + {% if member.trusted_clearance_level < 0 %} + None {% else %} - {% if member.trusted_clearance_level < 0 %} - None - {% else %} - L{{ member.trusted_clearance_level }} - {% endif %} + L{{ member.trusted_clearance_level }} {% endif %} - {% if member.user.is_superuser %} - L4 + {% if member.trusted_clearance_level < 0 %} + None {% else %} - {% if member.trusted_clearance_level < 0 %} - None - {% else %} - L{{ member.acknowledged_clearance_level }} - {% endif %} + L{{ member.acknowledged_clearance_level }} {% endif %} From b7e6be6bd172d277a7d01106ed522d280e4854b1 Mon Sep 17 00:00:00 2001 From: JP Bruins Slot Date: Fri, 14 Jul 2023 12:13:00 +0200 Subject: [PATCH 05/18] Add configurable octpoes request timeout (#1382) Co-authored-by: ammar92 Co-authored-by: Patrick --- mula/scheduler/config/settings.py | 1 + mula/scheduler/connectors/services/octopoes.py | 10 ++++++++-- mula/scheduler/context/context.py | 1 + 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/mula/scheduler/config/settings.py b/mula/scheduler/config/settings.py index 9342a243a66..2438d88b417 100644 --- a/mula/scheduler/config/settings.py +++ b/mula/scheduler/config/settings.py @@ -23,6 +23,7 @@ class Settings(BaseSettings): normalizer_populate: bool = Field(True, env="SCHEDULER_NORMALIZER_POPULATE") katalogus_cache_ttl: int = Field(30, env="SCHEDULER_KATALOGUS_CACHE_TTL") monitor_organisations_interval: int = Field(60, env="SCHEDULER_MONITOR_ORGANISATIONS_INTERVAL") + octopoes_request_timeout: int = Field(10, env="SCHEDULER_OCTOPOES_REQUEST_TIMEOUT") # External services settings host_katalogus: str = Field(..., env="KATALOGUS_API") diff --git a/mula/scheduler/connectors/services/octopoes.py b/mula/scheduler/connectors/services/octopoes.py index 4e9ea22ff50..5225d6c2ac7 100644 --- a/mula/scheduler/connectors/services/octopoes.py +++ b/mula/scheduler/connectors/services/octopoes.py @@ -10,9 +10,15 @@ class Octopoes(HTTPService): name = "octopoes" health_endpoint = None - def __init__(self, host: str, source: str, orgs: List[Organisation]): + def __init__( + self, + host: str, + source: str, + orgs: List[Organisation], + timeout: int = 10, + ): self.orgs: List[Organisation] = orgs - super().__init__(host, source) + super().__init__(host, source, timeout) @exception_handler def get_objects_by_object_types( diff --git a/mula/scheduler/context/context.py b/mula/scheduler/context/context.py index d6a4f9336d9..37abdd7b791 100644 --- a/mula/scheduler/context/context.py +++ b/mula/scheduler/context/context.py @@ -52,6 +52,7 @@ def __init__(self) -> None: host=self.config.host_octopoes, source=f"scheduler/{scheduler.__version__}", orgs=katalogus_service.get_organisations(), + timeout=self.config.octopoes_request_timeout, ) # Register external services, SimpleNamespace allows us to use dot From 415160e8a39b231f5f798f206719b922519fd047 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 14 Jul 2023 12:14:12 +0200 Subject: [PATCH 06/18] Fix in the user guide docs (#1391) Co-authored-by: ammar92 --- docs/source/manual/index.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/source/manual/index.rst b/docs/source/manual/index.rst index afa44c3ad90..fe34791d4d4 100644 --- a/docs/source/manual/index.rst +++ b/docs/source/manual/index.rst @@ -1,3 +1,8 @@ +User Guide +########## + +An overview of all KAT functionality, from a user perspective. + .. 
toctree:: :maxdepth: 4 :caption: Contents From 980d9b2d249c83eb7d0e6acc2ca1dd16c4145d59 Mon Sep 17 00:00:00 2001 From: Patrick Date: Fri, 14 Jul 2023 15:44:18 +0200 Subject: [PATCH 07/18] Add explicit `black` config to all modules (#1395) --- boefjes/pyproject.toml | 4 ++++ bytes/pyproject.toml | 4 ++++ keiko/pyproject.toml | 4 ++++ mula/pyproject.toml | 4 ++++ octopoes/pyproject.toml | 4 ++++ pyproject.toml | 2 +- rocky/pyproject.toml | 4 ++++ 7 files changed, 25 insertions(+), 1 deletion(-) diff --git a/boefjes/pyproject.toml b/boefjes/pyproject.toml index 5aff531cfac..9d4aa5967ce 100644 --- a/boefjes/pyproject.toml +++ b/boefjes/pyproject.toml @@ -40,3 +40,7 @@ build-backend = "setuptools.build_meta:__legacy__" [tool.flynt] line-length = 120 transform-concats = true + +[tool.black] +target-version = ["py38", "py39", "py310", "py311"] +line-length = 120 diff --git a/bytes/pyproject.toml b/bytes/pyproject.toml index b54bff31e6f..36b1ac42087 100644 --- a/bytes/pyproject.toml +++ b/bytes/pyproject.toml @@ -85,3 +85,7 @@ wrapt = "^1.14.1" [build-system] requires = ["setuptools>=65", "wheel"] build-backend = "setuptools.build_meta:__legacy__" + +[tool.black] +target-version = ["py38", "py39", "py310", "py311"] +line-length = 120 diff --git a/keiko/pyproject.toml b/keiko/pyproject.toml index f84c9294924..de8473487e8 100644 --- a/keiko/pyproject.toml +++ b/keiko/pyproject.toml @@ -34,3 +34,7 @@ httpx = "^0.23.3" [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" + +[tool.black] +target-version = ["py38", "py39", "py310", "py311"] +line-length = 120 diff --git a/mula/pyproject.toml b/mula/pyproject.toml index 80443c7514e..917eedced4f 100644 --- a/mula/pyproject.toml +++ b/mula/pyproject.toml @@ -47,3 +47,7 @@ build-backend = "setuptools.build_meta:__legacy__" omit = [ "scheduler/alembic/*" ] + +[tool.black] +target-version = ["py38", "py39", "py310", "py311"] +line-length = 120 diff --git a/octopoes/pyproject.toml b/octopoes/pyproject.toml index af7b025018c..b823d7efd8b 100644 --- a/octopoes/pyproject.toml +++ b/octopoes/pyproject.toml @@ -51,3 +51,7 @@ pytest-mock = "^3.10.0" pre-commit = "^2.20.0" httpx = "^0.23.3" pytest-timeout = "^2.1.0" + +[tool.black] +target-version = ["py38", "py39", "py310", "py311"] +line-length = 120 diff --git a/pyproject.toml b/pyproject.toml index ecd83bf8967..c9da5fba389 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [tool.black] -target-version = ["py38"] +target-version = ["py38", "py39", "py310", "py311"] line-length = 120 [tool.mypy] diff --git a/rocky/pyproject.toml b/rocky/pyproject.toml index 6c4c9be80cc..0d56761a3a7 100644 --- a/rocky/pyproject.toml +++ b/rocky/pyproject.toml @@ -103,3 +103,7 @@ max_line_length = 120 blank_line_after_tag = "load,extends,include" # https://www.djlint.com/docs/linter/#rules ignore = "H006,H016,H017,H030,H031" + +[tool.black] +target-version = ["py38", "py39", "py310", "py311"] +line-length = 120 From 6e5ff3ef0b6c655f1c0f22ee3f90dd846c54315a Mon Sep 17 00:00:00 2001 From: Jeroen Dekkers Date: Mon, 17 Jul 2023 09:40:14 +0200 Subject: [PATCH 08/18] Build the Debian build image on the main branch (#1387) --- .github/workflows/build-debian-docker-image.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/build-debian-docker-image.yml b/.github/workflows/build-debian-docker-image.yml index f7291a219ba..715557c55c6 100644 --- a/.github/workflows/build-debian-docker-image.yml +++ b/.github/workflows/build-debian-docker-image.yml @@ -2,6 +2,12 @@ 
name: Create and publish Docker image for building Debian packages on: workflow_dispatch: {} + push: + branches: + - 'main' + paths: + - "packaging" + - ".github/workflows/build-debian-docker-image.yml" pull_request: paths: - "packaging/**" From a22ca38763c748d327455f6fecdbce71cfc62041 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 09:53:19 +0200 Subject: [PATCH 09/18] Bump cryptography from 41.0.1 to 41.0.2 in /bytes (#1397) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Jan Klopper --- bytes/poetry.lock | 46 +++++++++++++++++++++----------------- bytes/pyproject.toml | 2 +- bytes/requirements-dev.txt | 44 +++++++++++++++++++----------------- bytes/requirements.txt | 44 +++++++++++++++++++----------------- 4 files changed, 74 insertions(+), 62 deletions(-) diff --git a/bytes/poetry.lock b/bytes/poetry.lock index 312bd3a65ed..505b93dca31 100644 --- a/bytes/poetry.lock +++ b/bytes/poetry.lock @@ -410,30 +410,34 @@ files = [ [[package]] name = "cryptography" -version = "41.0.1" +version = "41.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699"}, - {file = "cryptography-41.0.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b"}, - {file = "cryptography-41.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3"}, - {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db"}, - {file = "cryptography-41.0.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31"}, - {file = "cryptography-41.0.1-cp37-abi3-win32.whl", hash = "sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5"}, - {file = "cryptography-41.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039"}, - {file = "cryptography-41.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a"}, - {file = "cryptography-41.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5"}, - {file = "cryptography-41.0.1.tar.gz", hash = "sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006"}, + {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711"}, + {file = "cryptography-41.0.2-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182"}, + {file = "cryptography-41.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83"}, + {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5"}, + {file = "cryptography-41.0.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58"}, + {file = "cryptography-41.0.2-cp37-abi3-win32.whl", hash = "sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76"}, + {file = "cryptography-41.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766"}, + {file = "cryptography-41.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa"}, + {file = "cryptography-41.0.2-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f"}, + {file = "cryptography-41.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0"}, + {file = "cryptography-41.0.2.tar.gz", hash = "sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c"}, ] [package.dependencies] @@ -2164,4 +2168,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "f0c87844e10f37661d87e9c17e281fdc6182aa6f39b37152b94f9fb832cda526" +content-hash = "12dad90ac43f50d47a26cfb090091e37d3fe80e65930cf2714ee19cf5cc03eb1" diff --git a/bytes/pyproject.toml b/bytes/pyproject.toml index 36b1ac42087..23f4eeefb79 100644 --- a/bytes/pyproject.toml +++ b/bytes/pyproject.toml @@ -16,7 +16,7 @@ certifi = "^2023.5.7" cffi = "^1.15.1" charset-normalizer = "^3.1.0" click = "^8.1.3" -cryptography = "^41.0.1" +cryptography = "^41.0.2" ecdsa = "^0.18.0" fastapi = "^0.92.0" greenlet = "^2.0.1" diff --git a/bytes/requirements-dev.txt b/bytes/requirements-dev.txt index aa9b40ee8c4..8324a45f4c8 100644 --- a/bytes/requirements-dev.txt +++ b/bytes/requirements-dev.txt @@ -220,26 +220,30 @@ click==8.1.3 ; python_version >= "3.8" and python_version < "4.0" \ colorama==0.4.6 ; python_version >= "3.8" and python_version < "4.0" and (platform_system == "Windows" or sys_platform == "win32") \ --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 -cryptography==41.0.1 ; python_version >= "3.8" and python_version < "4.0" \ - --hash=sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db \ - --hash=sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a \ - --hash=sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039 \ - --hash=sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c \ - --hash=sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3 \ - --hash=sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485 \ - --hash=sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c \ - --hash=sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca \ - --hash=sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5 \ - --hash=sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5 \ - --hash=sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3 \ - --hash=sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb \ - --hash=sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43 \ - --hash=sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31 \ - --hash=sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc \ - --hash=sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b \ - 
--hash=sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006 \ - --hash=sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a \ - --hash=sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699 +cryptography==41.0.2 ; python_version >= "3.8" and python_version < "4.0" \ + --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ + --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ + --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ + --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ + --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ + --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ + --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ + --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ + --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ + --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ + --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ + --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ + --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ + --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ + --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ + --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ + --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ + --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ + --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ + --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ + --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ + --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ + --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 deprecated==1.2.14 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c \ --hash=sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3 diff --git a/bytes/requirements.txt b/bytes/requirements.txt index c55107304ed..762de36980c 100644 --- a/bytes/requirements.txt +++ b/bytes/requirements.txt @@ -185,26 +185,30 @@ click==8.1.3 ; python_version >= "3.8" and python_version < "4.0" \ colorama==0.4.6 ; python_version >= "3.8" and python_version < "4.0" and platform_system == "Windows" \ --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 -cryptography==41.0.1 ; python_version >= "3.8" and python_version < "4.0" \ - --hash=sha256:059e348f9a3c1950937e1b5d7ba1f8e968508ab181e75fc32b879452f08356db \ - --hash=sha256:1a5472d40c8f8e91ff7a3d8ac6dfa363d8e3138b961529c996f3e2df0c7a411a \ - --hash=sha256:1a8e6c2de6fbbcc5e14fd27fb24414507cb3333198ea9ab1258d916f00bc3039 \ - --hash=sha256:1fee5aacc7367487b4e22484d3c7e547992ed726d14864ee33c0176ae43b0d7c \ - --hash=sha256:5d092fdfedaec4cbbffbf98cddc915ba145313a6fdaab83c6e67f4e6c218e6f3 \ - --hash=sha256:5f0ff6e18d13a3de56f609dd1fd11470918f770c6bd5d00d632076c727d35485 \ - 
--hash=sha256:7bfc55a5eae8b86a287747053140ba221afc65eb06207bedf6e019b8934b477c \ - --hash=sha256:7fa01527046ca5facdf973eef2535a27fec4cb651e4daec4d043ef63f6ecd4ca \ - --hash=sha256:8dde71c4169ec5ccc1087bb7521d54251c016f126f922ab2dfe6649170a3b8c5 \ - --hash=sha256:8f4ab7021127a9b4323537300a2acfb450124b2def3756f64dc3a3d2160ee4b5 \ - --hash=sha256:948224d76c4b6457349d47c0c98657557f429b4e93057cf5a2f71d603e2fc3a3 \ - --hash=sha256:9a6c7a3c87d595608a39980ebaa04d5a37f94024c9f24eb7d10262b92f739ddb \ - --hash=sha256:b46e37db3cc267b4dea1f56da7346c9727e1209aa98487179ee8ebed09d21e43 \ - --hash=sha256:b4ceb5324b998ce2003bc17d519080b4ec8d5b7b70794cbd2836101406a9be31 \ - --hash=sha256:cb33ccf15e89f7ed89b235cff9d49e2e62c6c981a6061c9c8bb47ed7951190bc \ - --hash=sha256:d198820aba55660b4d74f7b5fd1f17db3aa5eb3e6893b0a41b75e84e4f9e0e4b \ - --hash=sha256:d34579085401d3f49762d2f7d6634d6b6c2ae1242202e860f4d26b046e3a1006 \ - --hash=sha256:eb8163f5e549a22888c18b0d53d6bb62a20510060a22fd5a995ec8a05268df8a \ - --hash=sha256:f73bff05db2a3e5974a6fd248af2566134d8981fd7ab012e5dd4ddb1d9a70699 +cryptography==41.0.2 ; python_version >= "3.8" and python_version < "4.0" \ + --hash=sha256:01f1d9e537f9a15b037d5d9ee442b8c22e3ae11ce65ea1f3316a41c78756b711 \ + --hash=sha256:079347de771f9282fbfe0e0236c716686950c19dee1b76240ab09ce1624d76d7 \ + --hash=sha256:182be4171f9332b6741ee818ec27daff9fb00349f706629f5cbf417bd50e66fd \ + --hash=sha256:192255f539d7a89f2102d07d7375b1e0a81f7478925b3bc2e0549ebf739dae0e \ + --hash=sha256:2a034bf7d9ca894720f2ec1d8b7b5832d7e363571828037f9e0c4f18c1b58a58 \ + --hash=sha256:342f3767e25876751e14f8459ad85e77e660537ca0a066e10e75df9c9e9099f0 \ + --hash=sha256:439c3cc4c0d42fa999b83ded80a9a1fb54d53c58d6e59234cfe97f241e6c781d \ + --hash=sha256:49c3222bb8f8e800aead2e376cbef687bc9e3cb9b58b29a261210456a7783d83 \ + --hash=sha256:674b669d5daa64206c38e507808aae49904c988fa0a71c935e7006a3e1e83831 \ + --hash=sha256:7a9a3bced53b7f09da251685224d6a260c3cb291768f54954e28f03ef14e3766 \ + --hash=sha256:7af244b012711a26196450d34f483357e42aeddb04128885d95a69bd8b14b69b \ + --hash=sha256:7d230bf856164de164ecb615ccc14c7fc6de6906ddd5b491f3af90d3514c925c \ + --hash=sha256:84609ade00a6ec59a89729e87a503c6e36af98ddcd566d5f3be52e29ba993182 \ + --hash=sha256:9a6673c1828db6270b76b22cc696f40cde9043eb90373da5c2f8f2158957f42f \ + --hash=sha256:9b6d717393dbae53d4e52684ef4f022444fc1cce3c48c38cb74fca29e1f08eaa \ + --hash=sha256:9c3fe6534d59d071ee82081ca3d71eed3210f76ebd0361798c74abc2bcf347d4 \ + --hash=sha256:a719399b99377b218dac6cf547b6ec54e6ef20207b6165126a280b0ce97e0d2a \ + --hash=sha256:b332cba64d99a70c1e0836902720887fb4529ea49ea7f5462cf6640e095e11d2 \ + --hash=sha256:d124682c7a23c9764e54ca9ab5b308b14b18eba02722b8659fb238546de83a76 \ + --hash=sha256:d73f419a56d74fef257955f51b18d046f3506270a5fd2ac5febbfa259d6c0fa5 \ + --hash=sha256:f0dc40e6f7aa37af01aba07277d3d64d5a03dc66d682097541ec4da03cc140ee \ + --hash=sha256:f14ad275364c8b4e525d018f6716537ae7b6d369c094805cae45300847e0894f \ + --hash=sha256:f772610fe364372de33d76edcd313636a25684edb94cee53fd790195f5989d14 deprecated==1.2.14 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:6fac8b097794a90302bdbb17b9b815e732d3c4720583ff1b198499d78470466c \ --hash=sha256:e5323eb936458dccc2582dc6f9c322c852a775a27065ff2b0c4970b9d53d01b3 From d083857598504ca9b179253bf271c0f5b37351ee Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jul 2023 10:47:34 +0200 Subject: [PATCH 10/18] Bump cryptography from 41.0.0 to 41.0.2 in 
/boefjes/boefjes/plugins/kat_ssl_certificates (#1396) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jeroen Dekkers --- boefjes/boefjes/plugins/kat_ssl_certificates/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/boefjes/boefjes/plugins/kat_ssl_certificates/requirements.txt b/boefjes/boefjes/plugins/kat_ssl_certificates/requirements.txt index d5655d147b5..ba3e03ecf9e 100644 --- a/boefjes/boefjes/plugins/kat_ssl_certificates/requirements.txt +++ b/boefjes/boefjes/plugins/kat_ssl_certificates/requirements.txt @@ -1 +1 @@ -cryptography==41.0.0 +cryptography==41.0.2 From 38753e36810c237b7ef27083f25ac49dfa6f815b Mon Sep 17 00:00:00 2001 From: Rieven Date: Wed, 19 Jul 2023 10:12:40 +0200 Subject: [PATCH 11/18] Remove member group checks and check for permission instead (#1275) Co-authored-by: ammar92 Co-authored-by: Patrick Co-authored-by: Roelof Korporaal --- rocky/account/forms/account_setup.py | 2 +- .../account/step_2c_account_setup_intro.html | 2 +- rocky/onboarding/view_helpers.py | 6 + rocky/onboarding/views.py | 79 ++-- rocky/rocky/middleware/onboarding.py | 18 +- rocky/tests/conftest.py | 1 + rocky/tests/test_groups_and_permissions.py | 16 - rocky/tests/test_onboarding_admin_wizard.py | 15 +- .../test_onboarding_organization_steps.py | 375 ++++++------------ .../management/commands/setup_dev_account.py | 1 + .../0040_update_admin_permission.py | 33 ++ rocky/tools/models.py | 12 - 12 files changed, 228 insertions(+), 332 deletions(-) create mode 100644 rocky/tools/migrations/0040_update_admin_permission.py diff --git a/rocky/account/forms/account_setup.py b/rocky/account/forms/account_setup.py index 239d483b35c..0a411123d17 100644 --- a/rocky/account/forms/account_setup.py +++ b/rocky/account/forms/account_setup.py @@ -159,7 +159,7 @@ def save(self, **kwargs): status=OrganizationMember.STATUSES.ACTIVE, ) member.groups.add(selected_group.id) - if member.is_admin or self.user.is_superuser: + if member.has_perm("change_organizationmember") or self.user.is_superuser: member.acknowledged_clearance_level = 4 member.trusted_clearance_level = 4 member.save() diff --git a/rocky/onboarding/templates/account/step_2c_account_setup_intro.html b/rocky/onboarding/templates/account/step_2c_account_setup_intro.html index abf20ad31cd..1936f4f95f8 100644 --- a/rocky/onboarding/templates/account/step_2c_account_setup_intro.html +++ b/rocky/onboarding/templates/account/step_2c_account_setup_intro.html @@ -37,7 +37,7 @@

{% translate "Single account setup:" %}

{% endif %} diff --git a/rocky/onboarding/view_helpers.py b/rocky/onboarding/view_helpers.py index bf22f1a7738..8e5008c9c89 100644 --- a/rocky/onboarding/view_helpers.py +++ b/rocky/onboarding/view_helpers.py @@ -3,6 +3,12 @@ from django.utils.translation import gettext_lazy as _ from tools.view_helpers import StepsMixin +ONBOARDING_PERMISSIONS = ( + "tools.can_scan_organization", + "tools.can_set_clearance_level", + "tools.can_enable_disable_boefje", +) + class KatIntroductionStepsMixin(StepsMixin, OrganizationView): def build_steps(self): diff --git a/rocky/onboarding/views.py b/rocky/onboarding/views.py index 79dffbc1a4b..561b92da418 100644 --- a/rocky/onboarding/views.py +++ b/rocky/onboarding/views.py @@ -4,12 +4,10 @@ from account.mixins import ( OrganizationPermissionRequiredMixin, OrganizationView, - PermissionRequiredMixin, ) from django.contrib import messages from django.contrib.auth import get_user_model -from django.contrib.auth.mixins import UserPassesTestMixin -from django.contrib.auth.models import Group +from django.contrib.auth.mixins import PermissionRequiredMixin from django.core.exceptions import BadRequest from django.http import Http404 from django.shortcuts import redirect @@ -20,7 +18,7 @@ from django.views.generic.edit import CreateView, FormView, UpdateView from katalogus.client import get_katalogus from tools.forms.boefje import SelectBoefjeForm -from tools.models import GROUP_REDTEAM, Organization, OrganizationMember +from tools.models import Organization, OrganizationMember from tools.ooi_form import OOIForm from tools.ooi_helpers import ( create_object_tree_item_from_ref, @@ -40,6 +38,7 @@ OnboardingSetClearanceLevelForm, ) from onboarding.view_helpers import ( + ONBOARDING_PERMISSIONS, KatIntroductionAdminStepsMixin, KatIntroductionRegistrationStepsMixin, KatIntroductionStepsMixin, @@ -71,57 +70,53 @@ class OnboardingStart(OrganizationView): def get(self, request, *args, **kwargs): if request.user.is_superuser: return redirect("step_introduction_registration") - if self.organization_member.is_redteam: + if self.organization_member.has_perms(ONBOARDING_PERMISSIONS): return redirect("step_introduction", kwargs={"organization_code": self.organization.code}) return redirect("crisis_room") -class RedteamRequiredMixin(UserPassesTestMixin): - def test_func(self): - if self.request.user.is_superuser: - return True - members = OrganizationMember.objects.filter(user=self.request.user) - return any(member.is_redteam for member in members) - - # REDTEAMER FLOW class OnboardingIntroductionView( - RedteamRequiredMixin, + OrganizationPermissionRequiredMixin, KatIntroductionStepsMixin, TemplateView, ): template_name = "step_1_introduction.html" current_step = 1 + permission_required = "tools.can_scan_organization" class OnboardingChooseReportInfoView( - RedteamRequiredMixin, + OrganizationPermissionRequiredMixin, KatIntroductionStepsMixin, TemplateView, ): template_name = "step_2a_choose_report_info.html" current_step = 2 + permission_required = "tools.can_scan_organization" class OnboardingChooseReportTypeView( - RedteamRequiredMixin, + OrganizationPermissionRequiredMixin, KatIntroductionStepsMixin, TemplateView, ): template_name = "step_2b_choose_report_type.html" current_step = 2 + permission_required = "tools.can_scan_organization" class OnboardingSetupScanSelectPluginsView( - RedteamRequiredMixin, + OrganizationPermissionRequiredMixin, KatIntroductionStepsMixin, TemplateView, ): template_name = "step_3e_setup_scan_select_plugins.html" current_step = 3 report: 
Type[Report] = DNSReport + permission_required = "tools.can_enable_disable_boefje" def get_form(self): boefjes = self.report.get_boefjes(self.organization) @@ -161,12 +156,13 @@ def get_context_data(self, **kwargs: Any) -> Dict[str, Any]: class OnboardingSetupScanOOIInfoView( - RedteamRequiredMixin, + OrganizationPermissionRequiredMixin, KatIntroductionStepsMixin, TemplateView, ): template_name = "step_3a_setup_scan_ooi_info.html" current_step = 3 + permission_required = "tools.can_scan_organization" class OnboardingOOIForm(OOIForm): @@ -185,12 +181,13 @@ def get_fields(self): class OnboardingSetupScanOOIAddView( - RedteamRequiredMixin, + OrganizationPermissionRequiredMixin, KatIntroductionStepsMixin, BaseOOIFormView, ): template_name = "step_3b_setup_scan_ooi_add.html" current_step = 3 + permission_required = "tools.can_scan_organization" form_class = OnboardingOOIForm hidden_form_fields = { "network": { @@ -251,7 +248,7 @@ def get_context_data(self, **kwargs: Any) -> Dict[str, Any]: class OnboardingSetupScanOOIDetailView( - RedteamRequiredMixin, + OrganizationPermissionRequiredMixin, SingleOOITreeMixin, KatIntroductionStepsMixin, OnboardingBreadcrumbsMixin, @@ -259,6 +256,7 @@ class OnboardingSetupScanOOIDetailView( ): template_name = "step_3c_setup_scan_ooi_detail.html" current_step = 3 + permission_required = "tools.can_scan_organization" def get_ooi_id(self) -> str: if "ooi_id" in self.request.session: @@ -321,13 +319,14 @@ def get_context_data(self, **kwargs): class OnboardingSetClearanceLevelView( - RedteamRequiredMixin, + OrganizationPermissionRequiredMixin, KatIntroductionStepsMixin, OnboardingBreadcrumbsMixin, FormView, ): template_name = "step_3d_set_clearance_level.html" form_class = OnboardingSetClearanceLevelForm + permission_required = "tools.can_set_clearance_level" current_step = 3 initial = {"level": 2} @@ -371,12 +370,13 @@ def get_boefjes_tiles(self): class OnboardingReportView( - RedteamRequiredMixin, + OrganizationPermissionRequiredMixin, KatIntroductionStepsMixin, TemplateView, ): template_name = "step_4_report.html" current_step = 4 + permission_required = "tools.can_scan_organization" def post(self, request, *args, **kwargs): if "ooi_id" not in request.GET: @@ -393,7 +393,7 @@ def set_member_onboarded(self): member.save() -class BaseReportView(RedteamRequiredMixin, BaseOOIDetailView): +class BaseReportView(BaseOOIDetailView): report: Type[Report] depth = 15 @@ -413,8 +413,9 @@ def get_context_data(self, **kwargs): return context -class DnsReportView(OnboardingBreadcrumbsMixin, BaseReportView): +class DnsReportView(OrganizationPermissionRequiredMixin, OnboardingBreadcrumbsMixin, BaseReportView): template_name = "dns_report.html" + permission_required = "tools.can_scan_organization" report = DNSReport @@ -424,24 +425,19 @@ class RegistrationBreadcrumbsMixin(BreadcrumbsMixin): ] -class AdminRequiredMixin(UserPassesTestMixin): - def test_func(self): - if self.request.user.is_superuser: - return True - members = OrganizationMember.objects.filter(user=self.request.user) - return any(member.is_admin for member in members) - - # account flow -class OnboardingIntroductionRegistrationView(AdminRequiredMixin, KatIntroductionRegistrationStepsMixin, TemplateView): +class OnboardingIntroductionRegistrationView( + PermissionRequiredMixin, KatIntroductionRegistrationStepsMixin, TemplateView +): """ Step: 1 - Registration introduction """ template_name = "account/step_1_registration_intro.html" current_step = 1 + permission_required = "tools.add_organizationmember" class 
OnboardingOrganizationSetupView( @@ -548,16 +544,21 @@ def get_success_url(self) -> str: return reverse_lazy("step_account_setup_intro", kwargs={"organization_code": self.organization.code}) -class OnboardingAccountSetupIntroView(AdminRequiredMixin, KatIntroductionAdminStepsMixin, TemplateView): +class OnboardingAccountSetupIntroView( + OrganizationPermissionRequiredMixin, KatIntroductionAdminStepsMixin, TemplateView +): """ Step 4: Split flow to or continue with single account or continue to multiple account creation """ template_name = "account/step_2c_account_setup_intro.html" current_step = 4 + permission_required = "tools.add_organizationmember" + +class OnboardingAccountCreationMixin(OrganizationPermissionRequiredMixin, KatIntroductionAdminStepsMixin, CreateView): + permission_required = "tools.add_organizationmember" -class OnboardingAccountCreationMixin(AdminRequiredMixin, KatIntroductionAdminStepsMixin, CreateView): def get_form_kwargs(self): kwargs = super().get_form_kwargs() kwargs["organization_code"] = self.organization.code @@ -567,13 +568,14 @@ def get_form_kwargs(self): # Account setup for multiple user accounts: redteam, admins, clients -class OnboardingChooseUserTypeView(KatIntroductionAdminStepsMixin, TemplateView): +class OnboardingChooseUserTypeView(OrganizationPermissionRequiredMixin, KatIntroductionAdminStepsMixin, TemplateView): """ Step 1: Introduction about how to create multiple user accounts """ current_step = 4 template_name = "account/step_3_account_user_type.html" + permission_required = "tools.add_organizationmember" class OnboardingAccountSetupAdminView( @@ -640,7 +642,7 @@ class OnboardingAccountSetupClientView(RegistrationBreadcrumbsMixin, OnboardingA current_step = 4 def get_success_url(self, **kwargs): - return reverse_lazy("crisis_room") + return reverse_lazy("complete_onboarding", kwargs={"organization_code": self.organization.code}) def form_valid(self, form): name = form.cleaned_data["name"] @@ -658,9 +660,6 @@ class CompleteOnboarding(OrganizationView): """ def get(self, request, *args, **kwargs): - if self.request.user.is_superuser and not self.organization_member.is_redteam: - self.organization_member.groups.add(Group.objects.get(name=GROUP_REDTEAM)) - return redirect(reverse("step_introduction", kwargs={"organization_code": self.organization.code})) self.organization_member.onboarded = True self.organization_member.status = OrganizationMember.STATUSES.ACTIVE self.organization_member.save() diff --git a/rocky/rocky/middleware/onboarding.py b/rocky/rocky/middleware/onboarding.py index ce08aec947a..1f68d538e05 100644 --- a/rocky/rocky/middleware/onboarding.py +++ b/rocky/rocky/middleware/onboarding.py @@ -1,5 +1,6 @@ from django.shortcuts import redirect from django.urls.base import reverse +from onboarding.view_helpers import ONBOARDING_PERMISSIONS from tools.models import OrganizationMember @@ -25,18 +26,17 @@ def middleware(request): ) and not member_onboarded ): - if request.user.is_superuser and not member_onboarded: + # Not onboarded superusers goes to registration of the their first organization + adding members to it. + if request.user.is_superuser: return redirect(reverse("step_introduction_registration")) - if not member_onboarded: - member = OrganizationMember.objects.filter(user=request.user) + member = OrganizationMember.objects.filter(user=request.user) - # There might be redteamers without an organization after an organization is deleted. 
- if member.exists() and member.first().is_redteam: - # a redteamer can be in many organizations, but we onboard the first one. - return redirect( - reverse("step_introduction", kwargs={"organization_code": member.first().organization.code}) - ) + # Members with these permissions can run a full DNS-report onboarding. + if member.exists() and member.first().has_perms(ONBOARDING_PERMISSIONS): + return redirect( + reverse("step_introduction", kwargs={"organization_code": member.first().organization.code}) + ) return response diff --git a/rocky/tests/conftest.py b/rocky/tests/conftest.py index f748ef302a5..8ef23f46a05 100644 --- a/rocky/tests/conftest.py +++ b/rocky/tests/conftest.py @@ -76,6 +76,7 @@ def add_admin_group_permissions(member): Permission.objects.get(codename="change_organizationmember").id, Permission.objects.get(codename="can_delete_oois").id, Permission.objects.get(codename="add_indemnification").id, + Permission.objects.get(codename="can_scan_organization").id, ] group.permissions.set(admin_permissions) diff --git a/rocky/tests/test_groups_and_permissions.py b/rocky/tests/test_groups_and_permissions.py index 049db8a58a3..22e42ff5ce9 100644 --- a/rocky/tests/test_groups_and_permissions.py +++ b/rocky/tests/test_groups_and_permissions.py @@ -8,22 +8,6 @@ from tests.conftest import setup_request -def test_is_not_red_team(superuser_member): - assert not superuser_member.is_redteam - - -def test_is_red_team(redteam_member): - assert redteam_member.is_redteam - - -def test_is_not_admin(superuser_member): - assert not superuser_member.is_admin - - -def test_is_admin(admin_member): - assert admin_member.is_admin - - def test_indemnification_present(superuser_member): assert superuser_member.user.indemnification_set.exists() diff --git a/rocky/tests/test_onboarding_admin_wizard.py b/rocky/tests/test_onboarding_admin_wizard.py index d948daf7662..131a2e1a180 100644 --- a/rocky/tests/test_onboarding_admin_wizard.py +++ b/rocky/tests/test_onboarding_admin_wizard.py @@ -14,18 +14,21 @@ def test_admin_onboarding_registration(rf, superuser_member, admin_member, redteam_member, client_member): + """ + This onboarding is before an organization has been created and it is only visible for superusers. 
+ """ response_superuser = OnboardingIntroductionRegistrationView.as_view()( setup_request(rf.get("step_introduction_registration"), superuser_member.user), organization_code=superuser_member.organization.code, ) - response_admin = OnboardingIntroductionRegistrationView.as_view()( - setup_request(rf.get("step_introduction_registration"), admin_member.user), - organization_code=admin_member.organization.code, - ) - assert response_superuser.status_code == 200 - assert response_admin.status_code == 200 + + with pytest.raises(PermissionDenied): + OnboardingIntroductionRegistrationView.as_view()( + setup_request(rf.get("step_introduction_registration"), admin_member.user), + organization_code=admin_member.organization.code, + ) with pytest.raises(PermissionDenied): OnboardingIntroductionRegistrationView.as_view()( diff --git a/rocky/tests/test_onboarding_organization_steps.py b/rocky/tests/test_onboarding_organization_steps.py index aeb07025d0d..6baf9e9509b 100644 --- a/rocky/tests/test_onboarding_organization_steps.py +++ b/rocky/tests/test_onboarding_organization_steps.py @@ -17,185 +17,114 @@ from tests.conftest import setup_request -def test_onboarding_introduction(rf, superuser_member, admin_member, redteam_member, client_member): - response_superuser = OnboardingIntroductionView.as_view()( - setup_request(rf.get("step_introduction"), superuser_member.user), - organization_code=superuser_member.organization.code, - ) - response_redteam = OnboardingIntroductionView.as_view()( - setup_request(rf.get("step_introduction"), redteam_member.user), - organization_code=redteam_member.organization.code, +@pytest.mark.parametrize("member", ["superuser_member", "admin_member", "redteam_member", "client_member"]) +def test_onboarding_introduction(request, member, rf): + member = request.getfixturevalue(member) + response = OnboardingIntroductionView.as_view()( + setup_request(rf.get("step_introduction"), member.user), + organization_code=member.organization.code, ) - assert response_redteam.status_code == 200 - assert response_superuser.status_code == 200 - assertContains(response_redteam, "Welcome to OpenKAT") - assertContains(response_redteam, "Skip onboarding") - assertContains(response_redteam, "Let's get started") - - with pytest.raises(PermissionDenied): - OnboardingIntroductionView.as_view()( - setup_request(rf.get("step_introduction"), admin_member.user), - organization_code=admin_member.organization.code, - ) - with pytest.raises(PermissionDenied): - OnboardingIntroductionView.as_view()( - setup_request(rf.get("step_introduction"), client_member.user), - organization_code=client_member.organization.code, - ) + assert response.status_code == 200 + assertContains(response, "Welcome to OpenKAT") + assertContains(response, "Skip onboarding") + assertContains(response, "Let's get started") -def test_onboarding_choose_report_info(rf, superuser_member, admin_member, redteam_member, client_member): - response_superuser = OnboardingChooseReportInfoView.as_view()( - setup_request(rf.get("step_choose_report_info"), superuser_member.user), - organization_code=superuser_member.organization.code, - ) - response_redteam = OnboardingChooseReportInfoView.as_view()( - setup_request(rf.get("step_choose_report_info"), redteam_member.user), - organization_code=redteam_member.organization.code, +@pytest.mark.parametrize("member", ["superuser_member", "admin_member", "redteam_member", "client_member"]) +def test_onboarding_choose_report_info(request, member, rf): + member = request.getfixturevalue(member) + 
response = OnboardingChooseReportInfoView.as_view()( + setup_request(rf.get("step_choose_report_info"), member.user), + organization_code=member.organization.code, ) - assert response_redteam.status_code == 200 - assert response_superuser.status_code == 200 + assert response.status_code == 200 - assertContains(response_redteam, "KAT introduction") - assertContains(response_redteam, "Reports") - assertContains(response_redteam, "Data") - assertContains(response_redteam, "Skip onboarding") - assertContains(response_redteam, "Let's choose a report") - - with pytest.raises(PermissionDenied): - OnboardingChooseReportInfoView.as_view()( - setup_request(rf.get("step_choose_report_info"), admin_member.user), - organization_code=admin_member.organization.code, - ) - with pytest.raises(PermissionDenied): - OnboardingChooseReportInfoView.as_view()( - setup_request(rf.get("step_choose_report_info"), client_member.user), - organization_code=client_member.organization.code, - ) + assertContains(response, "KAT introduction") + assertContains(response, "Reports") + assertContains(response, "Data") + assertContains(response, "Skip onboarding") + assertContains(response, "Let's choose a report") -def test_onboarding_choose_report_type(rf, superuser_member, admin_member, redteam_member, client_member): - response_superuser = OnboardingChooseReportTypeView.as_view()( - setup_request(rf.get("step_choose_report_type"), superuser_member.user), - organization_code=superuser_member.organization.code, - ) - response_redteam = OnboardingChooseReportTypeView.as_view()( - setup_request(rf.get("step_choose_report_type"), redteam_member.user), - organization_code=redteam_member.organization.code, +@pytest.mark.parametrize("member", ["superuser_member", "admin_member", "redteam_member", "client_member"]) +def test_onboarding_choose_report_type(request, member, rf): + member = request.getfixturevalue(member) + response = OnboardingChooseReportTypeView.as_view()( + setup_request(rf.get("step_choose_report_type"), member.user), + organization_code=member.organization.code, ) - assert response_redteam.status_code == 200 - assert response_superuser.status_code == 200 - assertContains(response_redteam, "KAT introduction") - assertContains(response_redteam, "Choose a report - Type") - assertContains(response_redteam, "Skip onboarding") - assertContains(response_redteam, "DNS report") - assertContains(response_redteam, "Pen test") - assertContains(response_redteam, "Mail report") - assertContains(response_redteam, "DigiD") - - with pytest.raises(PermissionDenied): - OnboardingChooseReportTypeView.as_view()( - setup_request(rf.get("step_choose_report_type"), admin_member.user), - organization_code=admin_member.organization.code, - ) - with pytest.raises(PermissionDenied): - OnboardingChooseReportTypeView.as_view()( - setup_request(rf.get("step_choose_report_type"), client_member.user), - organization_code=client_member.organization.code, - ) + assert response.status_code == 200 + assertContains(response, "KAT introduction") + assertContains(response, "Choose a report - Type") + assertContains(response, "Skip onboarding") + assertContains(response, "DNS report") + assertContains(response, "Pen test") + assertContains(response, "Mail report") + assertContains(response, "DigiD") -def test_onboarding_setup_scan(rf, superuser_member, admin_member, redteam_member, client_member): - response_superuser = OnboardingSetupScanOOIInfoView.as_view()( - setup_request(rf.get("step_setup_scan_ooi_info"), superuser_member.user), - 
organization_code=superuser_member.organization.code, - ) - response_redteam = OnboardingSetupScanOOIInfoView.as_view()( - setup_request(rf.get("step_setup_scan_ooi_info"), redteam_member.user), - organization_code=redteam_member.organization.code, +@pytest.mark.parametrize("member", ["superuser_member", "admin_member", "redteam_member", "client_member"]) +def test_onboarding_setup_scan(request, member, rf): + member = request.getfixturevalue(member) + response = OnboardingSetupScanOOIInfoView.as_view()( + setup_request(rf.get("step_setup_scan_ooi_info"), member.user), + organization_code=member.organization.code, ) - assert response_redteam.status_code == 200 - assert response_superuser.status_code == 200 - - assertContains(response_redteam, "KAT introduction") - assertContains(response_redteam, "Setup scan") - assertContains(response_redteam, "Let OpenKAT know what object to scan") - assertContains(response_redteam, "Understanding objects") - assertContains(response_redteam, "Skip onboarding") - assertContains(response_redteam, "Add URL") + assert response.status_code == 200 - with pytest.raises(PermissionDenied): - OnboardingSetupScanOOIInfoView.as_view()( - setup_request(rf.get("step_setup_scan_ooi_info"), admin_member.user), - organization_code=admin_member.organization.code, - ) - with pytest.raises(PermissionDenied): - OnboardingSetupScanOOIInfoView.as_view()( - setup_request(rf.get("step_setup_scan_ooi_info"), client_member.user), - organization_code=client_member.organization.code, - ) + assertContains(response, "KAT introduction") + assertContains(response, "Setup scan") + assertContains(response, "Let OpenKAT know what object to scan") + assertContains(response, "Understanding objects") + assertContains(response, "Skip onboarding") + assertContains(response, "Add URL") -def test_onboarding_setup_scan_detail( - rf, superuser_member, admin_member, redteam_member, client_member, mock_organization_view_octopoes, network -): +@pytest.mark.parametrize("member", ["superuser_member", "admin_member", "redteam_member", "client_member"]) +def test_onboarding_setup_scan_detail(request, member, rf, mock_organization_view_octopoes, network): mock_organization_view_octopoes().get.return_value = network - - response_superuser = OnboardingSetupScanOOIAddView.as_view()( - setup_request(rf.get("step_setup_scan_ooi_add"), superuser_member.user), - ooi_type="Network", - organization_code=superuser_member.organization.code, - ) - response_redteam = OnboardingSetupScanOOIAddView.as_view()( - setup_request(rf.get("step_setup_scan_ooi_add"), redteam_member.user), + member = request.getfixturevalue(member) + response = OnboardingSetupScanOOIAddView.as_view()( + setup_request(rf.get("step_setup_scan_ooi_add"), member.user), ooi_type="Network", - organization_code=redteam_member.organization.code, + organization_code=member.organization.code, ) - assert response_redteam.status_code == 200 - assert response_superuser.status_code == 200 + assert response.status_code == 200 - assertContains(response_redteam, "KAT introduction") - assertContains(response_redteam, "Setup scan") - assertContains(response_redteam, "Creating an object") - assertContains(response_redteam, "Dependencies") - assertContains(response_redteam, "Create object") - assertContains(response_redteam, "Skip onboarding") - - with pytest.raises(PermissionDenied): - OnboardingSetupScanOOIAddView.as_view()( - setup_request(rf.get("step_setup_scan_ooi_add"), admin_member.user), - ooi_type="Network", - 
organization_code=admin_member.organization.code, - ) - with pytest.raises(PermissionDenied): - OnboardingSetupScanOOIAddView.as_view()( - setup_request(rf.get("step_setup_scan_ooi_add"), client_member.user), - ooi_type="Network", - organization_code=client_member.organization.code, - ) + assertContains(response, "KAT introduction") + assertContains(response, "Setup scan") + assertContains(response, "Creating an object") + assertContains(response, "Dependencies") + assertContains(response, "Create object") + assertContains(response, "Skip onboarding") +@pytest.mark.parametrize("member", ["superuser_member", "admin_member", "redteam_member", "client_member"]) def test_onboarding_setup_scan_detail_create_ooi( - rf, redteam_member, mock_organization_view_octopoes, network, mock_bytes_client + request, member, rf, mock_organization_view_octopoes, network, mock_bytes_client ): mock_organization_view_octopoes().get.return_value = network + member = request.getfixturevalue(member) - request = setup_request( - rf.post("step_setup_scan_ooi_add", {"network": "Network|internet", "raw": "http://example.org", "web_url": ""}), - redteam_member.user, - ) response = OnboardingSetupScanOOIAddView.as_view()( - request, ooi_type="URL", organization_code=redteam_member.organization.code + setup_request( + rf.post( + "step_setup_scan_ooi_add", {"network": "Network|internet", "raw": "http://example.org", "web_url": ""} + ), + member.user, + ), + ooi_type="URL", + organization_code=member.organization.code, ) assert response.status_code == 302 assert response.headers["Location"] == get_ooi_url( - "step_set_clearance_level", "URL|internet|http://example.org", redteam_member.organization.code + "step_set_clearance_level", "URL|internet|http://example.org", member.organization.code ) @@ -235,132 +164,84 @@ def test_onboarding_set_clearance_level( ) +@pytest.mark.parametrize("member", ["superuser_member", "redteam_member"]) def test_onboarding_select_plugins( + request, + member, rf, - superuser_member, - admin_member, - redteam_member, - client_member, mock_views_katalogus, mock_organization_view_octopoes, network, ): mock_organization_view_octopoes().get.return_value = network + member = request.getfixturevalue(member) + request = setup_request(rf.get("step_setup_scan_select_plugins", {"ooi_id": "Network|internet"}), member.user) - request_superuser = setup_request( - rf.get("step_setup_scan_select_plugins", {"ooi_id": "Network|internet"}), superuser_member.user - ) - request_redteam = setup_request( - rf.get("step_setup_scan_select_plugins", {"ooi_id": "Network|internet"}), redteam_member.user - ) - - request_superuser.session["clearance_level"] = "2" - request_redteam.session["clearance_level"] = "2" - - response_superuser = OnboardingSetupScanSelectPluginsView.as_view()( - request_superuser, organization_code=superuser_member.organization.code - ) - response_redteam = OnboardingSetupScanSelectPluginsView.as_view()( - request_redteam, organization_code=redteam_member.organization.code - ) + request.session["clearance_level"] = "2" - assert response_redteam.status_code == 200 - assert response_superuser.status_code == 200 + response = OnboardingSetupScanSelectPluginsView.as_view()(request, organization_code=member.organization.code) - assertContains(response_redteam, "Setup scan - Enable plugins") - assertContains(response_redteam, "Plugins introduction") - assertContains(response_redteam, "Boefjes") - assertContains(response_redteam, "Normalizers") - assertContains(response_redteam, "Bits") - 
assertContains(response_redteam, "Suggested plugins") - assertContains(response_redteam, "Skip onboarding") - assertContains(response_redteam, "Enable and start scan") + assert response.status_code == 200 - with pytest.raises(PermissionDenied): - OnboardingSetupScanSelectPluginsView.as_view()( - setup_request(rf.get("step_setup_scan_select_plugins", {"ooi_id": "Network|internet"}), admin_member.user), - organization_code=admin_member.organization.code, - ) - with pytest.raises(PermissionDenied): - OnboardingSetupScanSelectPluginsView.as_view()( - setup_request(rf.get("step_setup_scan_select_plugins", {"ooi_id": "Network|internet"}), client_member.user), - organization_code=client_member.organization.code, - ) + assertContains(response, "Setup scan - Enable plugins") + assertContains(response, "Plugins introduction") + assertContains(response, "Boefjes") + assertContains(response, "Normalizers") + assertContains(response, "Bits") + assertContains(response, "Suggested plugins") + assertContains(response, "Skip onboarding") + assertContains(response, "Enable and start scan") -def test_onboarding_ooi_detail_scan( - rf, superuser_member, admin_member, redteam_member, client_member, mock_organization_view_octopoes, network +@pytest.mark.parametrize("member", ["admin_member", "client_member"]) +def test_onboarding_select_plugins_perms( + request, + member, + rf, + mock_views_katalogus, + mock_organization_view_octopoes, + network, ): mock_organization_view_octopoes().get.return_value = network + member = request.getfixturevalue(member) + request = setup_request(rf.get("step_setup_scan_select_plugins", {"ooi_id": "Network|internet"}), member.user) - request_superuser = setup_request( - rf.get("step_setup_scan_ooi_detail", {"ooi_id": "Network|internet"}), superuser_member.user - ) - request_redteam = setup_request( - rf.get("step_setup_scan_ooi_detail", {"ooi_id": "Network|internet"}), redteam_member.user - ) + request.session["clearance_level"] = "2" + with pytest.raises(PermissionDenied): + OnboardingSetupScanSelectPluginsView.as_view()(request, organization_code=member.organization.code) - request_superuser.session["clearance_level"] = "2" - request_redteam.session["clearance_level"] = "2" - response_superuser = OnboardingSetupScanOOIDetailView.as_view()( - request_superuser, organization_code=superuser_member.organization.code - ) - response_redteam = OnboardingSetupScanOOIDetailView.as_view()( - request_redteam, organization_code=redteam_member.organization.code - ) +@pytest.mark.parametrize("member", ["superuser_member", "admin_member", "redteam_member", "client_member"]) +def test_onboarding_ooi_detail_scan(request, member, rf, mock_organization_view_octopoes, network): + mock_organization_view_octopoes().get.return_value = network + member = request.getfixturevalue(member) + request = setup_request(rf.get("step_setup_scan_ooi_detail", {"ooi_id": "Network|internet"}), member.user) + request.session["clearance_level"] = "2" - assert response_redteam.status_code == 200 - assert response_superuser.status_code == 200 + response = OnboardingSetupScanOOIDetailView.as_view()(request, organization_code=member.organization.code) - assertContains(response_redteam, "KAT introduction") - assertContains(response_redteam, "Setup scan") - assertContains(response_redteam, "Creating an object") - assertContains(response_redteam, "Network") - assertContains(response_redteam, "Skip onboarding") - assertContains(response_redteam, "Start scanning") + assert response.status_code == 200 - with 
pytest.raises(PermissionDenied): - OnboardingSetupScanOOIDetailView.as_view()( - setup_request(rf.get("step_setup_scan_ooi_detail", {"ooi_id": "Network|internet"}), admin_member.user), - organization_code=admin_member.organization.code, - ) - with pytest.raises(PermissionDenied): - OnboardingSetupScanOOIDetailView.as_view()( - setup_request(rf.get("step_setup_scan_ooi_detail", {"ooi_id": "Network|internet"}), client_member.user), - organization_code=client_member.organization.code, - ) + assertContains(response, "KAT introduction") + assertContains(response, "Setup scan") + assertContains(response, "Creating an object") + assertContains(response, "Network") + assertContains(response, "Skip onboarding") + assertContains(response, "Start scanning") -def test_onboarding_scanning_boefjes( - rf, superuser_member, admin_member, redteam_member, client_member, mock_organization_view_octopoes, network -): +@pytest.mark.parametrize("member", ["superuser_member", "admin_member", "redteam_member", "client_member"]) +def test_onboarding_scanning_boefjes(request, member, rf, mock_organization_view_octopoes, network): mock_organization_view_octopoes().get.return_value = network - - response_superuser = OnboardingReportView.as_view()( - setup_request(rf.get("step_report", {"ooi_id": "Network|internet"}), superuser_member.user), - organization_code=superuser_member.organization.code, - ) - response_redteam = OnboardingReportView.as_view()( - setup_request(rf.get("step_report", {"ooi_id": "Network|internet"}), redteam_member.user), - organization_code=redteam_member.organization.code, + member = request.getfixturevalue(member) + response = OnboardingReportView.as_view()( + setup_request(rf.get("step_report", {"ooi_id": "Network|internet"}), member.user), + organization_code=member.organization.code, ) - assert response_superuser.status_code == 200 - assert response_redteam.status_code == 200 + assert response.status_code == 200 - assertContains(response_redteam, "KAT introduction") - assertContains(response_redteam, "Report") - assertContains(response_redteam, "Boefjes are scanning") - assertContains(response_redteam, "Open my DNS-report") - - with pytest.raises(PermissionDenied): - OnboardingReportView.as_view()( - setup_request(rf.get("step_report", {"ooi_id": "Network|internet"}), admin_member.user), - organization_code=admin_member.organization.code, - ) - with pytest.raises(PermissionDenied): - OnboardingReportView.as_view()( - setup_request(rf.get("step_report", {"ooi_id": "Network|internet"}), client_member.user), - organization_code=client_member.organization.code, - ) + assertContains(response, "KAT introduction") + assertContains(response, "Report") + assertContains(response, "Boefjes are scanning") + assertContains(response, "Open my DNS-report") diff --git a/rocky/tools/management/commands/setup_dev_account.py b/rocky/tools/management/commands/setup_dev_account.py index f1d0604439a..ed3ffa42a87 100644 --- a/rocky/tools/management/commands/setup_dev_account.py +++ b/rocky/tools/management/commands/setup_dev_account.py @@ -45,6 +45,7 @@ def handle(self, *args, **options): "view_organizationmember", "add_organizationmember", "change_organization", + "can_scan_organization", "change_organizationmember", "can_delete_oois", "add_indemnification", diff --git a/rocky/tools/migrations/0040_update_admin_permission.py b/rocky/tools/migrations/0040_update_admin_permission.py new file mode 100644 index 00000000000..84a507b8d33 --- /dev/null +++ b/rocky/tools/migrations/0040_update_admin_permission.py @@ -0,0 
+1,33 @@ +# Generated by Django 4.2.1 on 2023-07-18 17:18 +from django.contrib.auth.management import create_permissions +from django.db import migrations + +from tools.models import GROUP_ADMIN + + +def migrate_permissions(apps, schema_editor): + for app_config in apps.get_app_configs(): + app_config.models_module = True + create_permissions(app_config, apps=apps, verbosity=0) + app_config.models_module = None + + +def add_admin_permission(apps, schema_editor): + Group = apps.get_model("auth", "Group") + Permission = apps.get_model("auth", "Permission") + try: + admin = Group.objects.get(name=GROUP_ADMIN) + admin.permissions.add(Permission.objects.get(codename="can_scan_organization")) + except Group.DoesNotExist: + pass + + +class Migration(migrations.Migration): + dependencies = [ + ("tools", "0039_update_permissions"), + ] + + operations = [ + migrations.RunPython(migrate_permissions), + migrations.RunPython(add_admin_permission), + ] diff --git a/rocky/tools/models.py b/rocky/tools/models.py index 217335cabc3..e0391041867 100644 --- a/rocky/tools/models.py +++ b/rocky/tools/models.py @@ -222,18 +222,6 @@ class STATUSES(models.TextChoices): default=-1, validators=[MinValueValidator(-1), MaxValueValidator(max(scan_levels))] ) - @cached_property - def is_admin(self) -> bool: - return self.groups.filter(name=GROUP_ADMIN).exists() - - @cached_property - def is_redteam(self) -> bool: - return self.groups.filter(name=GROUP_REDTEAM).exists() - - @cached_property - def is_client(self) -> bool: - return self.groups.filter(name=GROUP_CLIENT).exists() - @cached_property def all_permissions(self) -> Set[str]: if self.user.is_active and self.user.is_superuser: From 26145cecc39a57360e610ed82f2764b566cdb827 Mon Sep 17 00:00:00 2001 From: Donny Peeters <46660228+Donnype@users.noreply.github.com> Date: Wed, 19 Jul 2023 14:16:37 +0200 Subject: [PATCH 12/18] Add `task_id` as a query parameter to the `GET /origins` endpoint (#1414) --- octopoes/octopoes/api/router.py | 13 +++- octopoes/octopoes/connector/octopoes.py | 23 +++++-- octopoes/octopoes/core/service.py | 10 +-- .../repositories/origin_repository.py | 69 +++++++++---------- .../tests/integration/test_api_connector.py | 18 ++++- .../tests/robot/04_save_declaration.robot | 2 +- octopoes/tests/robot/07_rerun_bits.robot | 2 +- octopoes/tests/test_octopoes_service.py | 4 +- octopoes/tests/test_query_builder_new.py | 18 +++++ rocky/rocky/views/mixins.py | 2 +- 10 files changed, 109 insertions(+), 52 deletions(-) diff --git a/octopoes/octopoes/api/router.py b/octopoes/octopoes/api/router.py index 80c5f6556c8..97d5b26810e 100644 --- a/octopoes/octopoes/api/router.py +++ b/octopoes/octopoes/api/router.py @@ -204,9 +204,18 @@ def get_tree( def list_origins( octopoes: OctopoesService = Depends(octopoes_service), valid_time: datetime = Depends(extract_valid_time), - reference: Reference = Depends(extract_reference), + source: Optional[Reference] = Query(None), + result: Optional[Reference] = Query(None), + task_id: Optional[str] = Query(None), + origin_type: Optional[OriginType] = Query(None), ) -> List[Origin]: - return octopoes.origin_repository.list_by_result(reference, valid_time) + return octopoes.origin_repository.list( + valid_time, + task_id=task_id, + source=source, + result=result, + origin_type=origin_type, + ) @router.get("/origin_parameters", tags=["Origins"]) diff --git a/octopoes/octopoes/connector/octopoes.py b/octopoes/octopoes/connector/octopoes.py index 35bdb41af68..7b429ca42c7 100644 --- a/octopoes/octopoes/connector/octopoes.py +++ 
b/octopoes/octopoes/connector/octopoes.py @@ -24,7 +24,7 @@ from octopoes.models.exception import ObjectNotFoundException from octopoes.models.explanation import InheritanceSection from octopoes.models.ooi.findings import Finding, RiskLevelSeverity -from octopoes.models.origin import Origin, OriginParameter +from octopoes.models.origin import Origin, OriginParameter, OriginType from octopoes.models.pagination import Paginated from octopoes.models.tree import ReferenceTree from octopoes.models.types import OOIType @@ -131,9 +131,24 @@ def get_tree( ) return ReferenceTree.parse_obj(res.json()) - def list_origins(self, reference: Reference, valid_time: Optional[datetime] = None) -> List[Origin]: - params = {"reference": str(reference), "valid_time": valid_time} - res = self.session.get(f"/{self.client}/origins", params=params) + def list_origins( + self, + valid_time: Optional[datetime] = None, + source: Optional[Reference] = None, + result: Optional[Reference] = None, + task_id: Optional[str] = None, + origin_type: Optional[OriginType] = None, + ) -> List[Origin]: + res = self.session.get( + f"/{self.client}/origins", + params={ + "valid_time": valid_time, + "source": source, + "result": result, + "task_id": task_id, + "origin_type": origin_type, + }, + ) return parse_obj_as(List[Origin], res.json()) def save_observation(self, observation: Observation) -> None: diff --git a/octopoes/octopoes/core/service.py b/octopoes/octopoes/core/service.py index 16a8c34d877..02afddd3547 100644 --- a/octopoes/octopoes/core/service.py +++ b/octopoes/octopoes/core/service.py @@ -117,7 +117,7 @@ def get_ooi_tree( return tree def _delete_ooi(self, reference: Reference, valid_time: datetime) -> None: - referencing_origins = self.origin_repository.list_by_result(reference, valid_time) + referencing_origins = self.origin_repository.list(valid_time, result=reference) if not referencing_origins: self.ooi_repository.delete(reference, valid_time) @@ -368,7 +368,7 @@ def _on_create_ooi(self, event: OOIDBEvent) -> None: self.origin_parameter_repository.save(origin_parameter, event.valid_time) def _on_update_ooi(self, event: OOIDBEvent) -> None: - inference_origins = self.origin_repository.list_by_source(event.new_data.reference, valid_time=event.valid_time) + inference_origins = self.origin_repository.list(event.valid_time, source=event.new_data.reference) inference_params = self.origin_parameter_repository.list_by_reference( event.new_data.reference, valid_time=event.valid_time ) @@ -383,7 +383,7 @@ def _on_delete_ooi(self, event: OOIDBEvent) -> None: reference = event.old_data.reference # delete related origins to which it is a source - origins = self.origin_repository.list_by_source(reference, event.valid_time) + origins = self.origin_repository.list(event.valid_time, source=reference) for origin in origins: self.origin_repository.delete(origin, event.valid_time) @@ -435,7 +435,7 @@ def _on_delete_origin_parameter(self, event: OriginParameterDBEvent) -> None: return def _run_inferences(self, event: ScanProfileDBEvent) -> None: - inference_origins = self.origin_repository.list_by_source(event.reference, valid_time=event.valid_time) + inference_origins = self.origin_repository.list(event.valid_time, source=event.reference) inference_origins = [o for o in inference_origins if o.origin_type == OriginType.INFERENCE] for inference_origin in inference_origins: self._run_inference(inference_origin, event.valid_time) @@ -557,7 +557,7 @@ def recalculate_bits(self) -> int: # TODO: remove all Origins and Origin Parameters, 
which are no longer in use # rerun all existing bits - origins = self.origin_repository.list(origin_type=OriginType.INFERENCE, valid_time=valid_time) + origins = self.origin_repository.list(valid_time, origin_type=OriginType.INFERENCE) for origin in origins: self._run_inference(origin, valid_time) bit_counter.update({origin.method}) diff --git a/octopoes/octopoes/repositories/origin_repository.py b/octopoes/octopoes/repositories/origin_repository.py index 8a2856d8761..3a334059669 100644 --- a/octopoes/octopoes/repositories/origin_repository.py +++ b/octopoes/octopoes/repositories/origin_repository.py @@ -1,7 +1,7 @@ from datetime import datetime from http import HTTPStatus from logging import getLogger -from typing import Any, Dict, List +from typing import Any, Dict, List, Optional from requests import HTTPError @@ -29,18 +29,20 @@ def get(self, origin_id: str, valid_time: datetime) -> Origin: def save(self, origin: Origin, valid_time: datetime) -> None: raise NotImplementedError - def list_by_result(self, reference: Reference, valid_time: datetime) -> List[Origin]: - raise NotImplementedError - - def list_by_source(self, reference: Reference, valid_time: datetime) -> List[Origin]: + def list( + self, + valid_time: datetime, + *, + task_id: Optional[str] = None, + source: Optional[Reference] = None, + result: Optional[Reference] = None, + origin_type: Optional[OriginType] = None, + ) -> List[Origin]: raise NotImplementedError def delete(self, origin: Origin, valid_time: datetime) -> None: raise NotImplementedError - def list(self, origin_type: OriginType, valid_time: datetime) -> List[Origin]: - raise NotImplementedError - class XTDBOriginRepository(OriginRepository): xtdb_type: XTDBType = XTDBType.CRUX @@ -65,27 +67,35 @@ def serialize(cls, origin: Origin) -> Dict[str, Any]: def deserialize(cls, data: Dict[str, Any]) -> Origin: return Origin.parse_obj(data) - def list_by_result(self, reference: Reference, valid_time: datetime) -> List[Origin]: - query = generate_pull_query( - self.xtdb_type, - FieldSet.ALL_FIELDS, - { - "result": str(reference), - "type": Origin.__name__, - }, - ) - results = self.session.client.query(query, valid_time=valid_time) - return [self.deserialize(r[0]) for r in results] + def list( + self, + valid_time: datetime, + *, + task_id: Optional[str] = None, + source: Optional[Reference] = None, + result: Optional[Reference] = None, + origin_type: Optional[OriginType] = None, + ) -> List[Origin]: + where_parameters = {"type": Origin.__name__} + + if task_id: + where_parameters["task_id"] = task_id + + if source: + where_parameters["source"] = str(source) + + if result: + where_parameters["result"] = str(result) + + if origin_type: + where_parameters["origin_type"] = origin_type.value - def list_by_source(self, reference, valid_time) -> List[Origin]: query = generate_pull_query( self.xtdb_type, FieldSet.ALL_FIELDS, - { - "source": str(reference), - "type": Origin.__name__, - }, + where_parameters, ) + results = self.session.client.query(query, valid_time=valid_time) return [self.deserialize(r[0]) for r in results] @@ -126,14 +136,3 @@ def delete(self, origin: Origin, valid_time: datetime) -> None: old_data=origin, ) self.session.listen_post_commit(lambda: self.event_manager.publish(event)) - - def list(self, origin_type: OriginType, valid_time: datetime) -> List[Origin]: - query = generate_pull_query( - self.xtdb_type, - FieldSet.ALL_FIELDS, - { - "origin_type": origin_type.value, - }, - ) - results = self.session.client.query(query, valid_time=valid_time) - return 
[self.deserialize(r[0]) for r in results] diff --git a/octopoes/tests/integration/test_api_connector.py b/octopoes/tests/integration/test_api_connector.py index 46c10336e87..46d707f4602 100644 --- a/octopoes/tests/integration/test_api_connector.py +++ b/octopoes/tests/integration/test_api_connector.py @@ -11,6 +11,7 @@ from octopoes.models import OOI, DeclaredScanProfile, Reference, ScanLevel from octopoes.models.ooi.dns.zone import Hostname from octopoes.models.ooi.network import Network +from octopoes.models.origin import OriginType from octopoes.repositories.ooi_repository import XTDBOOIRepository if os.environ.get("CI") != "1": @@ -29,11 +30,13 @@ def test_bulk_operations(octopoes_api_connector: OctopoesAPIConnector, valid_tim ) ) hostnames: List[OOI] = [Hostname(network=network.reference, name=f"test{i}") for i in range(10)] + task_id = uuid.uuid4().hex + octopoes_api_connector.save_observation( Observation( method="normalizer_id", source=network.reference, - task_id=str(uuid.uuid4()), + task_id=task_id, valid_time=valid_time, result=hostnames, ) @@ -47,6 +50,19 @@ def test_bulk_operations(octopoes_api_connector: OctopoesAPIConnector, valid_tim assert octopoes_api_connector.list(types={Hostname}).count == 10 assert octopoes_api_connector.list(types={Network, Hostname}).count == 11 + assert len(octopoes_api_connector.list_origins(task_id="abc")) == 0 + origins = octopoes_api_connector.list_origins(task_id=task_id) + assert len(origins) == 1 + assert origins[0].dict() == { + "method": "normalizer_id", + "origin_type": OriginType.OBSERVATION, + "source": network.reference, + "result": [hostname.reference for hostname in hostnames], + "task_id": task_id, + } + + assert len(octopoes_api_connector.list_origins(result=hostnames[0].reference)) == 1 + # Delete even-numbered test hostnames octopoes_api_connector.delete_many([Reference.from_str(f"Hostname|test|test{i}") for i in range(0, 10, 2)]) assert octopoes_api_connector.list(types={Network, Hostname}).count == 6 diff --git a/octopoes/tests/robot/04_save_declaration.robot b/octopoes/tests/robot/04_save_declaration.robot index 6adc64aee11..f3f1e9c6037 100644 --- a/octopoes/tests/robot/04_save_declaration.robot +++ b/octopoes/tests/robot/04_save_declaration.robot @@ -27,7 +27,7 @@ Teardown Test Verify Origin Present [Arguments] ${reference} ${origin_task_id} - ${response} Get ${OCTOPOES_URI}/origins params=reference=${reference} + ${response} Get ${OCTOPOES_URI}/origins params=result=${reference} Should Be Equal As Integers ${response.status_code} 200 ${length} Get Length ${response.json()} Should Be Equal As Integers ${length} 1 diff --git a/octopoes/tests/robot/07_rerun_bits.robot b/octopoes/tests/robot/07_rerun_bits.robot index 9ed22fb994f..039df46d1f4 100644 --- a/octopoes/tests/robot/07_rerun_bits.robot +++ b/octopoes/tests/robot/07_rerun_bits.robot @@ -50,7 +50,7 @@ Teardown Test Verify Origin Present [Arguments] ${reference} ${expected_amout_of_origins} - ${response} Get ${OCTOPOES_URI}/origins params=reference=${reference} + ${response} Get ${OCTOPOES_URI}/origins params=result=${reference} Should Be Equal As Integers ${response.status_code} 200 ${length} Get Length ${response.json()} Should Be Equal As Integers ${length} ${expected_amout_of_origins} diff --git a/octopoes/tests/test_octopoes_service.py b/octopoes/tests/test_octopoes_service.py index 406698e5f0b..3a9e593a306 100644 --- a/octopoes/tests/test_octopoes_service.py +++ b/octopoes/tests/test_octopoes_service.py @@ -93,7 +93,7 @@ def test_on_update_origin(octopoes_service, 
valid_time): ) # and the deferenced ooi is no longer referred to by any origins - octopoes_service.origin_repository.list_by_result.return_value = [] + octopoes_service.origin_repository.list.return_value = [] octopoes_service.process_event(event) # the ooi should be deleted @@ -105,7 +105,7 @@ def test_on_update_origin(octopoes_service, valid_time): @pytest.mark.parametrize("new_data", [EmptyScanProfile(reference="test_reference"), None]) @pytest.mark.parametrize("old_data", [EmptyScanProfile(reference="test_reference"), None]) def test_on_create_scan_profile(octopoes_service, new_data, old_data, bit_runner: MagicMock): - octopoes_service.origin_repository.list_by_source.return_value = [ + octopoes_service.origin_repository.list.return_value = [ Origin( origin_type=OriginType.INFERENCE, method="check-csp-header", diff --git a/octopoes/tests/test_query_builder_new.py b/octopoes/tests/test_query_builder_new.py index 432881f804d..596da3d1d69 100644 --- a/octopoes/tests/test_query_builder_new.py +++ b/octopoes/tests/test_query_builder_new.py @@ -1,6 +1,7 @@ from unittest import TestCase from octopoes.config.settings import XTDBType +from octopoes.models.origin import Origin from octopoes.xtdb import ( Datamodel, FieldSet, @@ -215,6 +216,7 @@ def test_generate_query_sucess(self): def test_escape_injection_success(self): query = generate_pull_query( + XTDBType.CRUX, FieldSet.ALL_FIELDS, where={"attr_1": 'test_value_with_quotes" and injection'}, ) @@ -227,3 +229,19 @@ def test_escape_injection_success(self): expected_query, query, ) + + def test_get_origin_by_task_id(self): + query = generate_pull_query( + XTDBType.CRUX, + FieldSet.ALL_FIELDS, + { + "task_id": "5c864d45a4364a81a5fecfd8b359cf9d", + "type": Origin.__name__, + }, + ) + + expected_query = ( + "{:query {:find [(pull ?e [*])] :in [_task_id _type] :where [[?e :task_id _task_id] " + '[?e :type _type]] } :in-args [ "5c864d45a4364a81a5fecfd8b359cf9d" "Origin" ]}' + ) + self.assertEqual(expected_query, query) diff --git a/rocky/rocky/views/mixins.py b/rocky/rocky/views/mixins.py index f3347c80585..e70ed92feeb 100644 --- a/rocky/rocky/views/mixins.py +++ b/rocky/rocky/views/mixins.py @@ -77,7 +77,7 @@ def get_origins( organization: Organization, ) -> Tuple[List[OriginData], List[OriginData], List[OriginData]]: try: - origins = self.octopoes_api_connector.list_origins(reference, valid_time) + origins = self.octopoes_api_connector.list_origins(valid_time, result=reference) origin_data = [OriginData(origin=origin) for origin in origins] for origin in origin_data: From 8f2b5a2ca7c3b051dd32123efd2ac2e0e26956c3 Mon Sep 17 00:00:00 2001 From: Jeroen Dekkers Date: Wed, 19 Jul 2023 14:34:52 +0200 Subject: [PATCH 13/18] Add and use our own CVE API (#1383) Co-authored-by: Patrick --- .github/workflows/debian_package.yml | 12 +- .pre-commit-config.yaml | 1 + .../plugins/kat_cve_finding_types/boefje.json | 4 +- .../plugins/kat_cve_finding_types/main.py | 4 +- .../kat_cve_finding_types/normalize.py | 22 ++- .../plugins/kat_cve_finding_types/schema.json | 13 ++ .../examples/inputs/cve-result-with-cvss.json | 170 +++++++++--------- .../inputs/cve-result-without-cvss.json | 131 ++++++++------ cveapi/cveapi.py | 94 ++++++++++ cveapi/debian/control | 18 ++ cveapi/debian/copyright | 10 ++ cveapi/debian/kat-cveapi.service | 31 ++++ cveapi/debian/kat-cveapi.sysusers | 1 + cveapi/debian/kat-cveapi.timer | 11 ++ cveapi/debian/rules | 8 + .../packaging/scripts/build-debian-package.sh | 18 ++ cveapi/poetry.lock | 150 ++++++++++++++++ cveapi/pyproject.toml | 21 +++ 
docs/source/release_notes/1.10.rst | 4 + docs/source/technical_design/cveapi.rst | 35 ++++ docs/source/technical_design/index.rst | 1 + pyproject.toml | 1 + .../templates/step_1_introduction.html | 2 +- 23 files changed, 613 insertions(+), 149 deletions(-) create mode 100644 boefjes/boefjes/plugins/kat_cve_finding_types/schema.json create mode 100644 cveapi/cveapi.py create mode 100644 cveapi/debian/control create mode 100644 cveapi/debian/copyright create mode 100644 cveapi/debian/kat-cveapi.service create mode 100644 cveapi/debian/kat-cveapi.sysusers create mode 100644 cveapi/debian/kat-cveapi.timer create mode 100755 cveapi/debian/rules create mode 100755 cveapi/packaging/scripts/build-debian-package.sh create mode 100644 cveapi/poetry.lock create mode 100644 cveapi/pyproject.toml create mode 100644 docs/source/technical_design/cveapi.rst diff --git a/.github/workflows/debian_package.yml b/.github/workflows/debian_package.yml index 3567e69d50b..849fb66fc6a 100644 --- a/.github/workflows/debian_package.yml +++ b/.github/workflows/debian_package.yml @@ -27,6 +27,9 @@ jobs: bytes: - 'bytes/**' - '.github/workflows/debian_package.yml' + cveapi: + - 'cveapi/**' + - '.github/workflows/debian_package.yml' keiko: - 'keiko/**' - '.github/workflows/debian_package.yml' @@ -49,7 +52,12 @@ jobs: matrix: dist: [debian11, debian12, ubuntu22.04] # On main, release branches and tags we always want to build all the packages - package: ${{ github.event_name == 'push' && fromJSON('["boefjes", "bytes", "keiko", "mula", "octopoes", "rocky"]') || fromJSON(needs.changes.outputs.packages) }} + package: ${{ github.event_name == 'push' && fromJSON('["boefjes", "bytes", "cveapi", "keiko", "mula", "octopoes", "rocky"]') || fromJSON(needs.changes.outputs.packages) }} + exclude: + - package: cveapi + dist: debian11 + - package: cveapi + dist: ubuntu22.04 runs-on: ubuntu-22.04 env: PKG_NAME: kat-${{ matrix.package }} @@ -89,4 +97,4 @@ jobs: uses: actions/upload-artifact@v3 with: name: ${{env.PKG_NAME}}_${{ env.RELEASE_VERSION }}_${{ matrix.dist }}.deb - path: ${{matrix.package}}/build/${{env.PKG_NAME}}_${{ env.RELEASE_VERSION }}_amd64.deb + path: ${{matrix.package}}/build/${{env.PKG_NAME}}_${{ env.RELEASE_VERSION }}_${{ matrix.package == 'cveapi' && 'all' || 'amd64' }}.deb diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 12594662dcd..c58360e9de8 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -81,6 +81,7 @@ repos: requirements-.*.txt$ | retirejs.json$ | ^boefjes/boefjes/plugins/kat_fierce/lists | + ^boefjes/tests/examples/inputs/cve-result-without-cvss.json | ^keiko/glossaries | ^keiko/templates/.*/template.tex$ | ^rocky/assets/js/vendor | diff --git a/boefjes/boefjes/plugins/kat_cve_finding_types/boefje.json b/boefjes/boefjes/plugins/kat_cve_finding_types/boefje.json index 8ff4e2a61cd..49e93918840 100644 --- a/boefjes/boefjes/plugins/kat_cve_finding_types/boefje.json +++ b/boefjes/boefjes/plugins/kat_cve_finding_types/boefje.json @@ -8,7 +8,9 @@ "produces": [ "CVEFindingType" ], - "environment_keys": [], + "environment_keys": [ + "CVEAPI_URL" + ], "scan_level": 0, "enabled": true } diff --git a/boefjes/boefjes/plugins/kat_cve_finding_types/main.py b/boefjes/boefjes/plugins/kat_cve_finding_types/main.py index 36337d77753..3c4a0e570da 100644 --- a/boefjes/boefjes/plugins/kat_cve_finding_types/main.py +++ b/boefjes/boefjes/plugins/kat_cve_finding_types/main.py @@ -1,3 +1,4 @@ +from os import getenv from typing import List, Tuple, Union import requests @@ -7,6 +8,7 @@ def 
run(boefje_meta: BoefjeMeta) -> List[Tuple[set, Union[bytes, str]]]: cve_id = boefje_meta.arguments["input"]["id"] - response = requests.get(f"https://v1.cveapi.com/{cve_id}.json") + cveapi_url = getenv("CVEAPI_URL", "https://cve.openkat.dev/v1") + response = requests.get(f"{cveapi_url}/{cve_id}.json") return [(set(), response.content)] diff --git a/boefjes/boefjes/plugins/kat_cve_finding_types/normalize.py b/boefjes/boefjes/plugins/kat_cve_finding_types/normalize.py index 6fd03f15dcc..4ba84418495 100644 --- a/boefjes/boefjes/plugins/kat_cve_finding_types/normalize.py +++ b/boefjes/boefjes/plugins/kat_cve_finding_types/normalize.py @@ -29,17 +29,27 @@ def run(normalizer_meta: NormalizerMeta, raw: Union[bytes, str]) -> Iterable[OOI cve_finding_type_id = normalizer_meta.raw_data.boefje_meta.arguments["input"]["id"] data = json.loads(raw) - descriptions = data["cve"]["description"]["description_data"] + descriptions = data["cve"]["descriptions"] english_description = [description for description in descriptions if description["lang"] == "en"][0] - if data["impact"] == {}: + if not data["cve"]["metrics"]: risk_severity = RiskLevelSeverity.UNKNOWN risk_score = None else: - try: - risk_score = data["impact"]["baseMetricV3"]["cvssV3"]["baseScore"] - except KeyError: - risk_score = data["impact"]["baseMetricV2"]["cvssV2"]["baseScore"] + metrics = data["cve"]["metrics"] + if "cvssMetricV31" in metrics: + cvss = metrics["cvssMetricV31"] + elif "cvssMetricV30" in metrics: + cvss = metrics["cvssMetricV30"] + else: + cvss = metrics["cvssMetricV20"] + + for item in cvss: + if item["type"] == "Primary": + risk_score = item["cvssData"]["baseScore"] + break + else: + risk_score = cvss[0]["cvssData"]["baseScore"] risk_severity = get_risk_level(risk_score) yield CVEFindingType( diff --git a/boefjes/boefjes/plugins/kat_cve_finding_types/schema.json b/boefjes/boefjes/plugins/kat_cve_finding_types/schema.json new file mode 100644 index 00000000000..78d6295db6a --- /dev/null +++ b/boefjes/boefjes/plugins/kat_cve_finding_types/schema.json @@ -0,0 +1,13 @@ +{ + "title": "Arguments", + "type": "object", + "properties": { + "CVEAPI_URL": { + "title": "CVEAPI_URL", + "maxLength": 2048, + "type": "string", + "description": "URL of the CVE API, defaults to https://cve.openkat.dev/v1", + "default": "https://cve.openkat.dev/v1" + } + } +} diff --git a/boefjes/tests/examples/inputs/cve-result-with-cvss.json b/boefjes/tests/examples/inputs/cve-result-with-cvss.json index 2d0f91f69a3..2d17c322c8b 100644 --- a/boefjes/tests/examples/inputs/cve-result-with-cvss.json +++ b/boefjes/tests/examples/inputs/cve-result-with-cvss.json @@ -1,101 +1,97 @@ { "cve": { - "data_type": "CVE", - "data_format": "MITRE", - "data_version": "4.0", - "CVE_data_meta": { - "ID": "CVE-2021-46882", - "ASSIGNER": "psirt@huawei.com" - }, - "problemtype": { - "problemtype_data": [ - { - "description": [ - { - "lang": "en", - "value": "CWE-120" - } - ] - } - ] - }, - "references": { - "reference_data": [ + "id": "CVE-2021-46882", + "sourceIdentifier": "psirt@huawei.com", + "published": "2023-05-26T17:15:12.703", + "lastModified": "2023-05-29T03:38:59.390", + "vulnStatus": "Analyzed", + "descriptions": [ + { + "lang": "en", + "value": "The video framework has memory overwriting caused by addition overflow. Successful exploitation of this vulnerability may affect availability." 
+ } + ], + "metrics": { + "cvssMetricV31": [ { - "url": "https://consumer.huawei.com/en/support/bulletin/2023/5/", - "name": "https://consumer.huawei.com/en/support/bulletin/2023/5/", - "refsource": "MISC", - "tags": [ - "Vendor Advisory" - ] + "source": "nvd@nist.gov", + "type": "Primary", + "cvssData": { + "version": "3.1", + "vectorString": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H", + "attackVector": "NETWORK", + "attackComplexity": "LOW", + "privilegesRequired": "NONE", + "userInteraction": "NONE", + "scope": "UNCHANGED", + "confidentialityImpact": "NONE", + "integrityImpact": "NONE", + "availabilityImpact": "HIGH", + "baseScore": 7.5, + "baseSeverity": "HIGH" + }, + "exploitabilityScore": 3.9, + "impactScore": 3.6 } ] }, - "description": { - "description_data": [ - { - "lang": "en", - "value": "The video framework has memory overwriting caused by addition overflow. Successful exploitation of this vulnerability may affect availability." - } - ] - } - }, - "configurations": { - "CVE_data_version": "4.0", - "nodes": [ + "weaknesses": [ { - "operator": "OR", - "children": [], - "cpe_match": [ - { - "vulnerable": true, - "cpe23Uri": "cpe:2.3:o:huawei:emui:10.1.0:*:*:*:*:*:*:*", - "cpe_name": [] - }, + "source": "nvd@nist.gov", + "type": "Primary", + "description": [ { - "vulnerable": true, - "cpe23Uri": "cpe:2.3:o:huawei:emui:10.1.1:*:*:*:*:*:*:*", - "cpe_name": [] - }, - { - "vulnerable": true, - "cpe23Uri": "cpe:2.3:o:huawei:emui:11.0.0:*:*:*:*:*:*:*", - "cpe_name": [] - }, - { - "vulnerable": true, - "cpe23Uri": "cpe:2.3:o:huawei:emui:12.0.0:*:*:*:*:*:*:*", - "cpe_name": [] - }, + "lang": "en", + "value": "CWE-120" + } + ] + } + ], + "configurations": [ + { + "nodes": [ { - "vulnerable": true, - "cpe23Uri": "cpe:2.3:o:huawei:emui:12.0.1:*:*:*:*:*:*:*", - "cpe_name": [] + "operator": "OR", + "negate": false, + "cpeMatch": [ + { + "vulnerable": true, + "criteria": "cpe:2.3:o:huawei:emui:10.1.0:*:*:*:*:*:*:*", + "matchCriteriaId": "66AC7F91-917C-40A6-9983-A339EFB091F1" + }, + { + "vulnerable": true, + "criteria": "cpe:2.3:o:huawei:emui:10.1.1:*:*:*:*:*:*:*", + "matchCriteriaId": "A7FF0AD1-22C2-423B-822A-E6496CEDAB02" + }, + { + "vulnerable": true, + "criteria": "cpe:2.3:o:huawei:emui:11.0.0:*:*:*:*:*:*:*", + "matchCriteriaId": "0B701EC6-8208-4D22-95A6-B07D471A8A8B" + }, + { + "vulnerable": true, + "criteria": "cpe:2.3:o:huawei:emui:12.0.0:*:*:*:*:*:*:*", + "matchCriteriaId": "A974CA73-84E8-480B-BB4C-4A81D0C985B2" + }, + { + "vulnerable": true, + "criteria": "cpe:2.3:o:huawei:emui:12.0.1:*:*:*:*:*:*:*", + "matchCriteriaId": "2DF07E7F-3A18-4B74-B73D-DF3647C2A48F" + } + ] } ] } + ], + "references": [ + { + "url": "https://consumer.huawei.com/en/support/bulletin/2023/5/", + "source": "psirt@huawei.com", + "tags": [ + "Vendor Advisory" + ] + } ] - }, - "impact": { - "baseMetricV3": { - "cvssV3": { - "version": "3.1", - "vectorString": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:N/A:H", - "attackVector": "NETWORK", - "attackComplexity": "LOW", - "privilegesRequired": "NONE", - "userInteraction": "NONE", - "scope": "UNCHANGED", - "confidentialityImpact": "NONE", - "integrityImpact": "NONE", - "availabilityImpact": "HIGH", - "baseScore": 7.5, - "baseSeverity": "HIGH" - }, - "exploitabilityScore": 3.9, - "impactScore": 3.6 - } - }, - "publishedDate": "2023-05-26T17:15Z", - "lastModifiedDate": "2023-05-29T03:38Z" + } } diff --git a/boefjes/tests/examples/inputs/cve-result-without-cvss.json b/boefjes/tests/examples/inputs/cve-result-without-cvss.json index 637b15bc31d..5e1de67a6c9 100644 --- 
a/boefjes/tests/examples/inputs/cve-result-without-cvss.json +++ b/boefjes/tests/examples/inputs/cve-result-without-cvss.json @@ -1,55 +1,84 @@ { "cve": { - "data_type": "CVE", - "data_format": "MITRE", - "data_version": "4.0", - "CVE_data_meta": { - "ID": "CVE-2023-2434", - "ASSIGNER": "security@wordfence.com" + "id": "CVE-2023-2434", + "sourceIdentifier": "security@wordfence.com", + "published": "2023-05-31T04:15:10.070", + "lastModified": "2023-06-06T16:27:06.360", + "vulnStatus": "Analyzed", + "descriptions": [ + { + "lang": "en", + "value": "The Nested Pages plugin for WordPress is vulnerable to unauthorized loss of data due to a missing capability check on the 'reset' function in versions up to, and including, 3.2.3. This makes it possible for authenticated attackers, with editor-level permissions and above, to reset plugin settings." + }, + { + "lang": "es", + "value": "El plugin Nested Pages para WordPress es vulnerable a la pérdida no autorizada de datos debido a la falta de capacidad de comprobación de la función \"reset\" en las versiones hasta la 3.2.3 inclusive. Esto hace posible que atacantes autenticados, con permisos de nivel de editor y superiores, restablezcan la configuración del plugin. " + } + ], + "metrics": { }, - "problemtype": { - "problemtype_data": [ - { - "description": [] - } - ] - }, - "references": { - "reference_data": [ - { - "url": "https://plugins.trac.wordpress.org/changeset?sfp_email=&sfph_mail=&reponame=&new=2919175%40wp-nested-pages&old=2814681%40wp-nested-pages&sfp_email=&sfph_mail=", - "name": "https://plugins.trac.wordpress.org/changeset?sfp_email=&sfph_mail=&reponame=&new=2919175%40wp-nested-pages&old=2814681%40wp-nested-pages&sfp_email=&sfph_mail=", - "refsource": "MISC", - "tags": [] - }, - { - "url": "https://www.wordfence.com/threat-intel/vulnerabilities/id/8c3e61e9-3610-41b5-9820-28012dc657fd?source=cve", - "name": "https://www.wordfence.com/threat-intel/vulnerabilities/id/8c3e61e9-3610-41b5-9820-28012dc657fd?source=cve", - "refsource": "MISC", - "tags": [] - }, - { - "url": "https://plugins.trac.wordpress.org/browser/wp-nested-pages/tags/3.2.3/app/Form/Listeners/ResetSettings.php#L12", - "name": "https://plugins.trac.wordpress.org/browser/wp-nested-pages/tags/3.2.3/app/Form/Listeners/ResetSettings.php#L12", - "refsource": "MISC", - "tags": [] - } - ] - }, - "description": { - "description_data": [ - { - "lang": "en", - "value": "The Nested Pages plugin for WordPress is vulnerable to unauthorized loss of data due to a missing capability check on the 'reset' function in versions up to, and including, 3.2.3. This makes it possible for authenticated attackers, with editor-level permissions and above, to reset plugin settings." 
- } - ] - } - }, - "configurations": { - "CVE_data_version": "4.0", - "nodes": [] - }, - "impact": {}, - "publishedDate": "2023-05-31T04:15Z", - "lastModifiedDate": "2023-05-31T04:15Z" + "weaknesses": [ + { + "source": "nvd@nist.gov", + "type": "Primary", + "description": [ + { + "lang": "en", + "value": "CWE-862" + } + ] + }, + { + "source": "security@wordfence.com", + "type": "Secondary", + "description": [ + { + "lang": "en", + "value": "CWE-862" + } + ] + } + ], + "configurations": [ + { + "nodes": [ + { + "operator": "OR", + "negate": false, + "cpeMatch": [ + { + "vulnerable": true, + "criteria": "cpe:2.3:a:nested_pages_project:nested_pages:*:*:*:*:*:wordpress:*:*", + "versionEndIncluding": "3.2.3", + "matchCriteriaId": "F288252B-FB7B-41FB-9F17-6846B325433F" + } + ] + } + ] + } + ], + "references": [ + { + "url": "https://plugins.trac.wordpress.org/browser/wp-nested-pages/tags/3.2.3/app/Form/Listeners/ResetSettings.php#L12", + "source": "security@wordfence.com", + "tags": [ + "Patch" + ] + }, + { + "url": "https://plugins.trac.wordpress.org/changeset?sfp_email=&sfph_mail=&reponame=&new=2919175%40wp-nested-pages&old=2814681%40wp-nested-pages&sfp_email=&sfph_mail=", + "source": "security@wordfence.com", + "tags": [ + "Patch" + ] + }, + { + "url": "https://www.wordfence.com/threat-intel/vulnerabilities/id/8c3e61e9-3610-41b5-9820-28012dc657fd?source=cve", + "source": "security@wordfence.com", + "tags": [ + "Third Party Advisory" + ] + } + ] + } } diff --git a/cveapi/cveapi.py b/cveapi/cveapi.py new file mode 100644 index 00000000000..e96912d5392 --- /dev/null +++ b/cveapi/cveapi.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python3 + +import json +import logging +import os +import pathlib +import time +from datetime import datetime, timedelta, timezone +from urllib.parse import quote + +import requests + +logger = logging.getLogger("cveapi") + + +def download_files(directory, last_update, update_timestamp): + index = 0 + session = requests.Session() + error_count = 0 + + while True: + if last_update: + parameters = f"startIndex={index}&lastModStartDate={quote(last_update.isoformat())}" + parameters += f"&lastModEndDate={quote(update_timestamp.isoformat())}" + else: + parameters = f"startIndex={index}" + logger.debug("Parameters are %s", parameters) + + r = session.get(f"https://services.nvd.nist.gov/rest/json/cves/2.0/?{(parameters)}") + if r.status_code != 200: + error_count += 1 + if error_count == 5: + logger.error("Got 5 errors when trying to download data, giving up") + r.raise_for_status() + logger.debug("Error fetching data, sleeping 10 seconds and trying again") + time.sleep(10) + continue + + # Reset error count + error_count = 0 + + response_json = r.json() + + logger.debug("Fetched %d of %d results", response_json["resultsPerPage"], response_json["totalResults"]) + + for cve in response_json["vulnerabilities"]: + filename = directory / f"{cve['cve']['id']}.json" + with filename.open("w") as f: + json.dump(cve, f) + last_modified = datetime.fromisoformat(cve["cve"]["lastModified"]).timestamp() + os.utime(filename, (last_modified, last_modified)) + + if response_json["startIndex"] + response_json["resultsPerPage"] == response_json["totalResults"]: + break + + index += response_json["resultsPerPage"] + + # Ratelimit without API key is 5 requests per 30 seconds + time.sleep(30 / 5) + + logger.info("Downloaded new information of %s CVEs", response_json["totalResults"]) + + +def run(): + loglevel = os.getenv("CVEAPI_LOGLEVEL", "INFO") + numeric_level = getattr(logging, loglevel.upper(), None) 
+ if not isinstance(numeric_level, int): + raise ValueError("Invalid log level: %s" % loglevel) + logging.basicConfig(format="%(message)s", level=numeric_level) + + cveapi_dir = os.getenv("CVEAPI_DIR", "/var/lib/kat-cveapi") + directory = pathlib.Path(cveapi_dir) / "v1" + directory.mkdir(parents=True, exist_ok=True) + + last_update_filename = directory / "lastupdate.json" + last_update = None + if last_update_filename.exists(): + with last_update_filename.open() as f: + last_update = datetime.fromisoformat(json.load(f)["last_update"]) + logger.info("Last update was %s", last_update.astimezone()) + + update_timestamp = datetime.now(timezone.utc) + update_timestamp = update_timestamp.replace(microsecond=0) + + if last_update and update_timestamp - last_update > timedelta(days=120): + # The NVD API allows a maximum 120 day interval. If this is run when the + # last update is longer than 120 days we will just download everything + # again. + last_update = None + + download_files(directory, last_update, update_timestamp) + + with last_update_filename.open("w") as f: + json.dump({"last_update": update_timestamp.isoformat()}, f) diff --git a/cveapi/debian/control b/cveapi/debian/control new file mode 100644 index 00000000000..e7cdca3754f --- /dev/null +++ b/cveapi/debian/control @@ -0,0 +1,18 @@ +Source: kat-cveapi +Section: python +Priority: optional +Maintainer: OpenKAT +Build-Depends: debhelper-compat (= 13), + dh-sequence-python3, + python3, + pybuild-plugin-pyproject, + python3-poetry, +Standards-Version: 4.6.2 +Homepage: https://github.com/minvws/nl-kat-coordination +Rules-Requires-Root: no + +Package: kat-cveapi +Architecture: all +Depends: ${misc:Depends}, ${python3:Depends}, +Description: OpenKAT - Download CVE data from NVD API + Download CVE data from NVD API to make it available for OpenKAT to fetch. 
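For orientation, the pieces above fit together roughly as follows: the systemd timer triggers /usr/bin/cveapi about once an hour, the script mirrors NVD records into /var/lib/kat-cveapi/v1/<CVE-ID>.json, and the kat_cve_finding_types boefje reads such a mirror through the CVEAPI_URL environment variable (falling back to https://cve.openkat.dev/v1). A minimal consumer sketch, assuming the mirror directory is served over HTTP by some web server (not shown in this patch) at a hypothetical internal URL:

import os

import requests

# Point the lookup at a self-hosted mirror; the boefje defaults to
# https://cve.openkat.dev/v1 when CVEAPI_URL is not set.
cveapi_url = os.getenv("CVEAPI_URL", "http://cve.mirror.internal/v1")  # hypothetical mirror URL

cve_id = "CVE-2021-46882"
response = requests.get(f"{cveapi_url}/{cve_id}.json", timeout=30)
response.raise_for_status()

# The mirrored files follow the NVD 2.0 schema, so the description and CVSS
# metrics live under the "cve" key, as in the test fixtures above.
record = response.json()
print(record["cve"]["descriptions"][0]["value"])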
diff --git a/cveapi/debian/copyright b/cveapi/debian/copyright new file mode 100644 index 00000000000..73427e342ea --- /dev/null +++ b/cveapi/debian/copyright @@ -0,0 +1,10 @@ +Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ +Upstream-Name: kat-cveapi +Upstream-Contact: info@openkat.nl +Source: https://github.com/minvws/nl-kat-coordination + +Files: * +Copyright: Ministry of Health, Welfare and Sport +License: EUPL + +License: EUPL diff --git a/cveapi/debian/kat-cveapi.service b/cveapi/debian/kat-cveapi.service new file mode 100644 index 00000000000..3a50639be7b --- /dev/null +++ b/cveapi/debian/kat-cveapi.service @@ -0,0 +1,31 @@ +[Unit] +Description=Download CVE API files + +[Service] +Type=oneshot +WorkingDirectory=/var/lib/kat-cveapi +StateDirectory=kat-cveapi +ExecStart=/usr/bin/cveapi +User=kat-cveapi +CapabilityBoundingSet= +RestrictNamespaces=yes +DevicePolicy=closed +KeyringMode=private +NoNewPrivileges=yes +PrivateDevices=yes +PrivateMounts=yes +PrivateTmp=yes +PrivateUsers=yes +ProtectControlGroups=yes +ProtectHome=yes +ProtectKernelModules=yes +ProtectKernelTunables=yes +ProtectSystem=strict +SystemCallArchitectures=native +SystemCallFilter=@system-service +SystemCallFilter=~@privileged @resources +RestrictRealtime=yes +LockPersonality=yes +MemoryDenyWriteExecute=yes +UMask=0022 +ReadWritePaths=/var/lib/kat-cveapi diff --git a/cveapi/debian/kat-cveapi.sysusers b/cveapi/debian/kat-cveapi.sysusers new file mode 100644 index 00000000000..c861d08df33 --- /dev/null +++ b/cveapi/debian/kat-cveapi.sysusers @@ -0,0 +1 @@ +u kat-cveapi - "OpenKAT CVE API" /var/lib/kat-cveapi diff --git a/cveapi/debian/kat-cveapi.timer b/cveapi/debian/kat-cveapi.timer new file mode 100644 index 00000000000..af711eee8b9 --- /dev/null +++ b/cveapi/debian/kat-cveapi.timer @@ -0,0 +1,11 @@ +[Unit] +Description=Download CVE API files + +[Timer] +OnActiveSec=0s +OnBootSec=120s +OnUnitActiveSec=3600s +Persistent=true + +[Install] +WantedBy=timers.target diff --git a/cveapi/debian/rules b/cveapi/debian/rules new file mode 100755 index 00000000000..977f61fd45a --- /dev/null +++ b/cveapi/debian/rules @@ -0,0 +1,8 @@ +#! /usr/bin/make -f + +%: + dh $@ --buildsystem=pybuild + +execute_after_dh_install: +# When we switch to debhelper compat lever 14 this will be done automatically. + dh_installsysusers diff --git a/cveapi/packaging/scripts/build-debian-package.sh b/cveapi/packaging/scripts/build-debian-package.sh new file mode 100755 index 00000000000..293978edf61 --- /dev/null +++ b/cveapi/packaging/scripts/build-debian-package.sh @@ -0,0 +1,18 @@ +#!/bin/bash + +set -e + +# TODO: generate proper changelog +echo "Create changelog file" +cat > debian/changelog << EOF +${PKG_NAME} (${RELEASE_VERSION}) unstable; urgency=low + * view changes: https://github.com/${REPOSITORY}/releases/tag/${RELEASE_TAG} + + -- OpenKAT $(LANG=C date -R) + +EOF + +dpkg-buildpackage -us -uc -b + +mkdir -p /app/build +mv /${PKG_NAME}_${RELEASE_VERSION}_*.deb /app/build/ diff --git a/cveapi/poetry.lock b/cveapi/poetry.lock new file mode 100644 index 00000000000..a1dc3fab8bd --- /dev/null +++ b/cveapi/poetry.lock @@ -0,0 +1,150 @@ +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. + +[[package]] +name = "certifi" +version = "2023.5.7" +description = "Python package for providing Mozilla's CA Bundle." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, + {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.2.0" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.2.0.tar.gz", hash = "sha256:3bb3d25a8e6c0aedd251753a79ae98a093c7e7b471faa3aa9a93a81431987ace"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b87549028f680ca955556e3bd57013ab47474c3124dc069faa0b6545b6c9710"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c70087bfee18a42b4040bb9ec1ca15a08242cf5867c58726530bdf3945672ed"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a103b3a7069b62f5d4890ae1b8f0597618f628b286b03d4bc9195230b154bfa9"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94aea8eff76ee6d1cdacb07dd2123a68283cb5569e0250feab1240058f53b623"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:db901e2ac34c931d73054d9797383d0f8009991e723dab15109740a63e7f902a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0dac0ff919ba34d4df1b6131f59ce95b08b9065233446be7e459f95554c0dc8"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:193cbc708ea3aca45e7221ae58f0fd63f933753a9bfb498a3b474878f12caaad"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09393e1b2a9461950b1c9a45d5fd251dc7c6f228acab64da1c9c0165d9c7765c"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:baacc6aee0b2ef6f3d308e197b5d7a81c0e70b06beae1f1fcacffdbd124fe0e3"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bf420121d4c8dce6b889f0e8e4ec0ca34b7f40186203f06a946fa0276ba54029"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c04a46716adde8d927adb9457bbe39cf473e1e2c2f5d0a16ceb837e5d841ad4f"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:aaf63899c94de41fe3cf934601b0f7ccb6b428c6e4eeb80da72c58eab077b19a"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62e51710986674142526ab9f78663ca2b0726066ae26b78b22e0f5e571238dd"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win32.whl", hash = "sha256:04e57ab9fbf9607b77f7d057974694b4f6b142da9ed4a199859d9d4d5c63fe96"}, + {file = "charset_normalizer-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:48021783bdf96e3d6de03a6e39a1171ed5bd7e8bb93fc84cc649d11490f87cea"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4957669ef390f0e6719db3613ab3a7631e68424604a7b448f079bee145da6e09"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46fb8c61d794b78ec7134a715a3e564aafc8f6b5e338417cb19fe9f57a5a9bf2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash 
= "sha256:f779d3ad205f108d14e99bb3859aa7dd8e9c68874617c72354d7ecaec2a054ac"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f25c229a6ba38a35ae6e25ca1264621cc25d4d38dca2942a7fce0b67a4efe918"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2efb1bd13885392adfda4614c33d3b68dee4921fd0ac1d3988f8cbb7d589e72a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f30b48dd7fa1474554b0b0f3fdfdd4c13b5c737a3c6284d3cdc424ec0ffff3a"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:246de67b99b6851627d945db38147d1b209a899311b1305dd84916f2b88526c6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd9b3b31adcb054116447ea22caa61a285d92e94d710aa5ec97992ff5eb7cf3"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8c2f5e83493748286002f9369f3e6607c565a6a90425a3a1fef5ae32a36d749d"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3170c9399da12c9dc66366e9d14da8bf7147e1e9d9ea566067bbce7bb74bd9c2"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7a4826ad2bd6b07ca615c74ab91f32f6c96d08f6fcc3902ceeedaec8cdc3bcd6"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3b1613dd5aee995ec6d4c69f00378bbd07614702a315a2cf6c1d21461fe17c23"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e608aafdb55eb9f255034709e20d5a83b6d60c054df0802fa9c9883d0a937aa"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win32.whl", hash = "sha256:f2a1d0fd4242bd8643ce6f98927cf9c04540af6efa92323e9d3124f57727bfc1"}, + {file = "charset_normalizer-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:681eb3d7e02e3c3655d1b16059fbfb605ac464c834a0c629048a30fad2b27489"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c57921cda3a80d0f2b8aec7e25c8aa14479ea92b5b51b6876d975d925a2ea346"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41b25eaa7d15909cf3ac4c96088c1f266a9a93ec44f87f1d13d4a0e86c81b982"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f058f6963fd82eb143c692cecdc89e075fa0828db2e5b291070485390b2f1c9c"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7647ebdfb9682b7bb97e2a5e7cb6ae735b1c25008a70b906aecca294ee96cf4"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eef9df1eefada2c09a5e7a40991b9fc6ac6ef20b1372abd48d2794a316dc0449"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e03b8895a6990c9ab2cdcd0f2fe44088ca1c65ae592b8f795c3294af00a461c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ee4006268ed33370957f55bf2e6f4d263eaf4dc3cfc473d1d90baff6ed36ce4a"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c4983bf937209c57240cff65906b18bb35e64ae872da6a0db937d7b4af845dd7"}, + {file = 
"charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:3bb7fda7260735efe66d5107fb7e6af6a7c04c7fce9b2514e04b7a74b06bf5dd"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:72814c01533f51d68702802d74f77ea026b5ec52793c791e2da806a3844a46c3"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:70c610f6cbe4b9fce272c407dd9d07e33e6bf7b4aa1b7ffb6f6ded8e634e3592"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:a401b4598e5d3f4a9a811f3daf42ee2291790c7f9d74b18d75d6e21dda98a1a1"}, + {file = "charset_normalizer-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c0b21078a4b56965e2b12f247467b234734491897e99c1d51cee628da9786959"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:95eb302ff792e12aba9a8b8f8474ab229a83c103d74a750ec0bd1c1eea32e669"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1a100c6d595a7f316f1b6f01d20815d916e75ff98c27a01ae817439ea7726329"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6339d047dab2780cc6220f46306628e04d9750f02f983ddb37439ca47ced7149"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4b749b9cc6ee664a3300bb3a273c1ca8068c46be705b6c31cf5d276f8628a94"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38856a971c602f98472050165cea2cdc97709240373041b69030be15047691f"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f87f746ee241d30d6ed93969de31e5ffd09a2961a051e60ae6bddde9ec3583aa"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89f1b185a01fe560bc8ae5f619e924407efca2191b56ce749ec84982fc59a32a"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1c8a2f4c69e08e89632defbfabec2feb8a8d99edc9f89ce33c4b9e36ab63037"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2f4ac36d8e2b4cc1aa71df3dd84ff8efbe3bfb97ac41242fbcfc053c67434f46"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a386ebe437176aab38c041de1260cd3ea459c6ce5263594399880bbc398225b2"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:ccd16eb18a849fd8dcb23e23380e2f0a354e8daa0c984b8a732d9cfaba3a776d"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e6a5bf2cba5ae1bb80b154ed68a3cfa2fa00fde979a7f50d6598d3e17d9ac20c"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45de3f87179c1823e6d9e32156fb14c1927fcc9aba21433f088fdfb555b77c10"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1000fba1057b92a65daec275aec30586c3de2401ccdcd41f8a5c1e2c87078706"}, + {file = "charset_normalizer-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b2c760cfc7042b27ebdb4a43a4453bd829a5742503599144d54a032c5dc7e9e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:855eafa5d5a2034b4621c74925d89c5efef61418570e5ef9b37717d9c796419c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:203f0c8871d5a7987be20c72442488a0b8cfd0f43b7973771640fc593f56321f"}, + {file = 
"charset_normalizer-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e857a2232ba53ae940d3456f7533ce6ca98b81917d47adc3c7fd55dad8fab858"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e86d77b090dbddbe78867a0275cb4df08ea195e660f1f7f13435a4649e954e5"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4fb39a81950ec280984b3a44f5bd12819953dc5fa3a7e6fa7a80db5ee853952"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dee8e57f052ef5353cf608e0b4c871aee320dd1b87d351c28764fc0ca55f9f4"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8700f06d0ce6f128de3ccdbc1acaea1ee264d2caa9ca05daaf492fde7c2a7200"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1920d4ff15ce893210c1f0c0e9d19bfbecb7983c76b33f046c13a8ffbd570252"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c1c76a1743432b4b60ab3358c937a3fe1341c828ae6194108a94c69028247f22"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f7560358a6811e52e9c4d142d497f1a6e10103d3a6881f18d04dbce3729c0e2c"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:c8063cf17b19661471ecbdb3df1c84f24ad2e389e326ccaf89e3fb2484d8dd7e"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cd6dbe0238f7743d0efe563ab46294f54f9bc8f4b9bcf57c3c666cc5bc9d1299"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1249cbbf3d3b04902ff081ffbb33ce3377fa6e4c7356f759f3cd076cc138d020"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win32.whl", hash = "sha256:6c409c0deba34f147f77efaa67b8e4bb83d2f11c8806405f76397ae5b8c0d1c9"}, + {file = "charset_normalizer-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7095f6fbfaa55defb6b733cfeb14efaae7a29f0b59d8cf213be4e7ca0b857b80"}, + {file = "charset_normalizer-3.2.0-py3-none-any.whl", hash = "sha256:8e098148dd37b4ce3baca71fb394c81dc5d9c7728c95df695d2dca218edf40e6"}, +] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "urllib3" +version = "2.0.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "urllib3-2.0.3-py3-none-any.whl", hash = "sha256:48e7fafa40319d358848e1bc6809b208340fafe2096f1725d05d67443d0483d1"},
+    {file = "urllib3-2.0.3.tar.gz", hash = "sha256:bee28b5e56addb8226c96f7f13ac28cb4c301dd5ea8a6ca179c0b9835e032825"},
+]
+
+[package.extras]
+brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
+secure = ["certifi", "cryptography (>=1.9)", "idna (>=2.0.0)", "pyopenssl (>=17.1.0)", "urllib3-secure-extra"]
+socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
+zstd = ["zstandard (>=0.18.0)"]
+
+[metadata]
+lock-version = "2.0"
+python-versions = "^3.11"
+content-hash = "a5780ef8e06df616beb6eb67292099db49b8fe658fcbf22940e5e1af96a7c14e"
diff --git a/cveapi/pyproject.toml b/cveapi/pyproject.toml
new file mode 100644
index 00000000000..d0545308423
--- /dev/null
+++ b/cveapi/pyproject.toml
@@ -0,0 +1,21 @@
+[tool.black]
+target-version = ["py38", "py39", "py310", "py311"]
+line-length = 120
+
+[tool.poetry]
+name = "cveapi"
+version = "0.0.1.dev1"
+description = "CVE API"
+license = "EUPL"
+authors = ["MinVWS "]
+
+[tool.poetry.dependencies]
+python = "^3.11"
+requests = "^2.31.0"
+
+[tool.poetry.scripts]
+cveapi = 'cveapi:run'
+
+[build-system]
+requires = ["poetry-core"]
+build-backend = "poetry.core.masonry.api"
diff --git a/docs/source/release_notes/1.10.rst b/docs/source/release_notes/1.10.rst
index 99ae2bcd302..a38b45a779a 100644
--- a/docs/source/release_notes/1.10.rst
+++ b/docs/source/release_notes/1.10.rst
@@ -13,6 +13,10 @@ severity in XTDB. By doing those queries completely in XTDB we fixed several
 performance issues. Finding types are added by boefjes which will also give
 more flexibility adding/changing/updating finding types in the future.
 
+The CVE finding boefje will download the CVE information from
+https://cve.openkat.dev/. It is also possible to run your own instance of this
+API; see :ref:`CVE API` for more information.
+
 The Python version used in the container images have been updated to 3.11.
 Python 3.11 is a lot faster so this should also make OpenKAT faster. Django
 version has also been updated to version 4.2.
diff --git a/docs/source/technical_design/cveapi.rst b/docs/source/technical_design/cveapi.rst
new file mode 100644
index 00000000000..a84ea637971
--- /dev/null
+++ b/docs/source/technical_design/cveapi.rst
@@ -0,0 +1,35 @@
+=======
+CVE API
+=======
+
+OpenKAT requests information about CVEs from https://cve.openkat.dev. It is
+possible to run your own instance if you don't want to rely on a third-party
+service for this. The kat-cveapi Debian package, which can be downloaded from
+`GitHub `__, can
+be used for this.
+
+The package contains a script that downloads all the CVE information to the
+`/var/lib/kat-cveapi` directory. The package also includes a systemd timer that
+runs the script once after the package is installed and then hourly, to keep
+the CVE information up-to-date. The `/var/lib/kat-cveapi` directory can then be
+served as static files by your webserver. An example nginx configuration, as
+used by https://cve.openkat.dev/:
+
+.. code-block:: nginx
+
+    server {
+        listen [::]:443 ssl;
+
+        server_name cve.openkat.dev;
+
+        ssl_certificate /etc/letsencrypt/live/openkat.dev/fullchain.pem;
+        ssl_certificate_key /etc/letsencrypt/live/openkat.dev/privkey.pem;
+
+        access_log /var/log/nginx/cve/access.log;
+        error_log /var/log/nginx/cve/error.log;
+
+        root /var/lib/kat-cveapi;
+    }
+
+The CVEAPI_URL configuration parameter of the kat_cve_finding_types boefje can
+then be set to point to your own instance.
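As an illustration of how a consumer such as the kat_cve_finding_types boefje could read from such a mirror, here is a rough sketch (not the actual boefje code); the base URL, the v1/ path, and the helper name are assumptions based on the directory layout that cveapi.py produces:

    import requests

    # Assumed layout: the static web root is /var/lib/kat-cveapi, so the per-CVE
    # JSON files written by cveapi.py are exposed under the v1/ path.
    CVEAPI_URL = "https://cve.example.org/v1"


    def fetch_cve(cve_id: str) -> dict:
        # Each CVE is served as a single static file named <CVE-ID>.json.
        response = requests.get(f"{CVEAPI_URL}/{cve_id}.json", timeout=30)
        response.raise_for_status()
        return response.json()


    record = fetch_cve("CVE-2021-44228")
    print(record["cve"]["id"], record["cve"]["lastModified"])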
diff --git a/docs/source/technical_design/index.rst b/docs/source/technical_design/index.rst index 695eb930416..58e9637fcd2 100644 --- a/docs/source/technical_design/index.rst +++ b/docs/source/technical_design/index.rst @@ -17,3 +17,4 @@ Contains documentation for developers and contributors. debuggingtroubleshooting latex externalauthentication + cveapi diff --git a/pyproject.toml b/pyproject.toml index c9da5fba389..aa9bb4ada54 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,6 +78,7 @@ task-tags = ["Example", "todo", "TODO", "FIXME"] "*/tests/*" = ["T20"] "boefjes/boefjes/plugins/*" = ["PTH"] "scripts/*.py" = ["INP001", "T201"] +"cveapi/cveapi.py" = ["INP001"] [tool.ruff.flake8-tidy-imports] [tool.ruff.flake8-tidy-imports.banned-api] diff --git a/rocky/onboarding/templates/step_1_introduction.html b/rocky/onboarding/templates/step_1_introduction.html index 427f7b59c89..3f07db38b63 100644 --- a/rocky/onboarding/templates/step_1_introduction.html +++ b/rocky/onboarding/templates/step_1_introduction.html @@ -13,7 +13,7 @@

{% translate "Welcome to OpenKAT" %}

{% blocktranslate trimmed %} OpenKAT is the "Kwetsbaarheden Analyse Tool" (Vulnerabilities Analysis Tool). An Open-Source-project developed by the - Ministry of Public Health, Wellbeing and Sports to make your and our world a safer place. + Ministry of Health, Welfare and Sport to make your and our world a safer place. {% endblocktranslate %}

{% include "partials/stepper.html" %} From c0b7af1e1c819690dd216736a4b5222e6b21282f Mon Sep 17 00:00:00 2001 From: Jeroen Dekkers Date: Wed, 19 Jul 2023 14:42:47 +0200 Subject: [PATCH 14/18] Don't scan hostname nmap in nmap boefje (#1415) Co-authored-by: Patrick --- boefjes/boefjes/plugins/kat_nmap/main.py | 1 - boefjes/tests/test_nmap.py | 8 -------- 2 files changed, 9 deletions(-) diff --git a/boefjes/boefjes/plugins/kat_nmap/main.py b/boefjes/boefjes/plugins/kat_nmap/main.py index cc92a2b0e47..1ecd1a3deed 100644 --- a/boefjes/boefjes/plugins/kat_nmap/main.py +++ b/boefjes/boefjes/plugins/kat_nmap/main.py @@ -28,7 +28,6 @@ def build_nmap_arguments(host: str, protocol: Protocol, top_ports: Optional[int] """Returns Nmap arguments to use based on protocol and top_ports for host.""" ip = ip_address(host) args = [ - "nmap", "--open", "-T4", "-Pn", diff --git a/boefjes/tests/test_nmap.py b/boefjes/tests/test_nmap.py index 0656416dc34..e4a16cf41a7 100644 --- a/boefjes/tests/test_nmap.py +++ b/boefjes/tests/test_nmap.py @@ -8,7 +8,6 @@ def test_nmap_arguments_tcp_top_150(self): args = build_nmap_arguments("1.1.1.1", Protocol.TCP, 250) self.assertListEqual( [ - "nmap", "--open", "-T4", "-Pn", @@ -29,7 +28,6 @@ def test_nmap_arguments_tcp_top_150_ipv6(self): args = build_nmap_arguments("2001:19f0:5001:23fe:5400:3ff:fe60:883b", Protocol.TCP, 250) self.assertListEqual( [ - "nmap", "--open", "-T4", "-Pn", @@ -51,7 +49,6 @@ def test_nmap_arguments_tcp_full(self): args = build_nmap_arguments("1.1.1.1", Protocol.TCP, None) self.assertListEqual( [ - "nmap", "--open", "-T4", "-Pn", @@ -71,7 +68,6 @@ def test_nmap_arguments_tcp_full_ipv6(self): args = build_nmap_arguments("2001:19f0:5001:23fe:5400:3ff:fe60:883b", Protocol.TCP, None) self.assertListEqual( [ - "nmap", "--open", "-T4", "-Pn", @@ -92,7 +88,6 @@ def test_nmap_arguments_udp_full(self): args = build_nmap_arguments("1.1.1.1", Protocol.UDP, None) self.assertListEqual( [ - "nmap", "--open", "-T4", "-Pn", @@ -112,7 +107,6 @@ def test_nmap_arguments_udp_full_ipv6(self): args = build_nmap_arguments("2001:19f0:5001:23fe:5400:3ff:fe60:883b", Protocol.UDP, None) self.assertListEqual( [ - "nmap", "--open", "-T4", "-Pn", @@ -133,7 +127,6 @@ def test_nmap_arguments_udp_top250(self): args = build_nmap_arguments("1.1.1.1", Protocol.UDP, 250) self.assertListEqual( [ - "nmap", "--open", "-T4", "-Pn", @@ -154,7 +147,6 @@ def test_nmap_arguments_udp_top250_ipv6(self): args = build_nmap_arguments("2001:19f0:5001:23fe:5400:3ff:fe60:883b", Protocol.UDP, 250) self.assertListEqual( [ - "nmap", "--open", "-T4", "-Pn", From ff32f91f74eb80bf0600364dc6c497a02a98271f Mon Sep 17 00:00:00 2001 From: JP Bruins Slot Date: Wed, 19 Jul 2023 15:19:47 +0200 Subject: [PATCH 15/18] Reschedule tasks when no results in bytes are found after grace period (#1410) Co-authored-by: Patrick Co-authored-by: Mark Janssen <20283+praseodym@users.noreply.github.com> --- mula/scheduler/schedulers/boefje.py | 18 ++++++-- .../integration/test_boefje_scheduler.py | 43 ++++++++++++++++++- 2 files changed, 57 insertions(+), 4 deletions(-) diff --git a/mula/scheduler/schedulers/boefje.py b/mula/scheduler/schedulers/boefje.py index d637d587b40..677fbfafa48 100644 --- a/mula/scheduler/schedulers/boefje.py +++ b/mula/scheduler/schedulers/boefje.py @@ -424,10 +424,22 @@ def is_task_running(self, task: BoefjeTask) -> bool: # Task has been finished (failed, or succeeded) according to # the datastore, but we have no results of it in bytes, meaning - # we have a problem. 
- if task_bytes is None and task_db is not None and task_db.status in [TaskStatus.COMPLETED, TaskStatus.FAILED]: + # we have a problem. However when the grace period has been reached we + # should not raise an error. + if ( + task_bytes is None + and task_db is not None + and task_db.status in [TaskStatus.COMPLETED, TaskStatus.FAILED] + and ( + task_db.modified_at is not None + and task_db.modified_at + > datetime.now(timezone.utc) - timedelta(seconds=self.ctx.config.pq_populate_grace_period) + ) + ): self.logger.error( - "Task has been finished, but no results found in bytes " + "Task has been finished, but no results found in bytes, " + "please review the bytes logs for more information regarding " + "this error. " "[task.id=%s, task.hash=%s, organisation_id=%s, scheduler_id=%s]", task_db.id, task.hash, diff --git a/mula/tests/integration/test_boefje_scheduler.py b/mula/tests/integration/test_boefje_scheduler.py index 9fb72c2ea0a..c6bac1cf5ae 100644 --- a/mula/tests/integration/test_boefje_scheduler.py +++ b/mula/tests/integration/test_boefje_scheduler.py @@ -306,7 +306,7 @@ def test_is_task_running_bytes_not_running(self): # Assert self.assertFalse(is_running) - def test_is_task_running_mismatch(self): + def test_is_task_running_mismatch_before_grace_period(self): """When a task has finished according to the datastore, (e.g. failed or completed), but there are no results in bytes, we have a problem. """ @@ -346,6 +346,47 @@ def test_is_task_running_mismatch(self): with self.assertRaises(RuntimeError): self.scheduler.is_task_running(task) + def test_is_task_running_mismatch_after_grace_period(self): + """When a task has finished according to the datastore, (e.g. failed + or completed), but there are no results in bytes, we have a problem. + However when the grace period has been reached we should not raise + an error. 
+ """ + # Arrange + scan_profile = ScanProfileFactory(level=0) + ooi = OOIFactory(scan_profile=scan_profile) + boefje = PluginFactory(scan_level=0, consumes=[ooi.object_type]) + task = models.BoefjeTask( + boefje=boefje, + input_ooi=ooi.primary_key, + organization=self.organisation.id, + ) + + p_item = models.PrioritizedItem( + id=task.id, + scheduler_id=self.scheduler.scheduler_id, + priority=1, + data=task, + hash=task.hash, + ) + + task_db = models.Task( + id=p_item.id, + scheduler_id=self.scheduler.scheduler_id, + type="boefje", + p_item=p_item, + status=models.TaskStatus.COMPLETED, + created_at=datetime.now(timezone.utc), + modified_at=datetime.now(timezone.utc) - timedelta(seconds=self.mock_ctx.config.pq_populate_grace_period), + ) + + # Mock + self.mock_get_latest_task_by_hash.return_value = task_db + self.mock_get_last_run_boefje.return_value = None + + # Act + self.assertFalse(self.scheduler.is_task_running(task)) + def test_has_grace_period_passed_datastore_passed(self): """Grace period passed according to datastore, and the status is completed""" # Arrange From b2dd85f376e0b639c36a382428ba1dda3d247ee5 Mon Sep 17 00:00:00 2001 From: Jeroen Dekkers Date: Fri, 21 Jul 2023 16:39:02 +0200 Subject: [PATCH 16/18] Fix translation in Debian package (#1432) --- rocky/MANIFEST.in | 1 + 1 file changed, 1 insertion(+) diff --git a/rocky/MANIFEST.in b/rocky/MANIFEST.in index 33c1b86b5e0..1429fca137e 100644 --- a/rocky/MANIFEST.in +++ b/rocky/MANIFEST.in @@ -4,6 +4,7 @@ include LICENSE recursive-include rocky/templates *.html recursive-include rocky/templates *.txt recursive-include rocky/locale *.mo +recursive-include rocky/locale *.po recursive-include fmea/templates *.html recursive-include fmea/templates *.txt recursive-include katalogus/templates *.html From b4b3d4ed5405c9b525be4f22e03ee64fe1ffac33 Mon Sep 17 00:00:00 2001 From: Roelof Korporaal Date: Fri, 21 Jul 2023 17:17:10 +0200 Subject: [PATCH 17/18] Use the correct clearance level variable in organization member list template (#1427) Co-authored-by: Jeroen Dekkers --- .../rocky/templates/organizations/organization_member_list.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rocky/rocky/templates/organizations/organization_member_list.html b/rocky/rocky/templates/organizations/organization_member_list.html index ee389b8d7f3..f79926f6c17 100644 --- a/rocky/rocky/templates/organizations/organization_member_list.html +++ b/rocky/rocky/templates/organizations/organization_member_list.html @@ -77,7 +77,7 @@

{% translate "Members" %}

{% endif %} - {% if member.trusted_clearance_level < 0 %} + {% if member.acknowledged_clearance_level < 0 %} None {% else %} L{{ member.acknowledged_clearance_level }} From 724fb127db0018447848f8540a1c5a52a85a0624 Mon Sep 17 00:00:00 2001 From: Rieven Date: Mon, 24 Jul 2023 09:49:40 +0200 Subject: [PATCH 18/18] Fix robot test (#1420) Co-authored-by: Jan Klopper --- rocky/tests/robot/ci/01_rocky_loads.robot | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/rocky/tests/robot/ci/01_rocky_loads.robot b/rocky/tests/robot/ci/01_rocky_loads.robot index 809d1470e8d..e738a43d4d2 100644 --- a/rocky/tests/robot/ci/01_rocky_loads.robot +++ b/rocky/tests/robot/ci/01_rocky_loads.robot @@ -13,7 +13,7 @@ I want to add indemnifications Fill Text xpath=//*[@id="id_name"] Dev Org Fill Text xpath=//*[@id="id_code"] dev Click "Submit" - #Click xpath=//button[contains(text(),"Submit")] + # Click xpath=//button[contains(text(),"Submit")] Check Checkbox css=#id_may_scan Check Checkbox css=#id_am_authorized Click "Submit" @@ -33,11 +33,8 @@ I want to create a redteamer account I want to create a client account Create A User While Onboarding Client client@localhost P@SSw00rdClient!123456789 -I can confirm that I can proceed - Click xpath=//a[@class="button"] - Click "Continue" - Click "Continue" - Click "Continue with this account, onboard me!" +I want to land on crisis room after adding optional users + Get Title equal OpenKAT - crisis_room # Note: the CI should be extended when the error 500 is properly debugged @@ -49,4 +46,4 @@ Create A User While Onboarding Fill Text xpath=//*[@id="id_email"] ${email} Fill Text xpath=//*[@id="id_password"] ${password} Click "Submit" - #Get Text .confirmation contains successfully error account creation failed + # Get Text .confirmation contains successfully error account creation failed