diff --git a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py
index 87e0472c3f..5a4de5086a 100644
--- a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py
+++ b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py
@@ -916,7 +916,12 @@ def run(self, update_position=False):
if len(monitored_files) == 0:
raise AttributeError('There is no path to monitor. Exiting...')
for path in monitored_files:
- output_path = f'{host}_{path.split("/")[-1]}.tmp'
+ if '\\' in path:
+ first_path_element = path.split("\\")[-1]
+ else:
+ first_path_element = path.split("/")[-1]
+
+ output_path = f'{host}_{first_path_element}.tmp'
self._file_content_collectors.append(self.file_composer(host=host, path=path, output_path=output_path))
logger.debug(f'Add new file composer process for {host} and path: {path}')
self._file_monitors.append(self._start(host=host,
diff --git a/deps/wazuh_testing/wazuh_testing/tools/system.py b/deps/wazuh_testing/wazuh_testing/tools/system.py
index 985be570a6..f6e0fab106 100644
--- a/deps/wazuh_testing/wazuh_testing/tools/system.py
+++ b/deps/wazuh_testing/wazuh_testing/tools/system.py
@@ -53,8 +53,15 @@ def get_inventory(self) -> dict:
"""
return self.inventory
+ def get_inventory_path(self) -> str:
+ """Get the loaded Ansible inventory.
+
+ Returns:
+ self.inventory: Ansible inventory
+ """
+ return self.inventory_path
- def get_group_hosts(self, pattern=None):
+ def get_group_hosts(self, pattern=None):  # NOTE(review): string 'None' is truthy and would bypass the "all hosts" branch
"""Get all hosts from inventory that belong to a group.
Args:
@@ -68,6 +75,13 @@ def get_group_hosts(self, pattern=None):
else:
return [str(host) for host in self.inventory_manager.get_hosts()]
+
+ def get_host_groups(self, host):
+ """
+ """
+ group_list = self.inventory_manager.get_host(host).get_groups()
+ return [str(group) for group in group_list]
+
def get_host_variables(self, host):
"""Get the variables of the specified host.
@@ -92,6 +106,10 @@ def get_host(self, host: str):
"""
return testinfra.get_host(f"ansible://{host}?ansible_inventory={self.inventory_path}")
+ def truncate_file(self, host: str, filepath: str):
+ self.get_host(host).ansible("command", f"truncate -s 0 {filepath}", check=False)
+
+
def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/ossec.conf', check: bool = False):
"""Move from src_path to the desired location dest_path for the specified host.
@@ -101,9 +119,18 @@ def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/o
dest_path (str): Destination path
check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be applied.
"""
- self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=0775",
- check=check)
-
+ system = 'linux'
+ if 'os_name' in self.get_host_variables(host):
+ host_os_name = self.get_host_variables(host)['os_name']
+ if host_os_name == 'windows':
+ system = 'windows'
+
+ if system == 'linux':
+ a = self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=0644",
+ check=check)
+ # NOTE(review): debug print removed; result kept in `a` if inspection is needed
+ else:
+ self.get_host(host).ansible("ansible.windows.win_copy", f"src='{src_path}' dest='{dest_path}'", check=check)
def add_block_to_file(self, host: str, path: str, replace: str, before: str, after, check: bool = False):
"""Add text block to desired file.
@@ -400,24 +427,30 @@ def download_file(self, host, url, dest_path, mode='755'):
a = self.get_host(host).ansible("get_url", f"url={url} dest={dest_path} mode={mode}", check=False)
return a
- def install_package(self, host, url, package_manager):
+ def install_package(self, host, url, system='ubuntu'):
result = False
- if package_manager == 'apt':
+ if system =='windows':
+ a = self.get_host(host).ansible("win_package", f"path={url} arguments=/S", check=False)
+ # NOTE(review): debug print removed
+ elif system == 'ubuntu':
a = self.get_host(host).ansible("apt", f"deb={url}", check=False)
if a['changed'] == True and a['stderr'] == '':
result = True
- elif package_manager == 'yum':
+ elif system == 'centos':
a = self.get_host(host).ansible("yum", f"name={url} state=present sslverify=false disable_gpg_check=True", check=False)
if 'rc' in a and a['rc'] == 0 and a['changed'] == True:
result = True
- def remove_package(self, host, package_name, package_manager):
+
+ def remove_package(self, host, package_name, system):
result = False
- if package_manager == 'apt':
+ if system == 'windows':
+ a = self.get_host(host).ansible("win_package", f"path={package_name} state=absent arguments=/S", check=False)
+ elif system == 'ubuntu':
a = self.get_host(host).ansible("apt", f"name={package_name} state=absent", check=False)
if a['changed'] == True and a['stderr'] == '':
result = True
- elif package_manager == 'yum':
+ elif system == 'centos':
a = self.get_host(host).ansible("yum", f"name={package_name} state=absent", check=False)
if 'rc' in a and a['rc'] == 0 and a['changed'] == True:
result = True
@@ -427,13 +460,21 @@ def handle_wazuh_services(self, host, operation):
os = self.get_host_variables(host)['os_name']
binary_path = None
if os == 'windows':
- self.get_host(host).ansible('ansible.windows.win_command', f"cmd=NET {operation} Wazuh", check=False)
+ if operation == 'restart':
+ a = self.get_host(host).ansible('ansible.windows.win_shell', f'NET stop Wazuh', check=False)
+ b = self.get_host(host).ansible('ansible.windows.win_shell', f'NET start Wazuh', check=False)
+
+ # NOTE(review): debug prints removed; NET has no restart, hence stop then start
+
+ else:
+ a = self.get_host(host).ansible('ansible.windows.win_shell', f'NET {operation} Wazuh', check=False)
+ # NOTE(review): debug print removed
else:
if os == 'linux':
binary_path = f"/var/ossec/bin/wazuh-control"
elif os == 'macos':
binary_path = f"/Library/Ossec/bin/wazuh-control"
- self.get_host(host).ansible('ansible.builtin.command', f'cmd="{binary_path} {operation}"', check=False)
+ self.get_host(host).ansible('shell', f"{binary_path} {operation}", check=False)
def clean_environment(host_manager, target_files):
diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/defaults/main.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/defaults/main.yml
index 76e45a0888..9dbe620718 100644
--- a/provisioning/roles/wazuh/ansible-wazuh-agent/defaults/main.yml
+++ b/provisioning/roles/wazuh/ansible-wazuh-agent/defaults/main.yml
@@ -60,6 +60,7 @@ wazuh_winagent_config_url: https://packages.wazuh.com/4.x/windows/wazuh-agent-4.
wazuh_winagent_package_name: wazuh-agent-4.8.0-1.msi
wazuh_winagent_package_name_generic: wazuh-agent.msi
wazuh_dir: "/var/ossec"
+wazuh_macos_dir: "/Library/Ossec"
# This is deprecated, see: wazuh_agent_address
wazuh_agent_nat: false
diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml
index 84f3ff4553..56d5a281eb 100644
--- a/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml
+++ b/provisioning/roles/wazuh/ansible-wazuh-agent/handlers/main.yml
@@ -4,3 +4,6 @@
- name: Windows | Restart Wazuh Agent
win_service: name=WazuhSvc start_mode=auto state=restarted
+
+- name: MacOS | restart wazuh-agent
+ ansible.builtin.shell: "{{ wazuh_macos_dir }}/bin/wazuh-control restart"
\ No newline at end of file
diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml b/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml
index 78f28e8305..fb19fbd320 100644
--- a/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml
+++ b/provisioning/roles/wazuh/ansible-wazuh-agent/tasks/MacOS.yml
@@ -6,3 +6,15 @@
- include_tasks: "installation_from_custom_packages.yml"
when: wazuh_custom_packages_installation_agent_enabled
+
+- name: MacOS | Installing agent configuration (ossec.conf)
+ template:
+ src: var-ossec-etc-ossec-agent.conf.j2
+ dest: "{{ wazuh_macos_dir }}/etc/ossec.conf"
+ owner: root
+ group: wazuh
+ mode: 0644
+ notify: MacOS | restart wazuh-agent
+ tags:
+ - init
+ - config
diff --git a/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2 b/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2
index 8eef3d1dae..165d8e64b1 100644
--- a/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2
+++ b/provisioning/roles/wazuh/ansible-wazuh-agent/templates/var-ossec-etc-ossec-agent.conf.j2
@@ -44,7 +44,7 @@
{{ wazuh_agent_config.enrollment.manager_address }}
{% endif %}
{% if wazuh_agent_config.enrollment.agent_name | length > 0 %}
- {{ wazuh_agent_config.enrollment.agent_name }}
+ {{ ansible_hostname }}
{% endif %}
{% if wazuh_agent_config.enrollment.port is defined > 0 %}
{{ wazuh_agent_config.enrollment.port }}
diff --git a/tests/end_to_end/test_vulnerability_detector/cases.yaml b/tests/end_to_end/test_vulnerability_detector/cases.yaml
index b1ac56fba3..2f548d2edf 100644
--- a/tests/end_to_end/test_vulnerability_detector/cases.yaml
+++ b/tests/end_to_end/test_vulnerability_detector/cases.yaml
@@ -13,70 +13,144 @@
amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb
arm: null
windows:
- amd64: http://sourceforge.net/projects/firebird/files/firebird-win32/2.0.7-Release/Firebird-2.0.7.13318_0_win32.exe/download
- check_alerts:
+ amd64: https://get.videolan.org/vlc/3.0.6/win64/vlc-3.0.6-win64.exe
+ macos:
+ amd64: https://nodejs.org/dist/v17.0.1/node-v17.0.1.pkg
+
+# {"timestamp":"2023-11-14T10:52:41.932+0000","rule":{"level":10,"description":"CVE-2020-28924 affects rclone","id":"23505","firedtimes":392,"mail":false,"groups":["vulnerability-detector"],"gdpr":["IV_35.7.d"],"pci_dss":["11.2.1","11.2.3"],"tsc":["CC7.1","CC7.2"]},"agent":{"id":"002","name":"ip-172-31-12-122.ec2.internal","ip":"172.31.12.122"},"manager":{"name":"ip-172-31-6-24"},"id":"1699959161.19604197","cluster":{"name":"wazuh","node":"master"},"decoder":{"name":"json"},"data":{"vulnerability":{"package":{"name":"rclone","version":"1.49.5-1","architecture":"x86_64","condition":"Package less than 1.53.3"},"cvss":{"cvss2":{"vector":{"attack_vector":"network","access_complexity":"low","authentication":"none","confidentiality_impact":"partial","integrity_impact":"none","availability":"none"},"base_score":"5","exploitability_score":"10","impact_score":"2.900000"},"cvss3":{"vector":{"attack_vector":"network","access_complexity":"low","privileges_required":"none","user_interaction":"none","scope":"unchanged","confidentiality_impact":"high","integrity_impact":"none","availability":"none"},"base_score":"7.500000","exploitability_score":"3.900000","impact_score":"3.600000"}},"cve":"CVE-2020-28924","title":"CVE-2020-28924 affects rclone","rationale":"An issue was discovered in Rclone before 1.53.3. Due to the use of a weak random number generator, the password generator has been producing weak passwords with much less entropy than advertised. The suggested passwords depend deterministically on the time the second rclone was started. This limits the entropy of the passwords enormously. These passwords are often used in the crypt backend for encryption of data. It would be possible to make a dictionary of all possible passwords with about 38 million entries per password length. This would make decryption of secret material possible with a plausible amount of effort. 
+# NOTE: all passwords generated by affected versions should be changed.","severity":"High","published":"2020-11-19","updated":"2022-04-26","cwe_reference":"CWE-331","status":"Active","type":"PACKAGE","references":["https://github.com/rclone/rclone/issues/4783","https://lists.fedoraproject.org/archives/list/package-announce@lists.fedoraproject.org/message/UJIFT24Q6EFXLQZ24AER2QGFFZLMIPCD/","https://rclone.org/downloads/","https://security.gentoo.org/glsa/202107-14","https://nvd.nist.gov/vuln/detail/CVE-2020-28924"],"assigner":"cve@mitre.org"}},"location":"vulnerability-detector"}
+
+ check_agent_alert_indexer:
centos:
- - event: syscollector_install_package_alert_yum
- parameters:
- PACKAGE_NAME: "rclone"
- PACKAGE_VERSION: "1.49.5"
- ubuntu:
- - event: syscollector_install_package_alert_apt
- parameters:
- PACKAGE_NAME: "rclone"
- PACKAGE_VERSION: "1.49.5"
- teardown:
- - remove_package:
- package:
- centos: rclone
- ubuntu: rclone
-
-- case: "Updating a vulnerable package that remains vulnerable to the same CVE"
- id: "update_vuln_package_remain_vulnerable"
- description: "Updating a vulnerable package that remains vulnerable to the same CVE"
- preconditions:
- tasks:
- - install_package:
- package:
- centos:
- amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm
- ubuntu:
- amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb
- check_alerts:
- centos:
+ amd64:
- event: syscollector_install_package_alert_yum
parameters:
PACKAGE_NAME: "rclone"
PACKAGE_VERSION: "1.49.5"
- ubuntu:
- - event: syscollector_install_package_alert_apt
+ arm:
+ - event: syscollector_install_package_alert_yum
parameters:
PACKAGE_NAME: "rclone"
PACKAGE_VERSION: "1.49.5"
- body:
- tasks:
- - install_package:
- package:
- centos:
- amd64: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.rpm
- ubuntu:
- amd64: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.deb
- check_alerts:
- centos:
- - event: upgrade_package
- parameters:
- PACKAGE_NAME: "rclone"
- PACKAGE_VERSION: "1.50.0"
ubuntu:
- - event: upgrade_package
+ amd64:
+ - event: syscollector_install_package_alert_apt
+ parameters:
+ PACKAGE_NAME: "rclone"
+ PACKAGE_VERSION: "1.49.5"
+ arm:
+ - event: syscollector_install_package_alert_apt
parameters:
PACKAGE_NAME: "rclone"
- PACKAGE_VERSION: "1.50.0"
+ PACKAGE_VERSION: "1.49.5"
+ windows:
+ amd64:
+ - event: syscollector_install_package_alert_apt  # FIXME(review): dpkg/apt event used for a Windows install — verify the correct Windows event name
+ parameters:
+ PACKAGE_NAME: "VideoLAN"
+ PACKAGE_VERSION: "3.0.6"
+ macos:
+ amd64:
+ - event: syscollector_install_package_alert_apt  # FIXME(review): dpkg/apt event used for a macOS install — verify the correct macOS event name
+ parameters:
+ PACKAGE_NAME:
+ PACKAGE_VERSION:
+
+
+ # check_alerts:
+ # centos:
+ # amd64:
+ # - event: syscollector_install_package_alert_yum
+ # parameters:
+ # PACKAGE_NAME: "rclone"
+ # PACKAGE_VERSION: "1.49.5"
+ # arm:
+ # - event: syscollector_install_package_alert_yum
+ # parameters:
+ # PACKAGE_NAME: "rclone"
+ # PACKAGE_VERSION: "1.49.5"
+ # ubuntu:
+ # amd64:
+ # - event: syscollector_install_package_alert_apt
+ # parameters:
+ # PACKAGE_NAME: "rclone"
+ # PACKAGE_VERSION: "1.49.5"
+ # arm:
+ # - event: syscollector_install_package_alert_apt
+ # parameters:
+ # PACKAGE_NAME: "rclone"
+ # PACKAGE_VERSION: "1.49.5"
+ # windows:
+ # amd64:
+ # - event: syscollector_install_package_alert_apt
+ # parameters:
+ # PACKAGE_NAME: "VideoLAN"
+ # PACKAGE_VERSION: "3.0.6"
+ # macos:
+ # amd64:
+ # - event: syscollector_install_package_alert_apt
+ # parameters:
+ # PACKAGE_NAME:
+ # PACKAGE_VERSION:
teardown:
- remove_package:
package:
- centos: rclone
- ubuntu: rclone
+ centos:
+ amd64: rclone
+ arm: rclone
+ ubuntu:
+ amd64: rclone
+ arm: rclone
+ windows:
+ amd64: C:\\\\Program Files\\\\VideoLAN\\\\VLC\\\\uninstall.exe
+ macos:
+ amd64:
+
+# - case: "Updating a vulnerable package that remains vulnerable to the same CVE"
+# id: "update_vuln_package_remain_vulnerable"
+# description: "Updating a vulnerable package that remains vulnerable to the same CVE"
+# preconditions:
+# tasks:
+# - install_package:
+# package:
+# centos:
+# amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.rpm
+# ubuntu:
+# amd64: https://downloads.rclone.org/v1.49.5/rclone-v1.49.5-linux-amd64.deb
+# check_alerts:
+# centos:
+# - event: syscollector_install_package_alert_yum
+# parameters:
+# PACKAGE_NAME: "rclone"
+# PACKAGE_VERSION: "1.49.5"
+# ubuntu:
+# - event: syscollector_install_package_alert_apt
+# parameters:
+# PACKAGE_NAME: "rclone"
+# PACKAGE_VERSION: "1.49.5"
+# body:
+# tasks:
+# - install_package:
+# package:
+# centos:
+# amd64: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.rpm
+# ubuntu:
+# amd64: https://downloads.rclone.org/v1.50.0/rclone-v1.50.0-linux-amd64.deb
+# check_alerts:
+# centos:
+# - event: syscollector_upgrade_package_alert_yum
+# parameters:
+# PACKAGE_NAME: "rclone"
+# PACKAGE_VERSION: "1.50.0"
+# ubuntu:
+# - event: syscollector_install_package_alert_apt
+# parameters:
+# PACKAGE_NAME: "rclone"
+# PACKAGE_VERSION: "1.50.0"
+# teardown:
+# - remove_package:
+# package:
+# centos: rclone
+# ubuntu: rclone
# ----
diff --git a/tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml b/tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml
index 27a2b3bbb9..e3c38cdb8d 100644
--- a/tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml
+++ b/tests/end_to_end/test_vulnerability_detector/data/configurations/manager.yaml
@@ -1,60 +1,40 @@
- sections:
- section: vulnerability-detector
elements:
+ - interval:
+ value: 5m
- enabled:
value: 'yes'
- run_on_start:
value: 'yes'
- # - provider:
- # attributes:
- # - name: 'redhat'
- # elements:
- # - enabled:
- # value: 'yes'
- # - os:
- # attributes:
- # - path: CUSTOM_REDHAT_OVAL_FEED
- # value: OS
- # - path:
- # value: CUSTOM_REDHAT_JSON_FEED
- provider:
attributes:
- - name: 'canonical'
+ - name: 'redhat'
elements:
- enabled:
- value: 'no'
+ value: 'yes'
+ - os:
+ value: 7
- provider:
attributes:
- - name: 'debian'
+ - name: 'canonical'
elements:
- enabled:
- value: 'no'
+ value: 'yes'
+ - os:
+ value: jammy
- provider:
attributes:
- name: 'msu'
elements:
- enabled:
- value: 'no'
- - provider:
- attributes:
- - name: 'alas'
- elements:
- - enabled:
- value: 'no'
+ value: 'yes'
- provider:
attributes:
- - name: 'arch'
+ - name: 'nvd'
elements:
- enabled:
- value: 'no'
- # - provider:
- # attributes:
- # - name: 'nvd'
- # elements:
- # - enabled:
- # value: 'yes'
- # - path:
- # value: CUSTOM_NVD_JSON_FEED
+ value: 'yes'
- section: sca
elements:
diff --git a/tests/end_to_end/test_vulnerability_detector/data/regex.yaml b/tests/end_to_end/test_vulnerability_detector/data/regex.yaml
index 16b9546824..fdce5052d2 100644
--- a/tests/end_to_end/test_vulnerability_detector/data/regex.yaml
+++ b/tests/end_to_end/test_vulnerability_detector/data/regex.yaml
@@ -1,8 +1,8 @@
### Syscollector Events
-syscollector_first_scan_start:
+syscollector_scan_start:
regex: ".*INFO: Starting evaluation."
-syscollector_first_scan_end:
+syscollector_scan_end:
regex: ".*INFO: Starting evaluation."
syscollector_install_package_alert_yum:
@@ -13,22 +13,6 @@ syscollector_install_package_alert_apt:
regex: '.*New dpkg \(Debian Package\) installed.*.*agent".*"name":"HOST_NAME.*package":"PACKAGE_NAME","arch":"amd64","version":"PACKAGE_VERSION"'
parameters: ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME']
-
-# syscollector_remove_package_alert_yum:
-# regex: ".*installed.*agent".*"name":"HOST_NAME".*Installed: PACKAGE_NAME.*PACKAGE_VERSION"
-# variables: ['PACKAGE_NAME', 'PACKAGE_VERSION']
-
-# syscollector_remove_package_alert_apt:
-# regex: ".*New dpkg \\(Debian Package\\) installed.*.*agent".*"name":"HOST_NAME.*package":"PACKAGE_NAME","arch":"amd64","version":"PACKAGE_VERSION""
-# variables: ['PACKAGE_NAME', 'PACKAGE_VERSION']
-
-
-
-
-# syscollector_upgrade_package_alert_yum:
-# regex: ".*installed.*agent".*"name":"HOST_NAME".*Installed: PACKAGE_NAME.*PACKAGE_VERSION"
-# variables: ['PACKAGE_NAME', 'PACKAGE_VERSION']
-
-# syscollector_upgrade_package_alert_apt:
-# regex: ".*New dpkg \\(Debian Package\\) installed.*.*agent".*"name":"HOST_NAME.*package":"PACKAGE_NAME","arch":"amd64","version":"PACKAGE_VERSION""
-# variables: ['PACKAGE_NAME', 'PACKAGE_VERSION']
+syscollector_upgrade_package_alert_yum:
+ regex: '.*Yum package updated.*agent".*"name":"HOST_NAME".*Updated: PACKAGE_NAME.*PACKAGE_VERSION'
+ parameters: ['PACKAGE_NAME', 'PACKAGE_VERSION', 'HOST_NAME']
diff --git a/tests/end_to_end/test_vulnerability_detector/inventory.yaml b/tests/end_to_end/test_vulnerability_detector/inventory.yaml
deleted file mode 100644
index 419d4933e1..0000000000
--- a/tests/end_to_end/test_vulnerability_detector/inventory.yaml
+++ /dev/null
@@ -1,186 +0,0 @@
-manager:
- hosts:
- manager1:
- ip: 172.31.8.185
- version: 4.4.5
- revision: 1
- repository: live
- type: master
- os: ubuntu_22
- service: EC2
- resources:
- cpu: 4
- memory: 8192
- ansible_host: 172.31.8.185
- ansible_user: qa
- ansible_connection: ssh
- os_name: linux
- ansible_become: 'true'
- ansible_python_interpreter: /usr/local/bin/python3.10
- manager2:
- ip: 172.31.7.16
- version: 4.4.5
- revision: 1
- repository: live
- type: worker
- os: ubuntu_22
- service: EC2
- resources:
- cpu: 2
- memory: 4096
- ansible_host: 172.31.7.16
- ansible_user: qa
- ansible_connection: ssh
- os_name: linux
- ansible_become: 'true'
- ansible_python_interpreter: /usr/local/bin/python3.10
- vars: {}
-filebeat:
- hosts:
- manager1:
- ip: 172.31.8.185
- os: ubuntu_22
- service: EC2
- resources:
- cpu: 4
- memory: 8192
- ansible_host: 172.31.8.185
- ansible_user: qa
- ansible_connection: ssh
- os_name: linux
- ansible_become: 'true'
- ansible_python_interpreter: /usr/local/bin/python3.10
- manager2:
- ip: 172.31.7.16
- os: ubuntu_22
- service: EC2
- resources:
- cpu: 2
- memory: 4096
- ansible_host: 172.31.7.16
- ansible_user: qa
- ansible_connection: ssh
- os_name: linux
- ansible_become: 'true'
- ansible_python_interpreter: /usr/local/bin/python3.10
- vars: {}
-indexer:
- hosts:
- manager1:
- ip: 172.31.8.185
- version: 4.4.5
- revision: 1
- repository: live
- os: ubuntu_22
- service: EC2
- resources:
- cpu: 4
- memory: 8192
- ansible_host: 172.31.8.185
- ansible_user: qa
- ansible_connection: ssh
- os_name: linux
- ansible_become: 'true'
- ansible_python_interpreter: /usr/local/bin/python3.10
- indexer_user: admin
- indexer_password: changeme
- vars: {}
-qa_framework:
- hosts:
- manager1:
- ip: 172.31.8.185
- qa_repository_reference: enhacement/4590-vd-basic-test-cases
- os: ubuntu_22
- service: EC2
- resources:
- cpu: 4
- memory: 8192
- ansible_host: 172.31.8.185
- ansible_user: qa
- ansible_connection: ssh
- os_name: linux
- ansible_become: 'true'
- ansible_python_interpreter: /usr/local/bin/python3.10
- manager2:
- ip: 172.31.7.16
- qa_repository_reference: enhacement/4590-vd-basic-test-cases
- os: ubuntu_22
- service: EC2
- resources:
- cpu: 2
- memory: 4096
- ansible_host: 172.31.7.16
- ansible_user: qa
- ansible_connection: ssh
- os_name: linux
- ansible_become: 'true'
- ansible_python_interpreter: /usr/local/bin/python3.10
- ip-172-31-8-185:
- ip: 172.31.15.144
- qa_repository_reference: enhacement/4590-vd-basic-test-cases
- os: centos_7
- service: EC2
- ansible_host: 172.31.15.144
- ansible_user: qa
- ansible_connection: ssh
- os_name: linux
- ansible_become: 'true'
- ansible_python_interpreter: /usr/local/bin/python3.11
- ip-172-31-6-71:
- ip: 172.31.6.71
- qa_repository_reference: enhacement/4590-vd-basic-test-cases
- os: ubuntu_22
- service: EC2
- ansible_host: 172.31.6.71
- ansible_user: qa
- ansible_connection: ssh
- os_name: linux
- ansible_become: 'true'
- ansible_python_interpreter: /usr/local/bin/python3.10
- vars: {}
-agent:
- children:
- linux:
- hosts:
- ip-172-31-8-185:
- ip: 172.31.15.144
- version: 4.4.5
- revision: 1
- repository: live
- manager: manager1
- os: centos_7
- service: EC2
- ansible_host: 172.31.15.144
- ansible_user: qa
- ansible_connection: ssh
- os_name: linux
- ansible_become: 'true'
- ansible_python_interpreter: /usr/local/bin/python3.11
- manager_ip: 172.31.8.185
- arch: amd64
- ip-172-31-6-71:
- ip: 172.31.6.71
- version: 4.4.5
- revision: 1
- repository: live
- manager: manager1
- os: ubuntu_22
- service: EC2
- ansible_host: 172.31.6.71
- ansible_user: qa
- ansible_connection: ssh
- os_name: linux
- ansible_become: 'true'
- ansible_python_interpreter: /usr/local/bin/python3.10
- manager_ip: 172.31.8.185
- arch: amd64
- macos:
- hosts: {}
- solaris:
- hosts: {}
-
-all:
- vars:
- ansible_ssh_common_args: -o StrictHostKeyChecking=no
- ansible_winrm_server_cert_validation: ignore
- ansible_ssh_private_key_file: /home/rebits/.ssh/JenkinsEphemeral2.pem
\ No newline at end of file
diff --git a/tests/end_to_end/test_vulnerability_detector/test_scans.py b/tests/end_to_end/test_vulnerability_detector/test_scans.py
index 84003cc70f..1b32481e5b 100644
--- a/tests/end_to_end/test_vulnerability_detector/test_scans.py
+++ b/tests/end_to_end/test_vulnerability_detector/test_scans.py
@@ -1,25 +1,25 @@
-import pytest
-import os
-import pytest
-import os
-import subprocess
import argparse
-import ansible_runner
import base64
+import os
import re
+import subprocess
+import tempfile
from multiprocessing.pool import ThreadPool
-from wazuh_testing.tools.configuration import (
- load_configuration_template, set_section_wazuh_conf
-)
+
+import pytest
+import ansible_runner
import xml.dom.minidom
import yaml
-import tempfile
-
-from wazuh_testing.tools.system import HostManager
from ansible.inventory.manager import InventoryManager
from ansible.parsing.dataloader import DataLoader
-from wazuh_testing.tools.monitoring import HostMonitor
+from wazuh_testing.tools.configuration import (
+ load_configuration_template, set_section_wazuh_conf
+)
+from wazuh_testing.tools.monitoring import HostMonitor
+from wazuh_testing.tools.system import HostManager
+from wazuh_testing.api import make_api_call, get_token_login_api
+from wazuh_testing.end_to_end import get_alert_indexer_api
current_dir = os.path.dirname(__file__)
configurations_dir = os.path.join(current_dir, "data", "configurations")
@@ -27,17 +27,12 @@
local_path = os.path.dirname(os.path.abspath(__file__))
tmp_path = os.path.join(local_path, 'tmp')
regex_path = os.path.join(current_dir, 'data', 'regex.yaml')
+STATE_INDEX_NAME = 'agents_state_index'
with open(os.path.join(current_dir, 'cases.yaml'), 'r') as cases_file:
cases = yaml.load(cases_file, Loader=yaml.FullLoader)
-
-packages_manager = {
- 'centos': 'yum',
- 'ubuntu': 'apt'
-}
-
configurations_paths = {
'manager': os.path.join(configurations_dir, 'manager.yaml'),
'agent': os.path.join(configurations_dir, 'agent.yaml')
@@ -55,183 +50,236 @@
}
-def get_event_regex(event, operation_data=None):
- regexes = {}
- with open(regex_path, 'r') as regex_file:
- regexes = yaml.load(regex_file, Loader=yaml.FullLoader)
-
- expected_event = regexes[event['event']]
- expected_regex = expected_event['regex']
- if 'parameters' in expected_event and not 'parameters' in event:
- raise Exception(f"Not provided enaugh data to create regex. Missing {event['PARAMETERS']}")
- elif 'parameters' in event:
- for parameter in expected_event['parameters']:
- expected_regex = expected_regex.replace(parameter, event['parameters'][parameter])
+complete_list = [ (case['preconditions'], case['body'], case['teardown']) for case in cases]
+dependencies = [None if 'depends' not in case else pytest.mark.depends(name=case['depends']) for case in cases]
+list_ids = [ case['id'] for case in cases]
- return expected_regex
@pytest.fixture(scope='module')
-def get_host_manager(request):
- inventory_path = request.config.getoption('--inventory-path')
- host_manager = HostManager(inventory_path)
+def setup_vulnerability_tests(host_manager):
+ # Configure managers and agents
+ hosts_configuration_backup = backup_configurations(host_manager)
+ configure_environment_manager(host_manager, load_vulnerability_detector_configurations())
- return host_manager, inventory_path
+ # Restart managers and stop agents
+ control_environment(host_manager, 'stop', ['agent'])
+ control_environment(host_manager, 'restart', ['manager'])
+ # Wait until VD is updated
+ wait_until_vd_is_updated(host_manager)
-@pytest.fixture(scope='module')
-def restart_environment(get_host_manager):
- host_manager, inventory = get_host_manager
- for host in host_manager.get_group_hosts('manager'):
- host_manager.handle_wazuh_services(host, 'restart')
+ # Truncate alerts and logs of managers and agents
+ truncate_logs(host_manager)
+
+ # Start agents
+ control_environment(host_manager, 'start', ['agent'])
- for host in host_manager.get_group_hosts('agent'):
- host_manager.handle_wazuh_services(host, 'restart')
+ yield
+ restore_backup(host_manager, hosts_configuration_backup)
-@pytest.fixture(scope='module', autouse=False)
-def configure_environment_manager(get_host_manager):
- def configure_host(host):
- host_variables = host.get_vars()
+def backup_configurations(host_manager):
+ backup_configurations = {}
+ for host in host_manager.get_group_hosts('all'):
+ host_variables = host_manager.get_host_variables(host)
host_os = host_variables['os_name']
configuration_file_path = configuration_filepath_os[host_os]
+ current_configuration = host_manager.get_file_content(str(host), configuration_file_path)
+ backup_configurations[str(host)] = current_configuration
+ return backup_configurations
+
+
+def restore_backup(host_manager, backup_configurations):
+ for host in host_manager.get_group_hosts('all'):
+ host_variables = host_manager.get_host_variables(host)
+ host_os = host_variables['os_name']
+ configuration_file_path = configuration_filepath_os[host_os]
+ host_manager.modify_file_content(str(host), configuration_file_path, backup_configurations[str(host)])
+
+
+def load_vulnerability_detector_configurations():
+ return {
+ 'agent': load_configuration_template(configurations_paths['agent'], [{}], [{}]),
+ 'manager': load_configuration_template(configurations_paths['manager'], [{}], [{}])
+ }
+
+def configure_environment_manager(host_manager, configurations):
+ def configure_host(host, host_configuration_role):
+ host_os = host_manager.get_host_variables(host)['os_name']
+ configuration_file_path = configuration_filepath_os[host_os]
+ host_groups = host_manager.get_host_groups(host)
host_configuration = None
- host_groups = [str(group) for group in host.get_groups()]
if 'manager' in host_groups:
- host_configuration = configurations_paths['manager']
+ host_configuration = host_configuration_role['manager']
elif 'agent' in host_groups:
- host_configuration = configurations_paths['agent']
+ host_configuration = host_configuration_role['agent']
current_configuration = host_manager.get_file_content(str(host), configuration_file_path)
- backup_configurations[host] = current_configuration
- new_configuration_template = load_configuration_template(host_configuration, [{}], [{}])
- new_configuration = set_section_wazuh_conf(new_configuration_template[0].get('sections'), current_configuration.split("\n"))
+ new_configuration = set_section_wazuh_conf(host_configuration[0].get('sections'), current_configuration.split("\n"))
+
new_configuration = [line for line in new_configuration if line.strip() != ""]
dom = xml.dom.minidom.parseString(''.join(new_configuration))
new_configuration = "\n".join(dom.toprettyxml().split("\n")[1:])
host_manager.modify_file_content(str(host), configuration_file_path, new_configuration)
- backup_configurations = {}
+ loader = DataLoader()
+ configure_environment_parallel_map = [ (host, configurations) for host in host_manager.get_group_hosts('all')]
- host_manager, inventory = get_host_manager
+ with ThreadPool() as pool:
+ pool.starmap(configure_host, configure_environment_parallel_map)
- loader = DataLoader()
- inventory_manager = InventoryManager(loader=loader, sources=inventory)
- all_hosts = inventory_manager.get_hosts()
+def control_environment(host_manager, operation, group_list):
+ for group in group_list:
+ for host in host_manager.get_group_hosts(group):
+ host_manager.handle_wazuh_services(host, operation)
- with ThreadPool() as pool:
- pool.map(configure_host, all_hosts)
+def get_event_regex(event, operation_data=None):
+ """
+ """
+ regexes = {}
+ with open(regex_path, 'r') as regex_file:
+ regexes = yaml.load(regex_file, Loader=yaml.FullLoader)
- yield
+ expected_event = regexes[event['event']]
+ expected_regex = expected_event['regex']
- for host in all_hosts:
- host_variables = host.get_vars()
- host_os = host_variables['os_name']
- configuration_file_path = configuration_filepath_os[host_os]
+ if 'parameters' in expected_event and not 'parameters' in event:
+ raise Exception(f"Not provided enough data to create regex. Missing {expected_event['parameters']}")
+ elif 'parameters' in event:
+ for parameter in expected_event['parameters']:
+ expected_regex = expected_regex.replace(parameter, event['parameters'][parameter])
- host_manager.modify_file_content(str(host), configuration_file_path, backup_configurations[host])
-complete_list = [ (case['preconditions'], case['body'], case['teardown']) for case in cases]
-list_ids = [ case['id'] for case in cases]
+ return expected_regex
+
+
+@pytest.fixture(scope='module')
+def host_manager(request):
+ inventory_path = request.config.getoption('--inventory-path')
+ manager = HostManager(inventory_path)
+
+ return manager
+def truncate_agents_logs(host_manager):
+ for agent in host_manager.get_group_hosts('agent'):
+ host_os_name = host_manager.get_host_variables(agent)['os_name']
+ host_manager.truncate_file(agent, logs_filepath_os[host_os_name])
-def launch_remote_operation(host, operation, operation_data, hm, inventory):
+def truncate_managers_logs(host_manager):
+ for agent in host_manager.get_group_hosts('manager'):
+ host_os_name = host_manager.get_host_variables(agent)['os_name']
+ host_manager.truncate_file(agent, logs_filepath_os[host_os_name])
+
+def truncate_logs(host_manager):
+ # for manager in host_manager.get_group_hosts('manager'):
+ # host_manager.truncate_file(manager, '/var/ossec/logs/alerts/alerts.json')
+ truncate_managers_logs(host_manager)
+ truncate_agents_logs(host_manager)
+
+
+def wait_until_vd_is_updated(host_manager):
+ monitoring_data = {}
+ for manager in host_manager.get_group_hosts('manager'):
+ monitoring_data = generate_monitoring_logs_manager(host_manager, manager, 'Starting vulnerability scan', 600)
+
+ monitoring_events(host_manager, monitoring_data)
+
+
+def wait_until_vuln_scan_finished(host_manager):
+ monitoring_data = {}
+ for manager in host_manager.get_group_hosts('manager'):
+ monitoring_data = generate_monitoring_logs_manager(host_manager, manager, 'Vulnerability scan finished.', 600)
+
+ monitoring_events(host_manager, monitoring_data)
+
+
+def launch_remote_operation(host, operation, operation_data, host_manager):
print(f"Operation {operation} in {host} with {operation_data}")
- host_os_name = hm.get_host_variables(host)['os'].split('_')[0]
- host_os_arch = hm.get_host_variables(host)['arch']
+ host_os_name = host_manager.get_host_variables(host)['os'].split('_')[0]
+ host_os_arch = host_manager.get_host_variables(host)['arch']
+
+ system = host_manager.get_host_variables(host)['os_name']
+ if system == 'linux':
+ system = host_manager.get_host_variables(host)['os'].split('_')[0]
+
+
if operation == 'install_package':
package_data = operation_data['package']
package_url = package_data[host_os_name][host_os_arch]
- package_manager = packages_manager[host_os_name]
- print(f"Install package {host} {package_url} {package_manager}")
- hm.install_package(host, package_url, package_manager )
+ host_manager.install_package(host, package_url, system )
elif operation == 'remove_package':
package_data = operation_data['package']
package_name = package_data[host_os_name]
- package_manager = packages_manager[host_os_name]
- hm.remove_package(host, package_name, package_manager )
+ host_manager.remove_package(host, package_name, system)
-def launch_remote_sequential_operation_on_agent(agent, task_list, host_manager, inventory):
+def launch_remote_sequential_operation_on_agent(agent, task_list, host_manager):
if task_list:
for task in task_list:
task_keys = list(task.keys())
task_values = list(task.values())
operation, operation_data = task_keys[0], task_values[0]
- launch_remote_operation(agent, operation, operation_data, host_manager, inventory)
+ launch_remote_operation(agent, operation, operation_data, host_manager)
-def launch_parallel_operations(task_list, host_manager, inventory, group='agent'):
+def launch_parallel_operations(task_list, host_manager, group='agent'):
agents = host_manager.get_group_hosts('agent')
- parallel_configuration = [(agent, task_list, host_manager, inventory) for agent in agents]
+ parallel_configuration = [(agent, task_list, host_manager) for agent in agents]
with ThreadPool() as pool:
# Use the pool to map the function to the list of hosts
pool.starmap(launch_remote_sequential_operation_on_agent, parallel_configuration)
@pytest.fixture(scope='function')
-def setup(preconditions, teardown, get_host_manager):
- hm, inventory = get_host_manager
+def setup(preconditions, teardown, host_manager):
if preconditions:
- launch_parallel_operations(preconditions['tasks'], hm, inventory)
+ launch_parallel_operations(preconditions['tasks'], host_manager)
if 'check_alerts' in preconditions:
monitoring_data = {}
- for agent in hm.get_group_hosts('agent'):
- host_os_name = hm.get_host_variables(agent)['os'].split('_')[0]
+ for agent in host_manager.get_group_hosts('agent'):
+ host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0]
check_alerts_data = preconditions['check_alerts'][host_os_name]
- for event in check_alerts_data:
- if not hm.get_host_variables(agent)['manager'] in monitoring_data:
- monitoring_data[hm.get_host_variables(agent)['manager']] = []
- check_alerts_data['parameters']['HOST_NAME'] = agent
-
- regex = get_event_regex(preconditions['check_alerts'][host_os_name])
- monitoring_data[hm.get_host_variables('manager')] = [{
- 'regex': regex,
- 'path': '/var/ossec/logs/alerts/alerts.json',
- 'timeout': 30
- }]
-
-
- for agent in hm.get_group_hosts('agent'):
- host_os_name = hm.get_host_variables(agent)['os'].split('_')[0]
-
- for event in metadata_agent:
- event['parameters']['HOST_NAME'] = agent
- monitoring_element = {
- 'regex': get_event_regex(event),
- 'path': '/var/ossec/logs/alerts/alerts.json',
- 'timeout': 120
- }
-
- if 'parameters' in metadata_agent:
- monitoring_element['parameters'] = metadata_agent['parameters']
-
- monitoring_data[hm.get_host_variables(agent)['manager']].append(monitoring_element)
-
+ for event in check_alerts_data:
+ if not host_manager.get_host_variables(agent)['manager'] in monitoring_data:
+ monitoring_data[host_manager.get_host_variables(agent)['manager']] = []
+ if not 'parameters' in event:
+ event['parameters'] = {}
+ event['parameters']['HOST_NAME'] = agent
+ regex = get_event_regex(event)
+ monitoring_element = {
+ 'regex': regex,
+ 'path': '/var/ossec/logs/alerts/alerts.json',
+ 'timeout': 30,
+ 'parameters': event['parameters']
+ }
+                monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element)
-            monitoring_events(get_host_manager, monitoring_data)
+ monitoring_events(host_manager, monitoring_data)
yield
if teardown:
- launch_parallel_operations(teardown, hm, inventory)
+ launch_parallel_operations(teardown, host_manager)
- # for host in hm.get_group_hosts('manager'):
- # hm.modify_file_content(host, path='/var/ossec/logs/alerts/alerts.json', content='')
+ for host in host_manager.get_group_hosts('manager'):
+ host_manager.truncate_file(host, '/var/ossec/logs/alerts/alerts.json')
def create_temp_file(content):
@@ -241,9 +289,9 @@ def create_temp_file(content):
return temp_file_path
-def monitoring_events(get_host_manager, monitoring_data):
- hm, inventory = get_host_manager
+def monitoring_events(host_manager, monitoring_data):
monitoring_file_content = ''
+ results = {}
for host, data in monitoring_data.items():
monitoring_file_content += f"{host}:\n"
@@ -252,87 +300,189 @@ def monitoring_events(get_host_manager, monitoring_data):
monitoring_file_content += f" path: '{monitoring_event['path']}'\n"
monitoring_file_content += f" timeout: {monitoring_event['timeout']}\n"
- temp_file = create_temp_file(monitoring_file_content)
+ temp_file = create_temp_file(monitoring_file_content)
+ try:
+ results.update(HostMonitor(inventory_path=host_manager.get_inventory_path(), messages_path=temp_file, tmp_path=tmp_path).run())
+ except TimeoutError:
+ pass
- HostMonitor(inventory_path=inventory, messages_path=temp_file, tmp_path=tmp_path).run()
+ os.remove(temp_file)
- os.remove(temp_file)
+ return results
-@pytest.mark.dependency()
-def test_syscollector_first_scan(get_host_manager):
- """
- """
- hm, inventory = get_host_manager
- regex_info = {
- 'event': 'syscollector_first_scan_start'
- }
-
+def generate_monitoring_logs_all_agent(host_manager, regex_list, timeout_list):
monitoring_data = {}
- regex = get_event_regex(regex_info)
- for agent in hm.get_group_hosts('agent'):
- host_os_name = hm.get_host_variables(agent)['os'].split('_')[0]
- monitoring_data[agent] = [{
- 'regex': regex,
- 'path': logs_filepath_os['linux'],
- 'timeout': 120
- }]
+ for agent in host_manager.get_group_hosts('agent'):
+ monitoring_data[agent] = []
+ for index, regex_index in enumerate(regex_list):
+ os_name = host_manager.get_host_variables(agent)['os_name']
+ monitoring_data[agent].append({
+ 'regex': regex_index,
+ 'path': logs_filepath_os[os_name],
+ 'timeout': timeout_list[index]
- monitoring_events(get_host_manager, monitoring_data)
+ })
+ print(monitoring_data)
+ return monitoring_data
-@pytest.mark.dependency(depends=["test_syscollector_first_scan"])
-def test_syscollector_second_scan(get_host_manager):
- """
- """
- hm, inventory = get_host_manager
+
+def generate_monitoring_logs_manager(host_manager, manager, regex, timeout):
monitoring_data = {}
- regex_info = {
- 'event': 'syscollector_first_scan_start'
- }
- regex = get_event_regex(regex_info)
- for agent in hm.get_group_hosts('agent'):
- host_os_name = hm.get_host_variables(agent)['os'].split('_')[0]
- monitoring_data[agent] = [{
- 'regex': regex,
- 'path': logs_filepath_os['linux'],
- 'timeout': 120
- }]
+ os_name = host_manager.get_host_variables(manager)['os_name']
+ monitoring_data[manager] = [{
+ 'regex': regex,
+ 'path': logs_filepath_os[os_name],
+ 'timeout': timeout
- monitoring_events(get_host_manager, monitoring_data)
+ }]
+ return monitoring_data
-# @pytest.mark.dependency(depends=["test_syscollector_second_scan"])
-@pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids)
-def test_vulnerability_detector_scans(preconditions, body, teardown, setup, get_host_manager):
- """
- """
- hm, inventory = get_host_manager
- launch_parallel_operations(body['tasks'], hm, inventory)
- metadata = body['check_alerts']
+
+def generate_monitoring_alerts_all_agent(host_manager, events_metadata):
monitoring_data = {}
- for agent in hm.get_group_hosts('agent'):
- host_os_name = hm.get_host_variables(agent)['os'].split('_')[0]
- metadata_agent = metadata[host_os_name]
- if not hm.get_host_variables(agent)['manager'] in monitoring_data:
- monitoring_data[hm.get_host_variables(agent)['manager']] = []
- for event in metadata_agent:
+ for agent in host_manager.get_group_hosts('agent'):
+ host_os_name = host_manager.get_host_variables(agent)['os'].split('_')[0]
+ metadata_agent = events_metadata[host_os_name]
+
+ if not host_manager.get_host_variables(agent)['manager'] in monitoring_data:
+ monitoring_data[host_manager.get_host_variables(agent)['manager']] = []
+
+        for event in metadata_agent[host_manager.get_host_variables(agent)['arch']]:
event['parameters']['HOST_NAME'] = agent
monitoring_element = {
'regex': get_event_regex(event),
'path': '/var/ossec/logs/alerts/alerts.json',
- 'timeout': 120
+ 'timeout': 120,
}
if 'parameters' in metadata_agent:
monitoring_element['parameters'] = metadata_agent['parameters']
- monitoring_data[hm.get_host_variables(agent)['manager']].append(monitoring_element)
+ monitoring_data[host_manager.get_host_variables(agent)['manager']].append(monitoring_element)
- print(monitoring_data)
+def get_master_ip(host_manager):
+ for manager in host_manager.get_group_hosts('manager'):
+ if host_manager.get_host_variables(manager)['type'] == 'master':
+ return host_manager.get_host_variables(manager)['ip']
+
+
+def get_state_index(host_manager):
+    url = f"https://{get_master_ip(host_manager)}:9200/{STATE_INDEX_NAME}/_search"
+
+ response = requests.get(url=url, params={'pretty': 'true'}, json=query, verify=False,
+ auth=requests.auth.HTTPBasicAuth(credentials['user'], credentials['password']))
+
+ return response.text
+
+
+def get_agents_id(host_manager):
+ API_PROTOCOL = 'https'
+ API_HOST = get_master_ip(host_manager)
+ API_PORT = '55000'
+ API_USER = 'wazuh'
+ API_PASS = 'wazuh'
+ API_LOGIN_ENDPOINT = '/security/user/authenticate'
+
+ response_token = get_token_login_api(API_PROTOCOL, API_HOST, API_PORT, API_USER, API_PASS, API_LOGIN_ENDPOINT,
+ timeout=10, login_attempts=3, sleep_time=1)
+
+ agent_output = make_api_call(get_master_ip(host_manager), endpoint='/agents', token=response_token).json()
+ agents_ids = {}
+ for agent in agent_output['data']['affected_items']:
+ agents_ids[agent['name']] = agent['id']
+
+ return agents_ids
+
+
+def get_agents_vulnerabilities(host_manager):
+ API_PROTOCOL = 'https'
+ API_HOST = get_master_ip(host_manager)
+ API_PORT = '55000'
+ API_USER = 'wazuh'
+ API_PASS = 'wazuh'
+ API_LOGIN_ENDPOINT = '/security/user/authenticate'
+
+ response_token = get_token_login_api(API_PROTOCOL, API_HOST, API_PORT, API_USER, API_PASS, API_LOGIN_ENDPOINT,
+ timeout=10, login_attempts=3, sleep_time=1)
+
+ agents_ids = get_agents_id(host_manager)
+ agents_vuln = {}
+ for agent in host_manager.get_group_hosts('agent'):
+ agents_vuln[agent] = make_api_call(get_master_ip(host_manager), endpoint=f"/vulnerability/{agents_ids[agent]}", token=response_token).json()['data']['affected_items']
+
+ return agents_vuln
+
+@pytest.mark.dependency()
+def test_syscollector_initial_scans(host_manager):
+ # The Agent's syscollector scan is run
+ monitoring_data = generate_monitoring_logs_all_agent(host_manager,
+ [get_event_regex({'event': 'syscollector_scan_start'}),
+ get_event_regex({'event': 'syscollector_scan_end'})],
+ [20, 20])
+
+ results = monitoring_events(host_manager, monitoring_data)
+
+ assert all(results.values()), f"Expected message was not triggered for some agents, {results}"
+
+ truncate_agents_logs(host_manager)
+
+ wait_until_vuln_scan_finished(host_manager)
+
+ # Check vulnerabilities for agent
+ agents_vuln_before_second_scan = get_agents_vulnerabilities(host_manager)
+ for agent, vuln in agents_vuln_before_second_scan.items():
+ assert vuln, f"No vulnerabilities were detected for agent {agent}"
+
+ # Check Agent's System states are stored
+ state_index_content_before_second_scan = get_state_index(host_manager)
+
+ # Compare agents_vuln_before_second_scan with state_index_content
+ # To Do
+
+ # The Agent's syscollector scan is run
+ monitoring_data = generate_monitoring_logs_all_agent(host_manager,
+ [get_event_regex({'event': 'syscollector_scan_start'}),
+ get_event_regex({'event': 'syscollector_scan_end'})],
+ [60, 60])
+
+ results = monitoring_events(host_manager, monitoring_data)
+
+ assert all(results.values()), f"Expected message was not triggered for some agents, {results}"
+
+ truncate_managers_logs(host_manager)
+
+ wait_until_vuln_scan_finished(host_manager)
+
+ agents_vuln_after_second_scan = get_agents_vulnerabilities(host_manager)
+
+
+ assert agents_vuln_before_second_scan == agents_vuln_after_second_scan
+
+ # Check Agent's System states are stored
+ state_index_content_after_second_scan = get_state_index(host_manager)
+
+ assert state_index_content_after_second_scan == state_index_content_before_second_scan
+
+
+# @pytest.mark.dependency(depends=["test_syscollector_second_scan"])
+@pytest.mark.parametrize('preconditions, body, teardown', complete_list, ids=list_ids)
+def test_vulnerability_detector_scans(preconditions, body, teardown, setup, host_manager):
+ # Launch tests tasks
+ launch_parallel_operations(body['tasks'], host_manager)
+
+ # Check vulnerability
+ agents_vuln_after_second_scan = get_agents_vulnerabilities(host_manager)
+ # Check alert in Wazuh Indexer
+    monitoring_data = generate_monitoring_alerts_all_agent(host_manager, body['check_alerts'])
+ expected_alerts = body['check_agent_alert_indexer']
+ # Check agent System state
- monitoring_events(get_host_manager, monitoring_data)
+ results = monitoring_events(host_manager, monitoring_data)
+ assert all(results.values()), f"Expected message was not triggered for some agents, {results}"