Add new system test for GET /cluster/ruleset/synchronization endpoint #3180

Merged · 4 commits · Aug 24, 2022
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -9,6 +9,7 @@ Release report: TBD

### Added
- Add Integratord IT - new test_integratord suite ([#3125](https://github.com/wazuh/wazuh-qa/pull/3125)) \- (Framework + Tests)
- Add system test to check synchronization status in the cluster ([#3180](https://github.com/wazuh/wazuh-qa/pull/3180)) \- (Framework + Tests)

### Changed

2 changes: 1 addition & 1 deletion deps/wazuh_testing/wazuh_testing/tools/__init__.py
@@ -57,9 +57,9 @@
REMOTE_STATISTICS_FILE = os.path.join(WAZUH_PATH, 'var', 'run', 'wazuh-remoted.state')
ANALYSIS_STATISTICS_FILE = os.path.join(WAZUH_PATH, 'var', 'run', 'wazuh-analysisd.state')
UPGRADE_PATH = os.path.join(WAZUH_PATH, 'var', 'upgrade')
PYTHON_PATH = os.path.join(WAZUH_PATH, 'framework', 'python')
AGENT_AUTH_BINARY_PATH = os.path.join(WAZUH_PATH, 'bin', 'agent-auth')


try:
import grp
import pwd
63 changes: 48 additions & 15 deletions deps/wazuh_testing/wazuh_testing/tools/system.py
@@ -42,7 +42,7 @@ def move_file(self, host: str, src_path: str, dest_path: str = '/var/ossec/etc/o
host (str): Hostname
src_path (str): Source path
dest_path (str): Destination path
check (bool, optional): Ansible check mode("Dry Run")(https://docs.ansible.com/ansible/latest/user_guide/playbooks_checkmode.html), by default it is enabled so no changes will be applied. Default `False`
check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be applied.
"""
self.get_host(host).ansible("copy", f"src={src_path} dest={dest_path} owner=wazuh group=wazuh mode=0775",
check=check)
@@ -56,7 +56,8 @@ def add_block_to_file(self, host: str, path: str, replace: str, before: str, aft
replace (str): Text to be inserted in the file
before (str): Lower stop of the block to be replaced
after (str): Upper stop of the block to be replaced
check (bool, optional): Ansible check mode("Dry Run")(https://docs.ansible.com/ansible/latest/user_guide/playbooks_checkmode.html), by default it is enabled so no changes will be applied. Default `False`
check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be
applied. Default `False`.
"""
replace = f'{after}{replace}{before}'
self.get_host(host).ansible("replace", fr"path={path} regexp='{after}[\s\S]+{before}' replace='{replace}'",
@@ -76,7 +77,8 @@ def control_service(self, host: str, service: str = 'wazuh', state: str = "start
host (str): Hostname
service (str): Service to be controlled
state (str): Final state in which service must end
check (bool, optional): Ansible check mode("Dry Run")(https://docs.ansible.com/ansible/latest/user_guide/playbooks_checkmode.html), by default it is enabled so no changes will be applied. Default `False`
check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be
applied. Default `False`.
"""
if service == 'wazuh':
service = 'wazuh-agent' if 'agent' in host else 'wazuh-manager'
@@ -88,10 +90,22 @@ def clear_file(self, host: str, file_path: str, check: bool = False):
Args:
host (str): Hostname
file_path (str): File path to be truncated
check (bool, optional): Ansible check mode("Dry Run")(https://docs.ansible.com/ansible/latest/user_guide/playbooks_checkmode.html), by default it is enabled so no changes will be applied. Default `False`
check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be
applied. Default `False`
"""
self.get_host(host).ansible("copy", f"dest={file_path} content='' force=yes", check=check)

    def clear_file_without_recreate(self, host: str, file_path: str, check: bool = False):
        """Truncate the specified file without recreating it.

        Args:
            host (str): Hostname
            file_path (str): File path to be truncated
            check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be
                applied. Default `False`
        """
        self.get_host(host).ansible('shell', f"truncate -s 0 {file_path}", check=check)
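
    # A minimal usage sketch (the host name is illustrative), assuming a HostManager built from a
    # system-test inventory. Unlike clear_file(), which rewrites the target through Ansible's copy
    # module, this helper truncates it in place with `truncate -s 0`, so the file's owner, group
    # and permissions are preserved:
    #
    #   host_manager = HostManager(inventory_path)
    #   host_manager.clear_file_without_recreate('wazuh-master', '/var/ossec/logs/cluster.log')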

def get_file_content(self, host: str, file_path: str):
"""Get the content of the specified file.

@@ -138,10 +152,11 @@ def apply_api_config(self, api_config: str or dict = None, host_list: list = Non
"""Apply the API configuration described in the yaml file or in the dictionary.

Args:
api_config (str,dict): Configuration to be applied. If it is a string, it will try to load the YAML in that path. If it is a dictionary, it will apply that configuration to every host in `host_list`.
api_config (str,dict): Configuration to be applied. If it is a string, it will try to load the YAML in that
path. If it is a dictionary, it will apply that configuration to every host in `host_list`.
host_list (list, optional): List of hosts to apply the configuration in. Default `None`
dest_path (str, optional): Path where the API configuration is. Default `/var/ossec/api/configuration/api.yaml`
clear_log (bool, optional): Boolean to decide if it must truncate the 'api.log' after restarting the API or not.
dest_path (str, optional): Path where the API configuration is.
clear_log (bool, optional): Boolean to decide if it must truncate the 'api.log' after restarting the API.
"""
if isinstance(api_config, str):
with open(api_config, 'r') as config_yml:
@@ -167,7 +182,7 @@ def get_api_token(self, host, user='wazuh', password='wazuh', auth_context=None,
password (str, optional): API password. Default `wazuh`
auth_context (dict, optional): Authorization context body. Default `None`
port (int, optional): API port. Default `55000`
check (bool, optional): Ansible check mode("Dry Run")(https://docs.ansible.com/ansible/latest/user_guide/playbooks_checkmode.html),
check (bool, optional): Ansible check mode("Dry Run"),
by default it is enabled so no changes will be applied. Default `False`

Returns:
@@ -183,10 +198,11 @@ def get_api_token(self, host, user='wazuh', password='wazuh', auth_context=None,
login_body = ''

try:
token_response = self.get_host(host).ansible('uri', f'url=https://localhost:{port}{login_endpoint} '
f'user={user} password={password} method={login_method} '
f'{login_body} validate_certs=no force_basic_auth=yes',
check=check)
token_response = self.get_host(host).ansible(
'uri',
f'url=https://localhost:{port}{login_endpoint} user={user} password={password} method={login_method} '
f'{login_body} validate_certs=no force_basic_auth=yes',
check=check)
return token_response['json']['data']['token']
except KeyError:
raise KeyError(f'Failed to get token: {token_response}')
@@ -201,7 +217,8 @@ def make_api_call(self, host, port=55000, method='GET', endpoint='/', request_bo
endpoint (str, optional): Request endpoint. It must start with '/'.. Default `/`
request_body ( dict, optional) : Request body. Default `None`
token (str, optional): Request token. Default `None`
check ( bool, optional): Ansible check mode("Dry Run")(https://docs.ansible.com/ansible/latest/user_guide/playbooks_checkmode.html), by default it is enabled so no changes will be applied. Default `False`
check ( bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be
applied. Default `False`

Returns:
API response (dict) : Return the response in JSON format.
@@ -223,7 +240,8 @@ def run_command(self, host: str, cmd: str, check: bool = False):
Args:
host (str) : Hostname
cmd (str): Command to execute
check (bool, optional): Ansible check mode("Dry Run")(https://docs.ansible.com/ansible/latest/user_guide/playbooks_checkmode.html), by default it is enabled so no changes will be applied. Default `False`
check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be
applied. Default `False`

Returns:
stdout (str): The output of the command execution.
@@ -238,9 +256,24 @@ def run_shell(self, host: str, cmd: str, check: bool = False):
Args:
host (str) : Hostname
cmd (str): Shell command to execute
check (bool, optional): Ansible check mode("Dry Run")(https://docs.ansible.com/ansible/latest/user_guide/playbooks_checkmode.html), by default it is enabled so no changes will be applied. Default `False`
check (bool, optional): Ansible check mode("Dry Run"), by default it is enabled so no changes will be
applied. Default `False`

Returns:
stdout (str): The output of the command execution.
"""
return self.get_host(host).ansible('shell', cmd, check=check)['stdout']

    def find_file(self, host: str, path: str, pattern: str, recurse: bool = False, use_regex: bool = False):
        """Search and return information of a file inside a path.

        Args:
            host (str): Hostname
            path (str): Path in which to search for the file that matches the pattern.
            pattern (str): Restrict the files to be returned to those whose basenames match the pattern specified.
            recurse (bool): If target is a directory, recursively descend into the directory looking for files.
            use_regex (bool): If no, the patterns are file globs (shell), if yes, they are python regexes.

        Returns:
            Files (list): List of found files.
        """
        return self.get_host(host).ansible("find", f"paths={path} patterns={pattern} recurse={recurse} "
                                                   f"use_regex={use_regex}")
1 change: 1 addition & 0 deletions tests/system/README.md
@@ -114,6 +114,7 @@ required an specific testing environment located in `wazuh-qa/tests/system/provi
| test_cluster/test_agent_key_polling | basic_cluster |
| test_cluster/test_agent_files_deletion | basic_cluster |
| test_cluster/test_integrity_sync | agentless_cluster |
| test_cluster/test_ruleset_sync_status | agentless_cluster |
| test_jwt_invalidation | agentless_cluster |

### Test structure
56 changes: 56 additions & 0 deletions tests/system/test_cluster/conftest.py
@@ -0,0 +1,56 @@
# Copyright (C) 2015-2022, Wazuh Inc.
# Created by Wazuh, Inc. <info@wazuh.com>.
# This program is free software; you can redistribute it and/or modify it under the terms of GPLv2

import copy
import json
from functools import reduce
from operator import getitem

import pytest

from wazuh_testing.tools import PYTHON_PATH


@pytest.fixture(scope='module')
def update_cluster_json(request):
    """Update cluster.json file and restart cluster nodes.

    Update cluster.json file in each node and restart it before running the test. Then, the original content
    is restored and the cluster nodes are restarted again.

    IMPORTANT: These variables must be defined in the module where this fixture is called:
        - test_hosts (list): Cluster host names.
        - host_manager (HostManager): Instance of HostManager.
        - cluster_json_values (list of dicts): Each item of the list must follow the structure below. This example:
            {'key': ['<dict_key_A>', '<dict_key_AA>'], 'value': <value>}
          would replace this value:
            {'dict_key_A': {'dict_key_AA': <REPLACED_VALUE>, 'dict_key_AB': 'unchanged_value', ...}, 'dict_key_B': ...}
    """
    backup_json = {}
    test_hosts = getattr(request.module, 'test_hosts')
    host_manager = getattr(request.module, 'host_manager')
    cluster_json_values = getattr(request.module, 'cluster_json_values')

    for host in test_hosts:
        # Find cluster.json path.
        cluster_json = host_manager.find_file(host, path=PYTHON_PATH, recurse=True,
                                              pattern='cluster.json')['files'][0]['path']
        cluster_conf = json.loads(host_manager.run_command(host, f"cat {cluster_json}"))
        backup_json[host] = {'path': cluster_json, 'content': copy.deepcopy(cluster_conf)}

        # Update dict/nested_dicts.
        for item in cluster_json_values:
            reduce(getitem, item['key'][:-1], cluster_conf)[item['key'][-1]] = item['value']
        host_manager.modify_file_content(host=host, path=cluster_json, content=json.dumps(cluster_conf, indent=4))

        # Restart manager.
        host_manager.control_service(host=host, service='wazuh', state='restarted')

    yield

    # Restore cluster.json and restart.
    for host in backup_json:
        host_manager.modify_file_content(host=host, path=backup_json[host]['path'],
                                         content=json.dumps(backup_json[host]['content'], indent=4))
        host_manager.control_service(host=host, service='wazuh-manager', state='restarted')
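
The nested update inside the fixture is compact enough to deserve a note: reduce(getitem, item['key'][:-1],
cluster_conf) walks the dictionary down every key except the last, and the last key is then assigned on the
innermost dictionary it returns. A self-contained sketch of the same idea (the starting value 30 is
illustrative, not taken from a real cluster.json):

from functools import reduce
from operator import getitem

conf = {'intervals': {'worker': {'sync_integrity': 30}}}
item = {'key': ['intervals', 'worker', 'sync_integrity'], 'value': 120}

# reduce(getitem, ['intervals', 'worker'], conf) evaluates to conf['intervals']['worker'];
# assigning to the last key then mutates the nested dictionary in place.
reduce(getitem, item['key'][:-1], conf)[item['key'][-1]] = item['value']
assert conf['intervals']['worker']['sync_integrity'] == 120
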
7 changes: 7 additions & 0 deletions tests/system/test_cluster/test_ruleset_sync_status/data/master_messages.yaml
@@ -0,0 +1,7 @@
wazuh-master:
  - regex: .*\[Local integrity\] Starting.*
    path: /var/ossec/logs/cluster.log
    timeout: 120
  - regex: .*\[Local integrity\] Finished in .*
    path: /var/ossec/logs/cluster.log
    timeout: 10
14 changes: 14 additions & 0 deletions tests/system/test_cluster/test_ruleset_sync_status/data/worker_messages.yaml
@@ -0,0 +1,14 @@
wazuh-worker1:
  - regex: .*Integrity sync.*Starting.*
    path: /var/ossec/logs/cluster.log
    timeout: 240
  - regex: .*Integrity sync.*Finished in.*
    path: /var/ossec/logs/cluster.log
    timeout: 30
wazuh-worker2:
  - regex: .*Integrity sync.*Starting.*
    path: /var/ossec/logs/cluster.log
    timeout: 240
  - regex: .*Integrity sync.*Finished in.*
    path: /var/ossec/logs/cluster.log
    timeout: 30
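
Each entry in these two YAML files is an expectation for HostMonitor: on the named host, a line matching
regex must show up in path within timeout seconds, otherwise the monitor fails. The test module below
consumes them roughly as follows (this mirrors the calls made in the test itself):

# Block until the master logs the 'Local integrity' start/finish messages, then until both workers
# log the 'Integrity sync' start/finish messages, honouring the per-entry timeouts defined above.
HostMonitor(inventory_path=inventory_path,
            messages_path=os.path.join(test_data_path, 'master_messages.yaml'),
            tmp_path=tmp_path).run()
HostMonitor(inventory_path=inventory_path,
            messages_path=os.path.join(test_data_path, 'worker_messages.yaml'),
            tmp_path=tmp_path).run()
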
83 changes: 83 additions & 0 deletions tests/system/test_cluster/test_ruleset_sync_status/test_ruleset_sync_status.py
@@ -0,0 +1,83 @@
# Copyright (C) 2015-2022, Wazuh Inc.
# Created by Wazuh, Inc. <info@wazuh.com>.
# This program is free software; you can redistribute it and/or modify it under the terms of GPLv2

import os
import uuid

from wazuh_testing.tools import WAZUH_PATH, WAZUH_LOGS_PATH
from wazuh_testing.tools.monitoring import HostMonitor
from wazuh_testing.tools.system import HostManager

# Hosts
test_hosts = ['wazuh-master', 'wazuh-worker1', 'wazuh-worker2']
worker_hosts = test_hosts[1:]

# Data paths
test_data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data')
tmp_path = os.path.join(test_data_path, 'tmp')
inventory_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))),
                              'provisioning', 'agentless_cluster', 'inventory.yml')
host_manager = HostManager(inventory_path)
cluster_json_values = [
    {'key': ['intervals', 'worker', 'sync_integrity'], 'value': 120},
    {'key': ['intervals', 'master', 'recalculate_integrity'], 'value': 120},
]

rule_content = f"""
<!-- {str(uuid.uuid4())} -->
<group name="local,syslog,sshd,">
    <rule id="100001" level="5">
        <if_sid>5716</if_sid>
        <srcip>1.1.1.1</srcip>
        <description>sshd: authentication failed from IP 1.1.1.1.</description>
        <group>authentication_failed,pci_dss_10.2.4,pci_dss_10.2.5,</group>
    </rule>
</group>
"""


def get_sync_status(api_token):
    """Get ruleset sync status of cluster nodes.

    Args:
        api_token (str): Usable API token.

    Returns:
        list: Dictionaries containing node-name (str) and synced status (bool).
    """
    response = host_manager.make_api_call(host=test_hosts[0], method='GET', token=api_token,
                                          endpoint='/cluster/ruleset/synchronization')
    assert response['status'] == 200, f"Failed when trying to obtain cluster sync status: {response}"
    assert response['json']['data']['total_affected_items'] == len(test_hosts)
    return response['json']['data']['affected_items']
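
# For reference, the assertions above mean affected_items holds one entry per cluster node, each shaped
# roughly like {'name': '<node_name>', 'synced': <bool>}, e.g. (values illustrative):
#   [{'name': 'wazuh-master', 'synced': True},
#    {'name': 'wazuh-worker1', 'synced': True},
#    {'name': 'wazuh-worker2', 'synced': True}]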


def test_ruleset_sync_status(update_cluster_json):
    """Check if 'GET /cluster/ruleset/synchronization' API endpoint returns correct sync status.

    Verify that, after changing a custom ruleset file in the master node and calling the API endpoint mentioned above,
    the 'synced' status for all worker nodes in the response is False. Wait until an Integrity synchronization
    is run. Now, the response for all workers should be 'synced: True'.
    """
    api_token = host_manager.get_api_token(test_hosts[0])
    for host in test_hosts:
        host_manager.clear_file_without_recreate(host=host, file_path=os.path.join(WAZUH_LOGS_PATH, 'cluster.log'))

    # Check that all workers are synced before starting.
    assert all(item['synced'] for item in get_sync_status(api_token))

    # Modify a custom rule file and verify that synced status is False for all workers.
    host_manager.modify_file_content(host=test_hosts[0],
                                     path=os.path.join(WAZUH_PATH, 'etc', 'rules', 'local_rules.xml'),
                                     content=rule_content)
    assert all(not item['synced'] for item in get_sync_status(api_token) if item['name'] != test_hosts[0])

    # Wait until a Local Integrity task is run in the master and then, Integrity sync tasks are run in the workers.
    HostMonitor(inventory_path=inventory_path, messages_path=os.path.join(test_data_path, 'master_messages.yaml'),
                tmp_path=tmp_path).run()
    HostMonitor(inventory_path=inventory_path, messages_path=os.path.join(test_data_path, 'worker_messages.yaml'),
                tmp_path=tmp_path).run()

    # Verify that synced status is True for all cluster nodes again.
    assert all(item['synced'] for item in get_sync_status(api_token))