Add new module to support migration tool. #3837

Merged (18 commits) on Mar 9, 2023
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -16,6 +16,7 @@ Release report: TBD
- Add test to check if active-response netsh generates alerts when firewall is disabled. ([#3787](https://github.com/wazuh/wazuh-qa/pull/3787)) \- (Framework + Tests)
- Add new tests for logcollector 'ignore' and 'restrict' options ([#3582](https://github.com/wazuh/wazuh-qa/pull/3582)) \- (Tests)
- Add 'Force reconnect' feature to agent_simulator tool. ([#3111](https://github.com/wazuh/wazuh-qa/pull/3111)) \- (Tools)
- Add new module to support migration tool. ([#3837](https://github.com/wazuh/wazuh-qa/pull/3837))

### Changed

4 changes: 3 additions & 1 deletion deps/wazuh_testing/setup.py
@@ -28,7 +28,9 @@
'qa_ctl/deployment/vagrantfile_template.txt',
'qa_ctl/provisioning/wazuh_deployment/templates/preloaded_vars.conf.j2',
'data/qactl_conf_validator_schema.json',
'data/all_disabled_ossec.conf'
'data/all_disabled_ossec.conf',
'tools/migration_tool/delta_schema.json',
'tools/migration_tool/CVE_JSON_5.0_bundled.json'
]

scripts_list = [
5 changes: 5 additions & 0 deletions deps/wazuh_testing/wazuh_testing/tools/file.py
@@ -235,6 +235,11 @@ def delete_file(file_path):


def delete_path_recursively(path):
    '''Remove a directory recursively.

    Args:
        path (str): Directory path.
    '''
    if os.path.exists(path):
        shutil.rmtree(path, onerror=on_write_error)

2,035 changes: 2,035 additions & 0 deletions deps/wazuh_testing/wazuh_testing/tools/migration_tool/CVE_JSON_5.0_bundled.json

Large diffs are not rendered by default.

46 changes: 46 additions & 0 deletions deps/wazuh_testing/wazuh_testing/tools/migration_tool/__init__.py
@@ -0,0 +1,46 @@
'''
Copyright (C) 2015-2023, Wazuh Inc.
Created by Wazuh, Inc. <[email protected]>.
This program is free software; you can redistribute it and/or modify it under the terms
'''
import os


# Module variables
CVE5_SCHEMA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'CVE_JSON_5.0_bundled.json')
DELTA_SCHEMA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'delta_schema.json')
WORKING_DIR = '/var/wazuh'
MIGRATION_TOOL_PATH = f"{WORKING_DIR}/bin/content_migration"
MIGRATION_TOOL_LOG_PATH = f"{WORKING_DIR}/logs/content_migration.log"
GENERATED_FILES_DIR = f"{WORKING_DIR}/incoming"
SNAPSHOTS_DIR = f"{GENERATED_FILES_DIR}/snapshots"
DOWNLOADS_DIR = f"{GENERATED_FILES_DIR}/downloads"
UNCOMPRESSED_DIR = f"{GENERATED_FILES_DIR}/uncompressed"
MYSQL_CREDENTIALS = {
    'user': None,
    'password': None,
    'host': None,
    'port': None,
    'database': None
}

# Callback messages
CB_PROCESS_STARTED = r'.+\[info\]\[Orchestrator - start\]: Starting process'
CB_FETCHING_STAGE_INITIALIZED = r'.+\[info\].+handleRequest\]: Starting fetch of .+'
CB_FETCHING_STAGE_FINISHED = r'.+\[info\].+fetch\]: Download done successfully'
CB_DECOMPRESSION_STAGE_INITIALIZED = r'.+\[info\].+handleRequest\]: Starting decompression of .+'
CB_PARSER_STAGE_INITIALIZED = r'.+\[info\].+Parser - handleRequest\]: Starting parse of .+'
CB_NORMALIZER_STAGE_INITIALIZED = r'.+\[info\]\[Normalizer.+ - handleRequest]: Starting process'
CB_DIFF_STAGE_INITIALIZED = r'.+\[info\]\[DiffEngine.+ - handleRequest\]: Starting process'
CB_DIFF_STAGE_FINISHED = r'.+\[info\]\[DiffEngine.+ - diffData\]: Created last snapshot: /var/wazuh/incoming/'
CB_PUBLISHER_STAGE_INITIALIZED = r'.+\[info\]\[DiffPublisher - handleRequest\]: Starting process. Configuration:'
CB_PROCESS_COMPLETED = r'.+Migration process completed successfully!'
CB_STAGES = [
    CB_PROCESS_STARTED, CB_FETCHING_STAGE_INITIALIZED, CB_FETCHING_STAGE_FINISHED, CB_DECOMPRESSION_STAGE_INITIALIZED,
    CB_PARSER_STAGE_INITIALIZED, CB_NORMALIZER_STAGE_INITIALIZED, CB_DIFF_STAGE_INITIALIZED, CB_DIFF_STAGE_FINISHED,
    CB_PUBLISHER_STAGE_INITIALIZED, CB_PROCESS_COMPLETED
]
CB_MIGRATION_SKIPPED = r'.+\[info\]\[MigrationStatusCheck.+\]: File already migrated. Stopping migration process.'
CB_REPORT_ERROR_MESSAGE = r'Remote exited with error'
CB_INVALID_CONFIG_MESSAGE = r'No valid configuration file was found at'
ERROR_MESSAGES = [CB_REPORT_ERROR_MESSAGE, CB_INVALID_CONFIG_MESSAGE]
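
The CB_* constants above are regular expressions meant to be matched against the migration tool log. As a minimal sketch (not part of this diff) of how they could be used to confirm that every stage was logged, assuming the log at MIGRATION_TOOL_LOG_PATH is readable; check_all_stages_logged is a hypothetical helper written for illustration only:

import re

from wazuh_testing.tools.migration_tool import CB_STAGES, MIGRATION_TOOL_LOG_PATH


def check_all_stages_logged(log_path=MIGRATION_TOOL_LOG_PATH):
    '''Return the stage patterns that were not found in the migration tool log.'''
    with open(log_path) as log_file:
        log_content = log_file.read()

    # Each CB_* constant is a regex, so re.search is used instead of a plain substring check.
    return [pattern for pattern in CB_STAGES if re.search(pattern, log_content) is None]


# Usage: an empty list means every stage of the migration process was logged.
missing_stages = check_all_stages_logged()
assert missing_stages == [], f'Stages not found in the log: {missing_stages}'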
26 changes: 26 additions & 0 deletions deps/wazuh_testing/wazuh_testing/tools/migration_tool/delta_schema.json
@@ -0,0 +1,26 @@

{
    "$schema": "http://json-schema.org/draft-07/schema#",
    "$id": "/wazuh-content-deltas-schema-V1",
    "type": "object",
    "properties": {
        "cve_id": {
            "description": "The unique identifier of a vulnerability.",
            "type": "string"
        },
        "data_blob": {
            "description": "The content of the delta.",
            "type": "string"
        },
        "data_hash": {
            "description": "The hash of the delta calculated from the data_blob",
            "type": "string"
        },
        "operation": {
            "description": "The operation to be executed in the DB.",
            "type": "string",
            "enum": ["insert", "update", "delete"]
        }
    },
    "required": ["cve_id", "data_blob", "data_hash", "operation"]
}
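
For reference (not part of this diff), a minimal delta document that satisfies the schema above, checked with the validate_json_against_schema helper defined in utils.py below; the field values are made-up examples, not real content:

from wazuh_testing.tools.migration_tool import DELTA_SCHEMA_PATH
from wazuh_testing.tools.migration_tool.utils import validate_json_against_schema

# Hypothetical delta: the four fields are all required by the schema and
# 'operation' must be one of 'insert', 'update' or 'delete'.
sample_delta = {
    'cve_id': 'CVE-2023-0001',
    'data_blob': '{"example": "serialized CVE5 content"}',
    'data_hash': '0123456789abcdef',
    'operation': 'insert'
}

is_valid, error = validate_json_against_schema(sample_delta, DELTA_SCHEMA_PATH)
assert is_valid, error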
190 changes: 190 additions & 0 deletions deps/wazuh_testing/wazuh_testing/tools/migration_tool/utils.py
@@ -0,0 +1,190 @@
'''
Copyright (C) 2015-2023, Wazuh Inc.
Created by Wazuh, Inc. <[email protected]>.
This program is free software; you can redistribute it and/or modify it under the terms
'''
import glob
import json
import os
import subprocess as sbp

import mysql.connector
from jsonschema import validate
from jsonschema.exceptions import ValidationError
from mysql.connector import errorcode
from wazuh_testing.tools.migration_tool import MIGRATION_TOOL_PATH, CVE5_SCHEMA_PATH, DELTA_SCHEMA_PATH, \
    ERROR_MESSAGES, SNAPSHOTS_DIR, DOWNLOADS_DIR, MIGRATION_TOOL_LOG_PATH, \
    MYSQL_CREDENTIALS, UNCOMPRESSED_DIR
from wazuh_testing.tools.file import delete_all_files_in_folder, read_json_file, truncate_file
from wazuh_testing.tools.logging import Logging

logger = Logging('migration_tool')


def run_content_migration_tool(args='', config_files=None):
    '''Run the Content Migration Tool with the specified parameters and return its output.

    Args:
        args (str): Arguments to be passed to the tool. For instance: '--debug' or '-w /tmp/workdir'.
        config_files (list): List of configuration files to be processed.

    Returns:
        output (str): Result of the tool execution if no error was thrown.
        errors (str): Error output if the execution fails.
    '''
    output = ''
    errors = ''

    def run_tool(cmd):
        proc = sbp.Popen(cmd, shell=True, stdout=sbp.PIPE, stderr=sbp.PIPE)
        out, err = proc.communicate()

        return out, err

    for file_path in config_files:
        truncate_file(MIGRATION_TOOL_LOG_PATH)
        command = ' '.join([MIGRATION_TOOL_PATH, '-i', file_path, args])
        output, _ = run_tool(command)
        output = output.decode()
        if any(msg in output for msg in ERROR_MESSAGES):
            errors += f"\n{output}"

    return output, errors


def get_latest_delta_file(deltas_filepath):
    '''Select the most recently generated delta file (where the results are stored).

    Args:
        deltas_filepath (str): Path of the directory where the delta files are located.

    Returns:
        newest_file (str): Path of the newest delta file.
    '''
    all_files = glob.glob(os.path.join(deltas_filepath, '*.delta.*'))
    newest_file = max(all_files, key=os.path.getctime)

    return newest_file


def sanitize_configuration(configuration):
    '''Normalize the test case configuration so it can be processed correctly (keys are lowercased).

    Args:
        configuration (list): Test case configuration to be sanitized.

    Returns:
        configuration (list): Normalized configuration.
    '''
    for configurations_obj in configuration:
        configurations_list = configurations_obj['configurations']
        for config_obj in configurations_list:
            # Iterate over a copy of the keys to avoid mutating the dict while iterating it.
            for key in list(config_obj):
                config_obj[key.lower()] = config_obj.pop(key)

    return configuration


def validate_json_against_schema(json_document, schema):
    '''Validate a JSON document against the given schema.

    Args:
        json_document (dict): JSON document to be validated.
        schema (str): Path of the JSON schema used for the validation (e.g. the CVE5 or Delta schema).

    Returns:
        result (bool): True if the document is valid, False otherwise.
        error (str): Validation error message, empty if the document is valid.
    '''
    schema = read_json_file(schema)

    try:
        validate(instance=json_document, schema=schema)
    except ValidationError as err:
        return False, err.message

    return True, ''


def validate_against_delta_schema(_elements):
    '''Wrapper function. Validate a set of deltas against the Delta schema.

    Args:
        _elements (list): Delta documents to be validated against the Delta schema.

    Returns:
        _errors (list): Validation errors found, empty if every delta is valid.
    '''
    _errors = []
    for cve in _elements:
        _result, _error = validate_json_against_schema(cve, DELTA_SCHEMA_PATH)
        if _result is False:
            _errors.append(_error)

    return _errors


def validate_against_cve5_schema(_elements):
    '''Wrapper function. Validate the data_blob of a set of deltas against the CVE5 schema.

    Args:
        _elements (list): Delta documents whose 'data_blob' will be validated against the CVE5 schema.

    Returns:
        _errors (list): Validation errors found, empty if every document is valid.
    '''
    _errors = []

    for cves in _elements:
        data = json.loads(cves['data_blob'])
        _result, _error = validate_json_against_schema(data, CVE5_SCHEMA_PATH)
        if _result is False:
            _errors.append(_error)

    return _errors


def query_publisher_db(query):
    '''Query the DB created by the Content Migration Tool.

    Args:
        query (str): Query to send to the DB.

    Returns:
        result (list): Query results, empty if no query was executed or no results were returned.
    '''
    result = []

    try:
        connection = mysql.connector.connect(
            host=MYSQL_CREDENTIALS['host'],
            user=MYSQL_CREDENTIALS['user'],
            password=MYSQL_CREDENTIALS['password'],
            database=MYSQL_CREDENTIALS['database']
        )
    except mysql.connector.Error as error:
        connection = None
        if error.errno == errorcode.ER_ACCESS_DENIED_ERROR:
            logger.error('Something is wrong with your user name or password')
        elif error.errno == errorcode.ER_BAD_DB_ERROR:
            logger.error('Database does not exist')
        else:
            logger.error(error)

    if connection is not None:
        cursor = connection.cursor()
        cursor.execute(query)
        result = cursor.fetchall()
        connection.close()

    return result


def clean_migration_tool_output_files():
    '''Remove all files generated by Content Migration Tool.
    '''
    output_folders = [SNAPSHOTS_DIR, DOWNLOADS_DIR, UNCOMPRESSED_DIR]
    vendors_folders = os.listdir(SNAPSHOTS_DIR)
    for output_folder in output_folders:
        for folder in vendors_folders:
            folder = os.path.join(output_folder, folder)
            delete_all_files_in_folder(folder)


def drop_migration_tool_tables():
    '''Remove the tables created by the Content Migration Tool (CMT).
    '''
    tables = query_publisher_db('SHOW tables;')
    for table in tables:
        # `table` is a tuple with a single element, so the first item is selected
        query_publisher_db(f"DROP TABLE {table[0]};")
2 changes: 2 additions & 0 deletions requirements.txt
@@ -48,3 +48,5 @@ deepdiff==5.6.0; platform_system == "Linux" or platform_system=='Windows'
libcst==0.3.23 ; python_version <= '3.6'
treelib==1.6.1
prettytable; platform_system == "Linux"
mysql-connector-python==8.0.32; platform_system == 'Linux' and python_version >= '3.7'
protobuf>=3.11.0,<=3.20.3; platform_system == 'Linux' and python_version >= '3.7'