From 470d476ced6d829dc36c4c0b5c2d91f2c9bdb370 Mon Sep 17 00:00:00 2001 From: quebim Date: Sun, 4 Jun 2023 22:20:35 -0300 Subject: [PATCH 01/10] fix(#4188): temporal fix, pending more research --- .../wazuh_testing/tools/monitoring.py | 8 +-- .../data/enroll_ssl_options_tests.yaml | 9 --- .../test_authd/test_authd_ssl_options.py | 64 ++++++++++--------- 3 files changed, 37 insertions(+), 44 deletions(-) diff --git a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py index 87e0472c3f..998940251f 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py +++ b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py @@ -269,16 +269,16 @@ def open(self): versions_maps = { "ssl_v2_3": ssl.PROTOCOL_SSLv23, "ssl_tls": ssl.PROTOCOL_TLS, - "ssl_tlsv1_1": ssl.PROTOCOL_TLSv1, + "ssl_tlsv1_1": ssl.PROTOCOL_TLSv1_1, "ssl_tlsv1_2": ssl.PROTOCOL_TLSv1_2, } - ssl_version = versions_maps.get(self.connection_protocol.lower(), None) + ssl_version = versions_maps.get(self.connection_protocol.lower()) if ssl_version is None: raise TypeError( f'Invalid or unsupported SSL version specified, valid versions are: {list(versions_maps.keys())}') # Wrap socket into ssl - self.sock = ssl.wrap_socket(self.sock, ssl_version=ssl_version, ciphers=self.ciphers, - certfile=self.certificate, keyfile=self.keyfile) + self.context = ssl.SSLContext(ssl_version) + self.sock = self.context.wrap_socket(self.sock, server_side=False) self.ssl = True # Connect only if protocol is TCP diff --git a/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml b/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml index a6b5c2bc3a..bbdbee063c 100644 --- a/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml +++ b/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml @@ -3,7 +3,6 @@ name: "SSL - Default" description: "Default ssl configuration" test_case: - - expect: "output" input: "OSSEC A:'user1'" output: "OSSEC K:'" @@ -13,7 +12,6 @@ name: "SSL - Wrong ciphers" description: "Send a message with low encryption cypher suites" test_case: - - expect: "open_error" input: "OSSEC A:'user1'" error: "handshake failure" @@ -23,7 +21,6 @@ name: "SSL - Incompatible ciphers from Agent" description: "Send a message with low encryption cypher suites" test_case: - - ciphers: "CAMELLIA" protocol: "ssl_tlsv1_2" expect: "open_error" @@ -32,7 +29,6 @@ name: "SSL - Incompatible ciphers from Manger" description: "Send a message with low encryption cypher suites" test_case: - - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" protocol: "ssl_tlsv1_2" expect: "open_error" @@ -43,7 +39,6 @@ name: "SSL - Compatible ciphers from Agent" description: "Send a message with a compatible yet not default cypher" test_case: - - expect: "output" input: "OSSEC A:'user1'" output: "OSSEC K:'" @@ -53,7 +48,6 @@ name: "SSL - Compatible ciphers from Manger" description: "Send a message with a compatible yet not default cypher" test_case: - - expect: "output" input: "OSSEC A:'user1'" output: "OSSEC K:'" @@ -65,7 +59,6 @@ name: "SSL - Wrong TLS version (TLSV1_1)" description: "Send a message with a different TLS version with Auto negotiate disabled" test_case: - - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" protocol: "ssl_tlsv1_1" expect: "open_error" @@ -76,7 +69,6 @@ name: "SSL - Auto Negotiate TLS version (TLSV1_1)" description: "Send a message with a different TLS version with Auto negotiate enabled" test_case: - - ciphers: 
"HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" protocol: "ssl_tlsv1_1" expect: "output" @@ -88,7 +80,6 @@ name: "Valid Certificates - Manager verification without host" description: "Enables CA Certificate and validates that conneciton is acepted when valid certs are provided" test_case: - - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" protocol: "ssl_tlsv1_2" expect: "output" diff --git a/tests/integration/test_authd/test_authd_ssl_options.py b/tests/integration/test_authd/test_authd_ssl_options.py index 4f87e7dc46..3a46783ac9 100644 --- a/tests/integration/test_authd/test_authd_ssl_options.py +++ b/tests/integration/test_authd/test_authd_ssl_options.py @@ -144,7 +144,7 @@ def callback_agentd_startup(line): time.sleep(1) -def test_ossec_auth_configurations(get_configuration, configure_environment, configure_sockets_environment): +def test_ossec_auth_configurations(get_configuration, configure_environment, configure_sockets_environment_function): ''' description: Checks if the 'SSL' settings of the 'wazuh-authd' daemon work correctly by enrolling agents @@ -184,34 +184,36 @@ def test_ossec_auth_configurations(get_configuration, configure_environment, con ''' current_test = get_current_test() - test_case = ssl_configuration_tests[current_test]['test_case'] + config = ssl_configuration_tests[current_test]['test_case'] override_wazuh_conf(get_configuration) - for config in test_case: - address, family, connection_protocol = receiver_sockets_params[0] - SSL_socket = SocketController(address, family=family, connection_protocol=connection_protocol, - open_at_start=False) - ciphers = config['ciphers'] - protocol = config['protocol'] - SSL_socket.set_ssl_configuration(ciphers=ciphers, connection_protocol=protocol) - expect = config['expect'] - try: - SSL_socket.open() - except ssl.SSLError as exception: - if expect == 'open_error': - # We expected the error here, check message - assert config['error'] in str(exception), 'Expected message does not match!' - continue - else: - # We did not expect this error, fail test - raise - SSL_socket.send(config['input'], size=False) - if expect == 'output': - # Output is expected - expected = config['output'] - if expected: - response = SSL_socket.receive().decode() - assert response, 'Failed connection stage {}: {}'.format(test_case.index(config) + 1, config['stage']) - assert response[:len(expected)] == expected, \ - 'Failed test case stage {}: {}'.format(test_case.index(config) + 1, config['stage']) - - return + # print("----- TEST CASE -----\n", test_case) + # for config in test_case: + address, family, connection_protocol = receiver_sockets_params[0] + SSL_socket = SocketController(address, family=family, connection_protocol=connection_protocol, + open_at_start=False) + ciphers = config['ciphers'] + protocol = config['protocol'] + expect = config['expect'] + SSL_socket.set_ssl_configuration(ciphers=ciphers, connection_protocol=protocol) + try: + SSL_socket.open() + except ssl.SSLError as exception: + if expect == 'open_error': + # We expected the error here, check message + assert config['error'] in exception.strerror, 'Expected message does not match!' 
+ return + else: + # We did not expect this error, fail test + raise + if not config.get('input'): + return + SSL_socket.send(config['input'], size=False) + if expect == 'output': + # Output is expected + expected = config['output'] + if expected: + response = SSL_socket.receive().decode() + assert response, 'Failed connection stage: {}'.format(config['stage']) + assert response[:len(expected)] == expected, 'Failed test case stage: {}'.format(config['stage']) + + SSL_socket.close() From 2522c96c46d73a3c228016a2340bcbf631c17b23 Mon Sep 17 00:00:00 2001 From: quebim Date: Mon, 5 Jun 2023 22:15:01 -0300 Subject: [PATCH 02/10] fix(#4188): skipping tests when no compatible ssl version is used --- .../data/enroll_ssl_options_tests.yaml | 42 +++++++++---------- .../test_authd/test_authd_ssl_options.py | 16 +++++-- 2 files changed, 34 insertions(+), 24 deletions(-) diff --git a/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml b/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml index bbdbee063c..c6ade373da 100644 --- a/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml +++ b/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml @@ -1,4 +1,25 @@ --- + - + name: "SSL - Wrong TLS version (TLSV1_1)" + description: "Send a message with a different TLS version with Auto negotiate disabled" + test_case: + ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" + protocol: "ssl_tlsv1_1" + expect: "open_error" + error: "alert protocol version" + # Override ossec.conf + SSL_AUTO_NEGOTIATE: 'no' + - + name: "SSL - Auto Negotiate TLS version (TLSV1_1)" + description: "Send a message with a different TLS version with Auto negotiate enabled" + test_case: + ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" + protocol: "ssl_tlsv1_1" + expect: "output" + input: "OSSEC A:'user1'" + output: "OSSEC K:'" + # Override ossec.conf + SSL_AUTO_NEGOTIATE: 'yes' - name: "SSL - Default" description: "Default ssl configuration" @@ -55,27 +76,6 @@ protocol: "ssl_tlsv1_2" # Override ossec.conf CIPHERS: "SHA256" - - - name: "SSL - Wrong TLS version (TLSV1_1)" - description: "Send a message with a different TLS version with Auto negotiate disabled" - test_case: - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_1" - expect: "open_error" - error: "alert protocol version" - # Override ossec.conf - SSL_AUTO_NEGOTIATE: 'no' - - - name: "SSL - Auto Negotiate TLS version (TLSV1_1)" - description: "Send a message with a different TLS version with Auto negotiate enabled" - test_case: - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_1" - expect: "output" - input: "OSSEC A:'user1'" - output: "OSSEC K:'" - # Override ossec.conf - SSL_AUTO_NEGOTIATE: 'yes' - name: "Valid Certificates - Manager verification without host" description: "Enables CA Certificate and validates that conneciton is acepted when valid certs are provided" diff --git a/tests/integration/test_authd/test_authd_ssl_options.py b/tests/integration/test_authd/test_authd_ssl_options.py index 3a46783ac9..f228f9f902 100644 --- a/tests/integration/test_authd/test_authd_ssl_options.py +++ b/tests/integration/test_authd/test_authd_ssl_options.py @@ -141,10 +141,9 @@ def callback_agentd_startup(line): log_monitor = FileMonitor(LOG_FILE_PATH) log_monitor.start(timeout=30, callback=callback_agentd_startup) - time.sleep(1) -def test_ossec_auth_configurations(get_configuration, configure_environment, configure_sockets_environment_function): +def 
test_ossec_auth_configurations(get_configuration, configure_environment, configure_sockets_environment): ''' description: Checks if the 'SSL' settings of the 'wazuh-authd' daemon work correctly by enrolling agents @@ -183,7 +182,15 @@ def test_ossec_auth_configurations(get_configuration, configure_environment, con - ssl ''' current_test = get_current_test() - + + import subprocess + + command = ['openssl version'] + supported_openssl = [ "1.1.0", "1.0.2", "1.0.0", "0.9.8"] + output_object = subprocess.run(command, shell=True, + text=True, capture_output=True) + openssl_version = output_object.stdout.split()[1] + config = ssl_configuration_tests[current_test]['test_case'] override_wazuh_conf(get_configuration) # print("----- TEST CASE -----\n", test_case) @@ -194,6 +201,9 @@ def test_ossec_auth_configurations(get_configuration, configure_environment, con ciphers = config['ciphers'] protocol = config['protocol'] expect = config['expect'] + + if openssl_version not in supported_openssl and protocol == "ssl_tlsv1_1": + pytest.skip("Unsuported TLS version") SSL_socket.set_ssl_configuration(ciphers=ciphers, connection_protocol=protocol) try: SSL_socket.open() From 2b3a4889ad968a4d40f4f26753b1af296ecb78da Mon Sep 17 00:00:00 2001 From: quebim Date: Tue, 6 Jun 2023 11:02:17 -0300 Subject: [PATCH 03/10] fix(#4188): skip TLS 1.1 tests --- .../data/enroll_ssl_options_tests.yaml | 173 +++++++++--------- .../test_authd/test_authd_ssl_options.py | 32 ++-- 2 files changed, 99 insertions(+), 106 deletions(-) diff --git a/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml b/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml index c6ade373da..b07d936ca0 100644 --- a/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml +++ b/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml @@ -1,87 +1,86 @@ ---- - - - name: "SSL - Wrong TLS version (TLSV1_1)" - description: "Send a message with a different TLS version with Auto negotiate disabled" - test_case: - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_1" - expect: "open_error" - error: "alert protocol version" - # Override ossec.conf - SSL_AUTO_NEGOTIATE: 'no' - - - name: "SSL - Auto Negotiate TLS version (TLSV1_1)" - description: "Send a message with a different TLS version with Auto negotiate enabled" - test_case: - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_1" - expect: "output" - input: "OSSEC A:'user1'" - output: "OSSEC K:'" - # Override ossec.conf - SSL_AUTO_NEGOTIATE: 'yes' - - - name: "SSL - Default" - description: "Default ssl configuration" - test_case: - expect: "output" - input: "OSSEC A:'user1'" - output: "OSSEC K:'" - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_2" - - - name: "SSL - Wrong ciphers" - description: "Send a message with low encryption cypher suites" - test_case: - expect: "open_error" - input: "OSSEC A:'user1'" - error: "handshake failure" - ciphers: "CAMELLIA" - protocol: "ssl_tlsv1_2" - - - name: "SSL - Incompatible ciphers from Agent" - description: "Send a message with low encryption cypher suites" - test_case: - ciphers: "CAMELLIA" - protocol: "ssl_tlsv1_2" - expect: "open_error" - error: "handshake failure" - - - name: "SSL - Incompatible ciphers from Manger" - description: "Send a message with low encryption cypher suites" - test_case: - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_2" - expect: "open_error" - error: 
"handshake failure" - # Override ossec.conf - CIPHERS: "CAMELLIA" - - - name: "SSL - Compatible ciphers from Agent" - description: "Send a message with a compatible yet not default cypher" - test_case: - expect: "output" - input: "OSSEC A:'user1'" - output: "OSSEC K:'" - ciphers: "SHA256" - protocol: "ssl_tlsv1_2" - - - name: "SSL - Compatible ciphers from Manger" - description: "Send a message with a compatible yet not default cypher" - test_case: - expect: "output" - input: "OSSEC A:'user1'" - output: "OSSEC K:'" - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_2" - # Override ossec.conf - CIPHERS: "SHA256" - - - name: "Valid Certificates - Manager verification without host" - description: "Enables CA Certificate and validates that conneciton is acepted when valid certs are provided" - test_case: - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_2" - expect: "output" - input: "OSSEC A:'user1'" - output: "OSSEC K:'" +- + name: "SSL - Default" + description: "Default ssl configuration" + test_case: + expect: "output" + input: "OSSEC A:'user1'" + output: "OSSEC K:'" + ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" + protocol: "ssl_tlsv1_2" +- + name: "SSL - Wrong ciphers" + description: "Send a message with low encryption cypher suites" + test_case: + expect: "open_error" + input: "OSSEC A:'user1'" + error: "handshake failure" + ciphers: "CAMELLIA" + protocol: "ssl_tlsv1_2" +- + name: "SSL - Incompatible ciphers from Agent" + description: "Send a message with low encryption cypher suites" + test_case: + ciphers: "CAMELLIA" + protocol: "ssl_tlsv1_2" + expect: "open_error" + error: "handshake failure" +- + name: "SSL - Incompatible ciphers from Manger" + description: "Send a message with low encryption cypher suites" + test_case: + ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" + protocol: "ssl_tlsv1_2" + expect: "open_error" + error: "handshake failure" + # Override ossec.conf + CIPHERS: "CAMELLIA" +- + name: "SSL - Compatible ciphers from Agent" + description: "Send a message with a compatible yet not default cypher" + test_case: + expect: "output" + input: "OSSEC A:'user1'" + output: "OSSEC K:'" + ciphers: "SHA256" + protocol: "ssl_tlsv1_2" +- + name: "SSL - Wrong TLS version (TLSV1_1)" + description: "Send a message with a different TLS version with Auto negotiate disabled" + test_case: + ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" + protocol: "ssl_tlsv1_1" + expect: "open_error" + error: "alert protocol version" + # Override ossec.conf + SSL_AUTO_NEGOTIATE: 'no' +- + name: "SSL - Auto Negotiate TLS version (TLSV1_1)" + description: "Send a message with a different TLS version with Auto negotiate enabled" + test_case: + ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" + protocol: "ssl_tlsv1_1" + expect: "output" + input: "OSSEC A:'user1'" + output: "OSSEC K:'" + # Override ossec.conf + SSL_AUTO_NEGOTIATE: 'yes' +- + name: "SSL - Compatible ciphers from Manger" + description: "Send a message with a compatible yet not default cypher" + test_case: + expect: "output" + input: "OSSEC A:'user1'" + output: "OSSEC K:'" + ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" + protocol: "ssl_tlsv1_2" + # Override ossec.conf + CIPHERS: "SHA256" +- + name: "Valid Certificates - Manager verification without host" + description: "Enables CA Certificate and validates that conneciton is acepted when valid certs are provided" + test_case: + ciphers: 
"HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" + protocol: "ssl_tlsv1_2" + expect: "output" + input: "OSSEC A:'user1'" + output: "OSSEC K:'" diff --git a/tests/integration/test_authd/test_authd_ssl_options.py b/tests/integration/test_authd/test_authd_ssl_options.py index f228f9f902..8b30415d8a 100644 --- a/tests/integration/test_authd/test_authd_ssl_options.py +++ b/tests/integration/test_authd/test_authd_ssl_options.py @@ -182,42 +182,35 @@ def test_ossec_auth_configurations(get_configuration, configure_environment, con - ssl ''' current_test = get_current_test() + config = ssl_configuration_tests[current_test]['test_case'] + ciphers = config['ciphers'] + protocol = config['protocol'] + expect = config['expect'] - import subprocess - - command = ['openssl version'] - supported_openssl = [ "1.1.0", "1.0.2", "1.0.0", "0.9.8"] - output_object = subprocess.run(command, shell=True, - text=True, capture_output=True) - openssl_version = output_object.stdout.split()[1] + if protocol == 'ssl_tlsv1_1': + pytest.skip('TLS 1.1 is deprecated and not working on several pyOpenSSL versions.') - config = ssl_configuration_tests[current_test]['test_case'] override_wazuh_conf(get_configuration) - # print("----- TEST CASE -----\n", test_case) - # for config in test_case: + address, family, connection_protocol = receiver_sockets_params[0] SSL_socket = SocketController(address, family=family, connection_protocol=connection_protocol, open_at_start=False) - ciphers = config['ciphers'] - protocol = config['protocol'] - expect = config['expect'] - if openssl_version not in supported_openssl and protocol == "ssl_tlsv1_1": - pytest.skip("Unsuported TLS version") SSL_socket.set_ssl_configuration(ciphers=ciphers, connection_protocol=protocol) + try: SSL_socket.open() except ssl.SSLError as exception: if expect == 'open_error': - # We expected the error here, check message + # We expected the error here, check message. assert config['error'] in exception.strerror, 'Expected message does not match!' return else: - # We did not expect this error, fail test + # We did not expect this error, fail test. raise - if not config.get('input'): - return + SSL_socket.send(config['input'], size=False) + if expect == 'output': # Output is expected expected = config['output'] @@ -226,4 +219,5 @@ def test_ossec_auth_configurations(get_configuration, configure_environment, con assert response, 'Failed connection stage: {}'.format(config['stage']) assert response[:len(expected)] == expected, 'Failed test case stage: {}'.format(config['stage']) + # Finally close the socket. TODO: This must be handled on a fixture. 
+    SSL_socket.close()

From f061bff660063274a92c2a5a6a9d3e2f9cb44049 Mon Sep 17 00:00:00 2001
From: quebim
Date: Tue, 6 Jun 2023 11:14:23 -0300
Subject: [PATCH 04/10] revert(#4188): undo changes on monitoring

---
 deps/wazuh_testing/wazuh_testing/tools/monitoring.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py
index 998940251f..87e0472c3f 100644
--- a/deps/wazuh_testing/wazuh_testing/tools/monitoring.py
+++ b/deps/wazuh_testing/wazuh_testing/tools/monitoring.py
@@ -269,16 +269,16 @@ def open(self):
         versions_maps = {
             "ssl_v2_3": ssl.PROTOCOL_SSLv23,
             "ssl_tls": ssl.PROTOCOL_TLS,
-            "ssl_tlsv1_1": ssl.PROTOCOL_TLSv1_1,
+            "ssl_tlsv1_1": ssl.PROTOCOL_TLSv1,
             "ssl_tlsv1_2": ssl.PROTOCOL_TLSv1_2,
         }
-        ssl_version = versions_maps.get(self.connection_protocol.lower())
+        ssl_version = versions_maps.get(self.connection_protocol.lower(), None)
         if ssl_version is None:
             raise TypeError(
                 f'Invalid or unsupported SSL version specified, valid versions are: {list(versions_maps.keys())}')
         # Wrap socket into ssl
-        self.context = ssl.SSLContext(ssl_version)
-        self.sock = self.context.wrap_socket(self.sock, server_side=False)
+        self.sock = ssl.wrap_socket(self.sock, ssl_version=ssl_version, ciphers=self.ciphers,
+                                    certfile=self.certificate, keyfile=self.keyfile)
         self.ssl = True

         # Connect only if protocol is TCP

From 62310d2b82b7c478830f887920a68d2b27e6a62a Mon Sep 17 00:00:00 2001
From: quebim
Date: Tue, 6 Jun 2023 11:15:34 -0300
Subject: [PATCH 05/10] docs(#4188): update CHANGELOG

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 65a30bcdc8..1cc6700d5c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -19,6 +19,7 @@ All notable changes to this project will be documented in this file.
 ### Changed

+- Skip `test_authd_ssl_options` cases that use TLS 1.1 causing errors on several OpenSSL versions \- (Tests)
 - Update `get_test_cases_data` function so it handles fim_mode parameter ([#4185](https://github.com/wazuh/wazuh-qa/pull/4185)) \- (Framework)
 - Change FIM `regular_file_cud` and `EventChecker` file modification steps ([#4183](https://github.com/wazuh/wazuh-qa/pull/4183)) \- (Framework + Tests)
 - Refactor library to change the environment ([#4145](https://github.com/wazuh/wazuh-qa/pull/4145)) \- (Framework)

From 43ed0a31c9cf4defa7366bf4806b0930828a0317 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Bel=C3=A9n=20Valdivia?=
Date: Thu, 1 Jun 2023 10:42:17 -0700
Subject: [PATCH 06/10] Syscollector test module refactor (#3579)

* feat(#3541): enhacement syscollector test
* feat(#3541): added test cases
* refactor(#3541): deleted old files
* feat(#3541): added test cases folder
* feat(#3541): added new description
* fix(#3541): deleted space
* fix(#3541): linter errors
* fix(#3541): Fix changelog error
* fix(#3451): deleted imports
* fix(#3541): adapt cases to new expected payload
* fix(#3541): style to docstrings and remove remaining print

---------

Co-authored-by: quebim
---
 CHANGELOG.md | 1 +
 tests/integration/test_analysisd/conftest.py | 9 +-
 .../data/{ => rules}/syscollector_rules.xml | 0
 .../test_syscollector/data/syscollector.yaml | 325 --------------
 .../cases_syscollector_integration.yaml | 414 ++++++++++++++++++
 .../test_syscollector_events.py | 152 -------
 .../test_syscollector_integration.py | 137 ++++++
 7 files changed, 557 insertions(+), 481 deletions(-)
 rename tests/integration/test_analysisd/test_syscollector/data/{ => rules}/syscollector_rules.xml (100%)
 delete mode 100644 tests/integration/test_analysisd/test_syscollector/data/syscollector.yaml
 create mode 100644 tests/integration/test_analysisd/test_syscollector/data/test_cases/cases_syscollector_integration.yaml
 delete mode 100644 tests/integration/test_analysisd/test_syscollector/test_syscollector_events.py
 create mode 100644 tests/integration/test_analysisd/test_syscollector/test_syscollector_integration.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 1cc6700d5c..21c1b5bdba 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -170,6 +170,7 @@ Release report: https://github.com/wazuh/wazuh/issues/15504
 - Analysisd: Reduce execution time of tests with tier 0 ([#2546](https://github.com/wazuh/wazuh-qa/pull/2546)) \- (Tests)
 - Adapt logtest ITs given the rules skipping ([#2200](https://github.com/wazuh/wazuh-qa/pull/2200)) \- (Tests)
 - Updated the Authd response when a multigroup is too long ([#3746](https://github.com/wazuh/wazuh-qa/pull/3746)) \- (Tests)
+- Refactor ITs related to syscollector deltas alerts ([#3579](https://github.com/wazuh/wazuh-qa/pull/3579)) \- (Tests)

 ### Fixed

diff --git a/tests/integration/test_analysisd/conftest.py b/tests/integration/test_analysisd/conftest.py
index 72486c09eb..425a883ec5 100644
--- a/tests/integration/test_analysisd/conftest.py
+++ b/tests/integration/test_analysisd/conftest.py
@@ -47,13 +47,14 @@ def callback_analysisd_startup(line):


 @pytest.fixture(scope='module')
-def configure_custom_rules(request, get_configuration):
+def configure_custom_rules(request):
     """Configure a syscollector custom rules for testing.
     Restarting wazuh-analysisd is required to apply this changes.
""" - data_dir = getattr(request.module, 'data_dir') - source_rule = os.path.join(data_dir, get_configuration['rule_file']) - target_rule = os.path.join(CUSTOM_RULES_PATH, get_configuration['rule_file']) + data_dir = getattr(request.module, 'TEST_RULES_PATH') + data_file = getattr(request.module, 'rule_file') + source_rule = os.path.join(data_dir, data_file) + target_rule = os.path.join(CUSTOM_RULES_PATH, data_file) # copy custom rule with specific privileges shutil.copy(source_rule, target_rule) diff --git a/tests/integration/test_analysisd/test_syscollector/data/syscollector_rules.xml b/tests/integration/test_analysisd/test_syscollector/data/rules/syscollector_rules.xml similarity index 100% rename from tests/integration/test_analysisd/test_syscollector/data/syscollector_rules.xml rename to tests/integration/test_analysisd/test_syscollector/data/rules/syscollector_rules.xml diff --git a/tests/integration/test_analysisd/test_syscollector/data/syscollector.yaml b/tests/integration/test_analysisd/test_syscollector/data/syscollector.yaml deleted file mode 100644 index 3db7441174..0000000000 --- a/tests/integration/test_analysisd/test_syscollector/data/syscollector.yaml +++ /dev/null @@ -1,325 +0,0 @@ -- - name: Test syscollector events - rule_file: syscollector_rules.xml - event_header: '(myhostname) any->syscollector:' - test_case: - - - description: Process creation - event_payload: >- - {"data":{"argvs":"180","checksum":"343ed10dc637334a7400d01b8a28deb8db5cba28","cmd":"","egroup":"root", - "euser":"root","fgroup":"root","name":"sleep","nice":0,"nlwp":1,"pgrp":116167,"pid":"156102","ppid":116169, - "priority":20,"processor":3,"resident":129,"rgroup":"root","ruser":"root","scan_time":"2021/10/13 14:57:07", - "session":116167,"sgroup":"root","share":114,"size":2019,"start_time":5799612,"state":"S","stime":0, - "suser":"root","tgid":156102,"tty":0,"utime":0,"vm_size":8076},"operation":"INSERTED", - "type":"dbsync_processes"} - alert_expected_values: - rule.id: '100301' - data: >- - {"type":"dbsync_processes","process":{"pid":"156102","name":"sleep","state":"S","ppid":"116169","utime":"0", - "stime":"0","args":"180","euser":"root","ruser":"root","suser":"root","egroup":"root","rgroup":"root", - "sgroup":"root","fgroup":"root","priority":"20","nice":"0","size":"2019","vm_size":"8076","resident":"129", - "share":"114","start_time":"5799612","pgrp":"116167","session":"116167","nlwp":"1","tgid":"156102","tty":"0", - "processor":"3"},"operation_type":"INSERTED"} - - - description: Process modification - event_payload: >- - {"data":{"argvs":"180","checksum":"45cb0637a5b43ed1a819ac6cb4cf4d6d4f15f87","cmd":"","egroup":"root", - "euser":"root","fgroup":"root","name":"sleep","nice":0,"nlwp":1,"pgrp":116167,"pid":"156102","ppid":116169, - "priority":10,"processor":3,"resident":129,"rgroup":"root","ruser":"root","scan_time":"2021/10/13 14:57:08", - "session":116167,"sgroup":"root","share":114,"size":2019,"start_time":5799612,"state":"S","stime":0, - "suser":"root","tgid":156102,"tty":0,"utime":0,"vm_size":8076},"operation":"MODIFIED", - "type":"dbsync_processes"} - - alert_expected_values: - rule.id: '100302' - data: >- - {"type":"dbsync_processes","process":{"pid":"156102","name":"sleep","state":"S","ppid":"116169","utime":"0", - "stime":"0","args":"180","euser":"root","ruser":"root","suser":"root","egroup":"root","rgroup":"root", - "sgroup":"root","fgroup":"root","priority":"10","nice":"0","size":"2019","vm_size":"8076","resident":"129", - 
"share":"114","start_time":"5799612","pgrp":"116167","session":"116167","nlwp":"1","tgid":"156102","tty":"0", - "processor":"3"},"operation_type":"MODIFIED"} - - - description: Process deletion - event_payload: >- - {"data":{"argvs":"180","checksum":"45cb0637a5b43ed1a819ac6cb4cf4d6d4f15f87","cmd":"","egroup":"root", - "euser":"root","fgroup":"root","name":"sleep","nice":0,"nlwp":1,"pgrp":116167,"pid":"156102","ppid":116169, - "priority":10,"processor":3,"resident":129,"rgroup":"root","ruser":"root","scan_time":"2021/10/13 14:57:09", - "session":116167,"sgroup":"root","share":114,"size":2019,"start_time":5799612,"state":"S","stime":0, - "suser":"root","tgid":156102,"tty":0,"utime":0,"vm_size":8076},"operation":"DELETED", - "type":"dbsync_processes"} - alert_expected_values: - rule.id: '100303' - data: >- - {"type":"dbsync_processes","process":{"pid":"156102","name":"sleep","state":"S","ppid":"116169","utime":"0", - "stime":"0","args":"180","euser":"root","ruser":"root","suser":"root","egroup":"root","rgroup":"root", - "sgroup":"root","fgroup":"root","priority":"10","nice":"0","size":"2019","vm_size":"8076","resident":"129", - "share":"114","start_time":"5799612","pgrp":"116167","session":"116167","nlwp":"1","tgid":"156102","tty":"0", - "processor":"3"},"operation_type":"DELETED"} - - - description: Port creation - event_payload: >- - {"data":{"checksum":"eff13e52290143eb5b5b9b8c191902609f37c712","inode":494908, - "item_id":"e2c92964ad145a635139f6318057506e386e00a3","local_ip":"0.0.0.0","local_port":34340,"pid":0, - "process":null,"protocol":"tcp","remote_ip":"0.0.0.0","remote_port":0,"rx_queue":0, - "scan_time":"2021/10/13 14:40:02","state":"listening","tx_queue":0},"operation":"INSERTED", - "type":"dbsync_ports"} - alert_expected_values: - rule.id: '100311' - data: >- - {"type":"dbsync_ports","port":{"protocol":"tcp","local_ip":"0.0.0.0","local_port":"34340", - "remote_ip":"0.0.0.0","remote_port":"0","tx_queue":"0","rx_queue":"0","inode":"494908","state":"listening", - "pid":"0"},"operation_type":"INSERTED"} - - - description: Port modification - event_payload: >- - {"data":{"checksum":"eff13e52290143eb5b5b9b8c191902609f37c713","inode":494908, - "item_id":"e2c92964ad145a635139f6318057506e386e00a3","local_ip":"0.0.0.0","local_port":34340,"pid":0, - "process":null,"protocol":"tcp","remote_ip":"0.0.0.0","remote_port":0,"rx_queue":1, - "scan_time":"2021/10/13 14:40:03","state":"listening","tx_queue":1},"operation":"MODIFIED", - "type":"dbsync_ports"} - alert_expected_values: - rule.id: '100312' - data: >- - {"type":"dbsync_ports","port":{"protocol":"tcp","local_ip":"0.0.0.0","local_port":"34340", - "remote_ip":"0.0.0.0","remote_port":"0","tx_queue":"1","rx_queue":"1","inode":"494908","state":"listening", - "pid":"0"},"operation_type":"MODIFIED"} - - - description: Port deletion - event_payload: >- - {"data":{"checksum":"eff13e52290143eb5b5b9b8c191902609f37c713","inode":494908, - "item_id":"e2c92964ad145a635139f6318057506e386e00a3","local_ip":"0.0.0.0","local_port":34340,"pid":0, - "process":null,"protocol":"tcp","remote_ip":"0.0.0.0","remote_port":0,"rx_queue":1, - "scan_time":"2021/10/13 14:40:04","state":"listening","tx_queue":1},"operation":"DELETED", - "type":"dbsync_ports"} - alert_expected_values: - rule.id: '100313' - data: >- - {"type":"dbsync_ports","port":{"protocol":"tcp","local_ip":"0.0.0.0","local_port":"34340", - "remote_ip":"0.0.0.0","remote_port":"0","tx_queue":"1","rx_queue":"1","inode":"494908","state":"listening", - "pid":"0"},"operation_type":"DELETED"} - - - description: 
Osinfo creation - event_payload: >- - {"data":{"checksum":"1634140017886803554","architecture":"x86_64","hostname":"UBUNTU","os_build":"7601", - "os_major":"6","os_minor":"1","os_name":"Microsoft Windows 7","os_release":"sp1","os_version":"6.1.7601", - "os_display_version":"test"},"operation":"INSERTED","type":"dbsync_osinfo"} - alert_expected_values: - rule.id: '100321' - data: >- - {"type":"dbsync_osinfo","os":{"hostname":"UBUNTU","architecture":"x86_64","name":"Microsoft Windows 7", - "version":"6.1.7601","major":"6","minor":"1","build":"7601","os_release":"sp1","display_version":"test"}, - "operation_type":"INSERTED"} - - - description: Osinfo modification - event_payload: >- - {"data":{"checksum":"1634140017886803554","architecture":"x86_64","hostname":"UBUNTU","os_build":"7601", - "os_major":"6","os_minor":"1","os_name":"Microsoft Windows 7","os_release":"sp1","os_version":"6.1.7601", - "os_display_version":"test_text"},"operation":"MODIFIED","type":"dbsync_osinfo"} - alert_expected_values: - rule.id: '100322' - data: >- - {"type":"dbsync_osinfo","os":{"hostname":"UBUNTU","architecture":"x86_64","name":"Microsoft Windows 7", - "version":"6.1.7601","major":"6","minor":"1","build":"7601","os_release":"sp1","display_version":"test_text"}, - "operation_type":"MODIFIED"} - - - description: Hwinfo creation - event_payload: >- - {"data":{"scan_time":"2021/10/13 14:41:43","board_serial":"Intel Corporation", - "checksum":"af7b22eef8f5e06c04af4db49c9f8d1d28963918","cpu_MHz":2904,"cpu_cores":2, - "cpu_name":"Intel(R) Core(TM) i5-9400 CPU @ 2.90GHz","ram_free":2257872,"ram_total":4972208,"ram_usage":54}, - "operation":"INSERTED","type":"dbsync_hwinfo"} - alert_expected_values: - rule.id: '100331' - data: >- - {"type":"dbsync_hwinfo","hardware":{"serial":"Intel Corporation", - "cpu_name":"Intel(R) Core(TM) i5-9400 CPU @ 2.90GHz","cpu_cores":"2","cpu_mhz":"2904","ram_total":"4972208", - "ram_free":"2257872","ram_usage":"54"},"operation_type":"INSERTED"} - - - description: Hwinfo modification - event_payload: >- - {"data":{"scan_time":"2021/10/13 14:41:44","board_serial":"Intel Corporation", - "checksum":"af7b22eef8f5e06c04af4db49c9f8d1d28963919","cpu_MHz":2904,"cpu_cores":4, - "cpu_name":"Intel(R) Core(TM) i5-9400 CPU @ 2.90GHz","ram_free":2257872,"ram_total":4972208,"ram_usage":54}, - "operation":"MODIFIED","type":"dbsync_hwinfo"} - alert_expected_values: - rule.id: '100332' - data: >- - {"type":"dbsync_hwinfo","hardware":{"serial":"Intel Corporation", - "cpu_name":"Intel(R) Core(TM) i5-9400 CPU @ 2.90GHz","cpu_cores":"4","cpu_mhz":"2904","ram_total":"4972208", - "ram_free":"2257872","ram_usage":"54"},"operation_type":"MODIFIED"} - - - description: Package creation - event_payload: >- - {"data":{"architecture":"amd64","checksum":"1c1bf8bbc20caef77010f960461cc20fb9c67568", - "description":"Qt 5 OpenGL module","format":"deb","groups":"libs", - "item_id":"caa4868d177fbebc5b145a2a92497ebcf566838a","multiarch":"same","name":"libqt5opengl5", - "priority":"optional","scan_time":"2021/10/13 15:10:49","size":572,"source":"qtbase-opensource-src", - "vendor":"Ubuntu Developers ","version":"5.12.8+dfsg-0ubuntu1"}, - "operation":"INSERTED","type":"dbsync_packages"} - alert_expected_values: - rule.id: '100341' - data: >- - {"type":"dbsync_packages","program":{"format":"deb","name":"libqt5opengl5","priority":"optional", - "section":"libs","size":"572","vendor":"Ubuntu Developers ", - "version":"5.12.8+dfsg-0ubuntu1","architecture":"amd64","multiarch":"same","source":"qtbase-opensource-src", - "description":"Qt 
5 OpenGL module"},"operation_type":"INSERTED"} - - - description: Package modification - event_payload: >- - {"data":{"architecture":"amd64","checksum":"1c1bf8bbc20caef77010f960461cc20fb9c67569", - "description":"Qt 5 OpenGL module","format":"deb","groups":"libs", - "item_id":"caa4868d177fbebc5b145a2a92497ebcf566838a","multiarch":"same","name":"libqt5opengl5", - "priority":"option","scan_time":"2021/10/13 15:10:50","size":572,"source":"qtbase-opensource-src", - "vendor":"Ubuntu Developers ","version":"5.12.8+dfsg-0ubuntu1"}, - "operation":"MODIFIED","type":"dbsync_packages"} - alert_expected_values: - rule.id: '100342' - data: >- - {"type":"dbsync_packages","program":{"format":"deb","name":"libqt5opengl5","priority":"option", - "section":"libs","size":"572","vendor":"Ubuntu Developers ", - "version":"5.12.8+dfsg-0ubuntu1","architecture":"amd64","multiarch":"same","source":"qtbase-opensource-src", - "description":"Qt 5 OpenGL module"},"operation_type":"MODIFIED"} - - - description: Package deletion - event_payload: >- - {"data":{"architecture":"amd64","checksum":"1c1bf8bbc20caef77010f960461cc20fb9c67569", - "description":"Qt 5 OpenGL module","format":"deb","groups":"libs", - "item_id":"caa4868d177fbebc5b145a2a92497ebcf566838a","multiarch":"same","name":"libqt5opengl5", - "priority":"option","scan_time":"2021/10/13 15:10:51","size":572,"source":"qtbase-opensource-src", - "vendor":"Ubuntu Developers ","version":"5.12.8+dfsg-0ubuntu1"}, - "operation":"DELETED","type":"dbsync_packages"} - alert_expected_values: - rule.id: '100343' - data: >- - {"type":"dbsync_packages","program":{"format":"deb","name":"libqt5opengl5","priority":"option", - "section":"libs","size":"572","vendor":"Ubuntu Developers ", - "version":"5.12.8+dfsg-0ubuntu1","architecture":"amd64","multiarch":"same","source":"qtbase-opensource-src", - "description":"Qt 5 OpenGL module"},"operation_type":"DELETED"} - - - description: Network interface creation - event_payload: >- - {"data":{"adapter":null,"checksum":"ce57e9ae697de4e427b67fea0d28c25e130249b7", - "item_id":"7ca46dd4c59f73c36a44ee5ebb0d0a37db4187a9","mac":"92:27:3b:ee:11:96","mtu":1500,"name":"dummy0", - "rx_bytes":0,"rx_dropped":0,"rx_errors":0,"rx_packets":0,"scan_time":"2021/10/13 18:32:06","state":"down", - "tx_bytes":0,"tx_dropped":0,"tx_errors":0,"tx_packets":0,"type":"ethernet"},"operation":"INSERTED", - "type":"dbsync_network_iface"} - alert_expected_values: - rule.id: '100351' - data: >- - {"type":"dbsync_network_iface","netinfo":{"iface":{"name":"dummy0","type":"ethernet","state":"down", - "mtu":"1500","mac":"92:27:3b:ee:11:96","tx_packets":"0","rx_packets":"0","tx_bytes":"0","rx_bytes":"0", - "tx_errors":"0","rx_errors":"0","tx_dropped":"0","rx_dropped":"0"}},"operation_type":"INSERTED"} - - - description: Network interface modification - event_payload: >- - {"data":{"adapter":null,"checksum":"ce57e9ae697de4e427b67fea0d28c25e130249b8", - "item_id":"7ca46dd4c59f73c36a44ee5ebb0d0a37db4187a9","mac":"92:27:3b:ee:11:96","mtu":1500,"name":"dummy0", - "rx_bytes":0,"rx_dropped":0,"rx_errors":0,"rx_packets":0,"scan_time":"2021/10/13 18:32:07","state":"up", - "tx_bytes":0,"tx_dropped":0,"tx_errors":0,"tx_packets":0,"type":"ethernet"},"operation":"MODIFIED", - "type":"dbsync_network_iface"} - alert_expected_values: - rule.id: '100352' - data: >- - {"type":"dbsync_network_iface","netinfo":{"iface":{"name":"dummy0","type":"ethernet","state":"up", - "mtu":"1500","mac":"92:27:3b:ee:11:96","tx_packets":"0","rx_packets":"0","tx_bytes":"0","rx_bytes":"0", - 
"tx_errors":"0","rx_errors":"0","tx_dropped":"0","rx_dropped":"0"}},"operation_type":"MODIFIED"} - - - description: Network protocol creation - event_payload: >- - {"data":{"checksum":"3d8855caa85501d22b40fa6616c0670f206b2c4e","gateway":" ","dhcp":"enabled","iface":"dummy0", - "item_id":"7ca46dd4c59f73c36a44ee5ebb0d0a37db4187a9","scan_time":"2021/10/13 18:32:06","type":"ethernet"}, - "operation":"INSERTED","type":"dbsync_network_protocol"} - alert_expected_values: - rule.id: '100361' - data: >- - {"type":"dbsync_network_protocol","netinfo":{"proto":{"iface":"dummy0","type":"ethernet","gateway":" ", - "dhcp":"enabled"}},"operation_type":"INSERTED"} - - - description: Network protocol modification - event_payload: >- - {"data":{"checksum":"3d8855caa85501d22b40fa6616c0670f206b2ca4","gateway":" ","dhcp":"disabled","iface":"dummy0", - "item_id":"7ca46dd4c59f73c36a44ee5ebb0d0a37db4187a9","scan_time":"2021/10/13 18:32:06","type":"ethernet"}, - "operation":"MODIFIED","type":"dbsync_network_protocol"} - alert_expected_values: - rule.id: '100362' - data: >- - {"type":"dbsync_network_protocol","netinfo":{"proto":{"iface":"dummy0","type":"ethernet","gateway":" ", - "dhcp":"disabled"}},"operation_type":"MODIFIED"} - - - description: Network protocol deletion - event_payload: >- - {"data":{"checksum":"3d8855caa85501d22b40fa6616c0670f206b2ca4","gateway":" ","dhcp":"disabled","iface":"dummy0", - "item_id":"7ca46dd4c59f73c36a44ee5ebb0d0a37db4187a9","scan_time":"2021/10/13 18:32:06","type":"ethernet"}, - "operation":"DELETED","type":"dbsync_network_protocol"} - alert_expected_values: - rule.id: '100363' - data: >- - {"type":"dbsync_network_protocol","netinfo":{"proto":{"iface":"dummy0","type":"ethernet","gateway":" ", - "dhcp":"disabled"}},"operation_type":"DELETED"} - - - description: Network interface deletion - event_payload: >- - {"data":{"adapter":null,"checksum":"ce57e9ae697de4e427b67fea0d28c25e130249b8", - "item_id":"7ca46dd4c59f73c36a44ee5ebb0d0a37db4187a9","mac":"92:27:3b:ee:11:96","mtu":1500,"name":"dummy0", - "rx_bytes":0,"rx_dropped":0,"rx_errors":0,"rx_packets":0,"scan_time":"2021/10/13 18:32:07","state":"up", - "tx_bytes":0,"tx_dropped":0,"tx_errors":0,"tx_packets":0,"type":"ethernet"},"operation":"DELETED", - "type":"dbsync_network_iface"} - alert_expected_values: - rule.id: '100353' - data: >- - {"type":"dbsync_network_iface","netinfo":{"iface":{"name":"dummy0","type":"ethernet","state":"up", - "mtu":"1500","mac":"92:27:3b:ee:11:96","tx_packets":"0","rx_packets":"0","tx_bytes":"0","rx_bytes":"0", - "tx_errors":"0","rx_errors":"0","tx_dropped":"0","rx_dropped":"0"}},"operation_type":"DELETED"} - - - description: Network address creation - event_payload: >- - {"data":{"address":"192.168.100.12","broadcast":"192.168.100.255", - "checksum":"ec5e14340b8ced5b39cbcfa9abecbfdbd1f2873f","dhcp":"unknown","iface":"enp0s3", - "item_id":"7b4e5f1da50834d71d895a3065a3bb098a0b8a5c","metric":"100","netmask":"255.255.255.0","proto":0, - "scan_time":"2021/10/13 16:46:37"},"operation":"INSERTED","type":"dbsync_network_address"} - alert_expected_values: - rule.id: '100371' - data: >- - {"type":"dbsync_network_address","netinfo":{"addr":{"iface":"enp0s3","proto":"ipv4","address":"192.168.100.12", - "netmask":"255.255.255.0","broadcast":"192.168.100.255"}},"operation_type":"INSERTED"} - - - description: Network address modification - event_payload: >- - {"data":{"address":"192.168.100.12","broadcast":"192.168.100.254", - "checksum":"ec5e14340b8ced5b39cbcfa9abecbfdbd1f2873f","dhcp":"unknown","iface":"enp0s3", - 
"item_id":"7b4e5f1da50834d71d895a3065a3bb098a0b8a5c","metric":"100","netmask":"255.255.255.0","proto":0, - "scan_time":"2021/10/13 16:46:38"},"operation":"MODIFIED","type":"dbsync_network_address"} - alert_expected_values: - rule.id: '100372' - data: >- - {"type":"dbsync_network_address","netinfo":{"addr":{"iface":"enp0s3","proto":"ipv4","address":"192.168.100.12", - "netmask":"255.255.255.0","broadcast":"192.168.100.254"}},"operation_type":"MODIFIED"} - - - description: Network address deletion - event_payload: >- - {"data":{"address":"192.168.100.12","broadcast":"192.168.100.254", - "checksum":"ec5e14340b8ced5b39cbcfa9abecbfdbd1f2873f","dhcp":"unknown","iface":"enp0s3", - "item_id":"7b4e5f1da50834d71d895a3065a3bb098a0b8a5c","metric":"100","netmask":"255.255.255.0","proto":0, - "scan_time":"2021/10/13 16:46:39"},"operation":"DELETED","type":"dbsync_network_address"} - alert_expected_values: - rule.id: '100373' - data: >- - {"type":"dbsync_network_address","netinfo":{"addr":{"iface":"enp0s3","proto":"ipv4","address":"192.168.100.12", - "netmask":"255.255.255.0","broadcast":"192.168.100.254"}},"operation_type":"DELETED"} - - - description: Hotfix creation - event_payload: >- - {"data":{"checksum":"ded25e55c93121675adcb8d429dc586cbb351e3a","hotfix":"KB5005539", - "scan_time":"2021/10/14 02:24:18"},"operation":"INSERTED","type":"dbsync_hotfixes"} - alert_expected_values: - rule.id: '100381' - data: >- - {"type":"dbsync_hotfixes","hotfix":"KB5005539","operation_type":"INSERTED"} - - - description: Hotfix deletion - event_payload: >- - {"data":{"hotfix":"KB5005539","scan_time":"2021/10/14 02:40:41"},"operation":"DELETED", - "type":"dbsync_hotfixes"} - alert_expected_values: - rule.id: '100383' - data: '{"type":"dbsync_hotfixes","hotfix":"KB5005539","operation_type":"DELETED"}' diff --git a/tests/integration/test_analysisd/test_syscollector/data/test_cases/cases_syscollector_integration.yaml b/tests/integration/test_analysisd/test_syscollector/data/test_cases/cases_syscollector_integration.yaml new file mode 100644 index 0000000000..89ed4f74fc --- /dev/null +++ b/tests/integration/test_analysisd/test_syscollector/data/test_cases/cases_syscollector_integration.yaml @@ -0,0 +1,414 @@ +- name: Process creation + description: Process creation events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"argvs":"180","checksum":"343ed10dc637334a7400d01b8a28deb8db5cba28","cmd":"","egroup":"root", + "euser":"root","fgroup":"root","name":"sleep","nice":0,"nlwp":1,"pgrp":116167,"pid":"156102","ppid":116169, + "priority":20,"processor":3,"resident":129,"rgroup":"root","ruser":"root","scan_time":"2021/10/13 14:57:07", + "session":116167,"sgroup":"root","share":114,"size":2019,"start_time":5799612,"state":"S","stime":0, + "suser":"root","tgid":156102,"tty":0,"utime":0,"vm_size":8076},"operation":"INSERTED", + "type":"dbsync_processes"} + alert_expected_values: + rule.id: '100301' + data: >- + {"type":"dbsync_processes","process":{"pid":"156102","name":"sleep","state":"S","ppid":"116169","utime":"0", + "stime":"0","args":"180","euser":"root","ruser":"root","suser":"root","egroup":"root","rgroup":"root", + "sgroup":"root","fgroup":"root","priority":"20","nice":"0","size":"2019","vm_size":"8076","resident":"129", + "share":"114","start_time":"5799612","pgrp":"116167","session":"116167","nlwp":"1","tgid":"156102","tty":"0", + "processor":"3"},"operation_type":"INSERTED"} + +- name: Process modification + description: Process modification events 
+ configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"argvs":"180","checksum":"45cb0637a5b43ed1a819ac6cb4cf4d6d4f15f87","cmd":"","egroup":"root", + "euser":"root","fgroup":"root","name":"sleep","nice":0,"nlwp":1,"pgrp":116167,"pid":"156102","ppid":116169, + "priority":10,"processor":3,"resident":129,"rgroup":"root","ruser":"root","scan_time":"2021/10/13 14:57:08", + "session":116167,"sgroup":"root","share":114,"size":2019,"start_time":5799612,"state":"S","stime":0, + "suser":"root","tgid":156102,"tty":0,"utime":0,"vm_size":8076},"operation":"MODIFIED", + "type":"dbsync_processes"} + alert_expected_values: + rule.id: '100302' + data: >- + {"type":"dbsync_processes","process":{"pid":"156102","name":"sleep","state":"S","ppid":"116169","utime":"0", + "stime":"0","args":"180","euser":"root","ruser":"root","suser":"root","egroup":"root","rgroup":"root", + "sgroup":"root","fgroup":"root","priority":"10","nice":"0","size":"2019","vm_size":"8076","resident":"129", + "share":"114","start_time":"5799612","pgrp":"116167","session":"116167","nlwp":"1","tgid":"156102","tty":"0", + "processor":"3"},"operation_type":"MODIFIED"} + +- name: Process deletion + description: Process deletion events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"argvs":"180","checksum":"45cb0637a5b43ed1a819ac6cb4cf4d6d4f15f87","cmd":"","egroup":"root", + "euser":"root","fgroup":"root","name":"sleep","nice":0,"nlwp":1,"pgrp":116167,"pid":"156102","ppid":116169, + "priority":10,"processor":3,"resident":129,"rgroup":"root","ruser":"root","scan_time":"2021/10/13 14:57:09", + "session":116167,"sgroup":"root","share":114,"size":2019,"start_time":5799612,"state":"S","stime":0, + "suser":"root","tgid":156102,"tty":0,"utime":0,"vm_size":8076},"operation":"DELETED", + "type":"dbsync_processes"} + alert_expected_values: + rule.id: '100303' + data: >- + {"type":"dbsync_processes","process":{"pid":"156102","name":"sleep","state":"S","ppid":"116169","utime":"0", + "stime":"0","args":"180","euser":"root","ruser":"root","suser":"root","egroup":"root","rgroup":"root", + "sgroup":"root","fgroup":"root","priority":"10","nice":"0","size":"2019","vm_size":"8076","resident":"129", + "share":"114","start_time":"5799612","pgrp":"116167","session":"116167","nlwp":"1","tgid":"156102","tty":"0", + "processor":"3"},"operation_type":"DELETED"} + +- name: Port creation + description: Port creation events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"checksum":"eff13e52290143eb5b5b9b8c191902609f37c712","inode":494908, + "item_id":"e2c92964ad145a635139f6318057506e386e00a3","local_ip":"0.0.0.0","local_port":34340,"pid":0, + "process":null,"protocol":"tcp","remote_ip":"0.0.0.0","remote_port":0,"rx_queue":0, + "scan_time":"2021/10/13 14:40:02","state":"listening","tx_queue":0},"operation":"INSERTED", + "type":"dbsync_ports"} + alert_expected_values: + rule.id: '100311' + data: >- + {"type":"dbsync_ports","port":{"protocol":"tcp","local_ip":"0.0.0.0","local_port":"34340", + "remote_ip":"0.0.0.0","remote_port":"0","tx_queue":"0","rx_queue":"0","inode":"494908","state":"listening", + "pid":"0"},"operation_type":"INSERTED"} + +- name: Port modification + description: Port modification events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + 
{"data":{"checksum":"eff13e52290143eb5b5b9b8c191902609f37c713","inode":494908, + "item_id":"e2c92964ad145a635139f6318057506e386e00a3","local_ip":"0.0.0.0","local_port":34340,"pid":0, + "process":null,"protocol":"tcp","remote_ip":"0.0.0.0","remote_port":0,"rx_queue":1, + "scan_time":"2021/10/13 14:40:03","state":"listening","tx_queue":1},"operation":"MODIFIED", + "type":"dbsync_ports"} + alert_expected_values: + rule.id: '100312' + data: >- + {"type":"dbsync_ports","port":{"protocol":"tcp","local_ip":"0.0.0.0","local_port":"34340", + "remote_ip":"0.0.0.0","remote_port":"0","tx_queue":"1","rx_queue":"1","inode":"494908","state":"listening", + "pid":"0"},"operation_type":"MODIFIED"} + +- name: Port deletion + description: Port deletion events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"checksum":"eff13e52290143eb5b5b9b8c191902609f37c713","inode":494908, + "item_id":"e2c92964ad145a635139f6318057506e386e00a3","local_ip":"0.0.0.0","local_port":34340,"pid":0, + "process":null,"protocol":"tcp","remote_ip":"0.0.0.0","remote_port":0,"rx_queue":1, + "scan_time":"2021/10/13 14:40:04","state":"listening","tx_queue":1},"operation":"DELETED", + "type":"dbsync_ports"} + alert_expected_values: + rule.id: '100313' + data: >- + {"type":"dbsync_ports","port":{"protocol":"tcp","local_ip":"0.0.0.0","local_port":"34340", + "remote_ip":"0.0.0.0","remote_port":"0","tx_queue":"1","rx_queue":"1","inode":"494908","state":"listening", + "pid":"0"},"operation_type":"DELETED"} + +- name: Osinfo creation + description: Osinfo creation events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"checksum":"1634140017886803554","architecture":"x86_64","hostname":"UBUNTU","os_build":"7601", + "os_major":"6","os_minor":"1","os_name":"Microsoft Windows 7","os_release":"sp1","os_version":"6.1.7601", + "os_display_version":"test"},"operation":"INSERTED","type":"dbsync_osinfo"} + alert_expected_values: + rule.id: '100321' + data: >- + {"type":"dbsync_osinfo","os":{"hostname":"UBUNTU","architecture":"x86_64","name":"Microsoft Windows 7", + "version":"6.1.7601","major":"6","minor":"1","build":"7601","os_release":"sp1","display_version":"test"}, + "operation_type":"INSERTED"} + +- name: Osinfo modification + description: Osinfo modification events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"checksum":"1634140017886803554","architecture":"x86_64","hostname":"UBUNTU","os_build":"7601", + "os_major":"6","os_minor":"1","os_name":"Microsoft Windows 7","os_release":"sp1","os_version":"6.1.7601", + "os_display_version":"test_text"},"operation":"MODIFIED","type":"dbsync_osinfo"} + alert_expected_values: + rule.id: '100322' + data: >- + {"type":"dbsync_osinfo","os":{"hostname":"UBUNTU","architecture":"x86_64","name":"Microsoft Windows 7", + "version":"6.1.7601","major":"6","minor":"1","build":"7601","os_release":"sp1","display_version":"test_text"}, + "operation_type":"MODIFIED"} + +- name: Hwinfo creation + description: Hwinfo creation events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"scan_time":"2021/10/13 14:41:43","board_serial":"Intel Corporation", + "checksum":"af7b22eef8f5e06c04af4db49c9f8d1d28963918","cpu_MHz":2904,"cpu_cores":2, + "cpu_name":"Intel(R) Core(TM) i5-9400 CPU @ 
2.90GHz","ram_free":2257872,"ram_total":4972208,"ram_usage":54}, + "operation":"INSERTED","type":"dbsync_hwinfo"} + alert_expected_values: + rule.id: '100331' + data: >- + {"type":"dbsync_hwinfo","hardware":{"serial":"Intel Corporation", + "cpu_name":"Intel(R) Core(TM) i5-9400 CPU @ 2.90GHz","cpu_cores":"2","cpu_mhz":"2904","ram_total":"4972208", + "ram_free":"2257872","ram_usage":"54"},"operation_type":"INSERTED"} + +- name: Hwinfo modification + description: Hwinfo modification events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"scan_time":"2021/10/13 14:41:44","board_serial":"Intel Corporation", + "checksum":"af7b22eef8f5e06c04af4db49c9f8d1d28963919","cpu_MHz":2904,"cpu_cores":4, + "cpu_name":"Intel(R) Core(TM) i5-9400 CPU @ 2.90GHz","ram_free":2257872,"ram_total":4972208,"ram_usage":54}, + "operation":"MODIFIED","type":"dbsync_hwinfo"} + alert_expected_values: + rule.id: '100332' + data: >- + {"type":"dbsync_hwinfo","hardware":{"serial":"Intel Corporation", + "cpu_name":"Intel(R) Core(TM) i5-9400 CPU @ 2.90GHz","cpu_cores":"4","cpu_mhz":"2904","ram_total":"4972208", + "ram_free":"2257872","ram_usage":"54"},"operation_type":"MODIFIED"} + +- name: Package creation + description: Package creation events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"architecture":"amd64","checksum":"1c1bf8bbc20caef77010f960461cc20fb9c67568", + "description":"Qt 5 OpenGL module","format":"deb","groups":"libs", + "item_id":"caa4868d177fbebc5b145a2a92497ebcf566838a","multiarch":"same","name":"libqt5opengl5", + "priority":"optional","scan_time":"2021/10/13 15:10:49","size":572,"source":"qtbase-opensource-src", + "vendor":"Ubuntu Developers ","version":"5.12.8+dfsg-0ubuntu1"}, + "operation":"INSERTED","type":"dbsync_packages"} + alert_expected_values: + rule.id: '100341' + data: >- + {"type":"dbsync_packages","program":{"format":"deb","name":"libqt5opengl5","priority":"optional", + "section":"libs","size":"572","vendor":"Ubuntu Developers ", + "version":"5.12.8+dfsg-0ubuntu1","architecture":"amd64","multiarch":"same","source":"qtbase-opensource-src", + "description":"Qt 5 OpenGL module"},"operation_type":"INSERTED"} + +- name: Package modification + description: Package modification events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"architecture":"amd64","checksum":"1c1bf8bbc20caef77010f960461cc20fb9c67569", + "description":"Qt 5 OpenGL module","format":"deb","groups":"libs", + "item_id":"caa4868d177fbebc5b145a2a92497ebcf566838a","multiarch":"same","name":"libqt5opengl5", + "priority":"option","scan_time":"2021/10/13 15:10:50","size":572,"source":"qtbase-opensource-src", + "vendor":"Ubuntu Developers ","version":"5.12.8+dfsg-0ubuntu1"}, + "operation":"MODIFIED","type":"dbsync_packages"} + alert_expected_values: + rule.id: '100342' + data: >- + {"type":"dbsync_packages","program":{"format":"deb","name":"libqt5opengl5","priority":"option", + "section":"libs","size":"572","vendor":"Ubuntu Developers ", + "version":"5.12.8+dfsg-0ubuntu1","architecture":"amd64","multiarch":"same","source":"qtbase-opensource-src", + "description":"Qt 5 OpenGL module"},"operation_type":"MODIFIED"} + +- name: Package deletion + description: Package deletion events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + 
{"data":{"architecture":"amd64","checksum":"1c1bf8bbc20caef77010f960461cc20fb9c67569", + "description":"Qt 5 OpenGL module","format":"deb","groups":"libs", + "item_id":"caa4868d177fbebc5b145a2a92497ebcf566838a","multiarch":"same","name":"libqt5opengl5", + "priority":"option","scan_time":"2021/10/13 15:10:51","size":572,"source":"qtbase-opensource-src", + "vendor":"Ubuntu Developers ","version":"5.12.8+dfsg-0ubuntu1"}, + "operation":"DELETED","type":"dbsync_packages"} + alert_expected_values: + rule.id: '100343' + data: >- + {"type":"dbsync_packages","program":{"format":"deb","name":"libqt5opengl5","priority":"option", + "section":"libs","size":"572","vendor":"Ubuntu Developers ", + "version":"5.12.8+dfsg-0ubuntu1","architecture":"amd64","multiarch":"same","source":"qtbase-opensource-src", + "description":"Qt 5 OpenGL module"},"operation_type":"DELETED"} + +- name: Network interface creation + description: Network interface creation events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"adapter":null,"checksum":"ce57e9ae697de4e427b67fea0d28c25e130249b7", + "item_id":"7ca46dd4c59f73c36a44ee5ebb0d0a37db4187a9","mac":"92:27:3b:ee:11:96","mtu":1500,"name":"dummy0", + "rx_bytes":0,"rx_dropped":0,"rx_errors":0,"rx_packets":0,"scan_time":"2021/10/13 18:32:06","state":"down", + "tx_bytes":0,"tx_dropped":0,"tx_errors":0,"tx_packets":0,"type":"ethernet"},"operation":"INSERTED", + "type":"dbsync_network_iface"} + alert_expected_values: + rule.id: '100351' + data: >- + {"type":"dbsync_network_iface","netinfo":{"iface":{"name":"dummy0","type":"ethernet","state":"down", + "mtu":"1500","mac":"92:27:3b:ee:11:96","tx_packets":"0","rx_packets":"0","tx_bytes":"0","rx_bytes":"0", + "tx_errors":"0","rx_errors":"0","tx_dropped":"0","rx_dropped":"0"}},"operation_type":"INSERTED"} + +- name: Network interface modification + description: Network interface modification events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"adapter":null,"checksum":"ce57e9ae697de4e427b67fea0d28c25e130249b8", + "item_id":"7ca46dd4c59f73c36a44ee5ebb0d0a37db4187a9","mac":"92:27:3b:ee:11:96","mtu":1500,"name":"dummy0", + "rx_bytes":0,"rx_dropped":0,"rx_errors":0,"rx_packets":0,"scan_time":"2021/10/13 18:32:07","state":"up", + "tx_bytes":0,"tx_dropped":0,"tx_errors":0,"tx_packets":0,"type":"ethernet"},"operation":"MODIFIED", + "type":"dbsync_network_iface"} + alert_expected_values: + rule.id: '100352' + data: >- + {"type":"dbsync_network_iface","netinfo":{"iface":{"name":"dummy0","type":"ethernet","state":"up", + "mtu":"1500","mac":"92:27:3b:ee:11:96","tx_packets":"0","rx_packets":"0","tx_bytes":"0","rx_bytes":"0", + "tx_errors":"0","rx_errors":"0","tx_dropped":"0","rx_dropped":"0"}},"operation_type":"MODIFIED"} + +- name: Network protocol creation + description: Network protocol creation events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"checksum":"3d8855caa85501d22b40fa6616c0670f206b2c4e","gateway":" ","dhcp":"enabled","iface":"dummy0", + "item_id":"7ca46dd4c59f73c36a44ee5ebb0d0a37db4187a9","scan_time":"2021/10/13 18:32:06","type":"ethernet"}, + "operation":"INSERTED","type":"dbsync_network_protocol"} + alert_expected_values: + rule.id: '100361' + data: >- + {"type":"dbsync_network_protocol","netinfo":{"proto":{"iface":"dummy0","type":"ethernet","gateway":" ", + "dhcp":"enabled"}},"operation_type":"INSERTED"} + +- 
name: Network protocol modification + description: Network protocol modification events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"checksum":"3d8855caa85501d22b40fa6616c0670f206b2ca4","gateway":" ","dhcp":"disabled","iface":"dummy0", + "item_id":"7ca46dd4c59f73c36a44ee5ebb0d0a37db4187a9","scan_time":"2021/10/13 18:32:06","type":"ethernet"}, + "operation":"MODIFIED","type":"dbsync_network_protocol"} + alert_expected_values: + rule.id: '100362' + data: >- + {"type":"dbsync_network_protocol","netinfo":{"proto":{"iface":"dummy0","type":"ethernet","gateway":" ", + "dhcp":"disabled"}},"operation_type":"MODIFIED"} + +- name: Network protocol deletion + description: Network protocol deletion events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"checksum":"3d8855caa85501d22b40fa6616c0670f206b2ca4","gateway":" ","dhcp":"disabled","iface":"dummy0", + "item_id":"7ca46dd4c59f73c36a44ee5ebb0d0a37db4187a9","scan_time":"2021/10/13 18:32:06","type":"ethernet"}, + "operation":"DELETED","type":"dbsync_network_protocol"} + alert_expected_values: + rule.id: '100363' + data: >- + {"type":"dbsync_network_protocol","netinfo":{"proto":{"iface":"dummy0","type":"ethernet","gateway":" ", + "dhcp":"disabled"}},"operation_type":"DELETED"} + +- name: Network interface deletion + description: Network interface deletion events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"adapter":null,"checksum":"ce57e9ae697de4e427b67fea0d28c25e130249b8", + "item_id":"7ca46dd4c59f73c36a44ee5ebb0d0a37db4187a9","mac":"92:27:3b:ee:11:96","mtu":1500,"name":"dummy0", + "rx_bytes":0,"rx_dropped":0,"rx_errors":0,"rx_packets":0,"scan_time":"2021/10/13 18:32:07","state":"up", + "tx_bytes":0,"tx_dropped":0,"tx_errors":0,"tx_packets":0,"type":"ethernet"},"operation":"DELETED", + "type":"dbsync_network_iface"} + alert_expected_values: + rule.id: '100353' + data: >- + {"type":"dbsync_network_iface","netinfo":{"iface":{"name":"dummy0","type":"ethernet","state":"up", + "mtu":"1500","mac":"92:27:3b:ee:11:96","tx_packets":"0","rx_packets":"0","tx_bytes":"0","rx_bytes":"0", + "tx_errors":"0","rx_errors":"0","tx_dropped":"0","rx_dropped":"0"}},"operation_type":"DELETED"} + +- name: Network address creation + description: Network address creation events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"address":"192.168.100.12","broadcast":"192.168.100.255", + "checksum":"ec5e14340b8ced5b39cbcfa9abecbfdbd1f2873f","dhcp":"unknown","iface":"enp0s3", + "item_id":"7b4e5f1da50834d71d895a3065a3bb098a0b8a5c","metric":"100","netmask":"255.255.255.0","proto":0, + "scan_time":"2021/10/13 16:46:37"},"operation":"INSERTED","type":"dbsync_network_address"} + alert_expected_values: + rule.id: '100371' + data: >- + {"type":"dbsync_network_address","netinfo":{"addr":{"iface":"enp0s3","proto":"ipv4","address":"192.168.100.12", + "netmask":"255.255.255.0","broadcast":"192.168.100.255"}},"operation_type":"INSERTED"} + +- name: Network address modification + description: Network address modification events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"address":"192.168.100.12","broadcast":"192.168.100.254", + "checksum":"ec5e14340b8ced5b39cbcfa9abecbfdbd1f2873f","dhcp":"unknown","iface":"enp0s3", + 
"item_id":"7b4e5f1da50834d71d895a3065a3bb098a0b8a5c","metric":"100","netmask":"255.255.255.0","proto":0, + "scan_time":"2021/10/13 16:46:38"},"operation":"MODIFIED","type":"dbsync_network_address"} + alert_expected_values: + rule.id: '100372' + data: >- + {"type":"dbsync_network_address","netinfo":{"addr":{"iface":"enp0s3","proto":"ipv4","address":"192.168.100.12", + "netmask":"255.255.255.0","broadcast":"192.168.100.254"}},"operation_type":"MODIFIED"} + +- name: Network address deletion + description: Network address deletion events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"address":"192.168.100.12","broadcast":"192.168.100.254", + "checksum":"ec5e14340b8ced5b39cbcfa9abecbfdbd1f2873f","dhcp":"unknown","iface":"enp0s3", + "item_id":"7b4e5f1da50834d71d895a3065a3bb098a0b8a5c","metric":"100","netmask":"255.255.255.0","proto":0, + "scan_time":"2021/10/13 16:46:39"},"operation":"DELETED","type":"dbsync_network_address"} + alert_expected_values: + rule.id: '100373' + data: >- + {"type":"dbsync_network_address","netinfo":{"addr":{"iface":"enp0s3","proto":"ipv4","address":"192.168.100.12", + "netmask":"255.255.255.0","broadcast":"192.168.100.254"}},"operation_type":"DELETED"} + +- name: Hotfix creation + description: Hotfix creation events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"checksum":"ded25e55c93121675adcb8d429dc586cbb351e3a","hotfix":"KB5005539", + "scan_time":"2021/10/14 02:24:18"},"operation":"INSERTED","type":"dbsync_hotfixes"} + alert_expected_values: + rule.id: '100381' + data: >- + {"type":"dbsync_hotfixes","hotfix":"KB5005539","operation_type":"INSERTED"} + +- name: Hotfix deletion + description: Hotfix deletion events + configuration_parameters: + metadata: + event_header: '(myhostname) any->syscollector:' + event_payload: >- + {"data":{"hotfix":"KB5005539","scan_time":"2021/10/14 02:40:41"},"operation":"DELETED", + "type":"dbsync_hotfixes"} + alert_expected_values: + rule.id: '100383' + data: '{"type":"dbsync_hotfixes","hotfix":"KB5005539","operation_type":"DELETED"}' diff --git a/tests/integration/test_analysisd/test_syscollector/test_syscollector_events.py b/tests/integration/test_analysisd/test_syscollector/test_syscollector_events.py deleted file mode 100644 index ade9bdf684..0000000000 --- a/tests/integration/test_analysisd/test_syscollector/test_syscollector_events.py +++ /dev/null @@ -1,152 +0,0 @@ -''' -copyright: Copyright (C) 2015-2022, Wazuh Inc. - - Created by Wazuh, Inc. . - - This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 - -type: integration - -brief: These tests will check if the Syscollector events, which are processed by - the `wazuh-analysisd` daemon, generates appropriate alerts based on the - information contained in the delta. 
- - -components: - - analysisd - -suite: syscollector - -targets: - - manager - -daemons: - - wazuh-analysisd - -os_platform: - - linux - -os_version: - - Arch Linux - - Amazon Linux 2 - - Amazon Linux 1 - - CentOS 8 - - CentOS 7 - - Debian Buster - - Red Hat 8 - - Ubuntu Focal - - Ubuntu Bionic - -references: - - https://documentation.wazuh.com/current/user-manual/capabilities/syscollector.html\ - #using-syscollector-information-to-trigger-alerts -''' -import os -import yaml -import pytest - -from wazuh_testing.tools import (ANALYSISD_QUEUE_SOCKET_PATH, ALERT_FILE_PATH) -from wazuh_testing.analysis import CallbackWithContext, callback_check_syscollector_alert - - -# Marks -pytestmark = [pytest.mark.linux, pytest.mark.tier(level=0), pytest.mark.server] - - -# Variables -receiver_sockets_params = [(ANALYSISD_QUEUE_SOCKET_PATH, 'AF_UNIX', 'UDP')] -receiver_sockets = None -alert_timeout = 10 -file_to_monitor = ALERT_FILE_PATH - -# Configurations -data_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') -messages_path = os.path.join(data_dir, 'syscollector.yaml') -with open(messages_path) as f: - test_cases = yaml.safe_load(f) -local_internal_options = {'analysisd.debug': '2'} - - -# Fixtures -@pytest.fixture(scope='module', params=test_cases, ids=[test_case['name'] for test_case in test_cases]) -def get_configuration(request): - """Get configurations from the module.""" - return request.param - - -# Tests -@pytest.mark.parametrize('test_case', - list(test_cases), - ids=[test_case['name'] for test_case in test_cases]) -def test_syscollector_events(test_case, configure_local_internal_options_module, get_configuration, mock_agent_module, - configure_custom_rules, restart_analysisd, wait_for_analysisd_startup, - connect_to_sockets_function, file_monitoring): - ''' - description: Check if Analysisd handle Syscollector deltas properly by generating alerts. - - wazuh_min_version: 4.4.0 - - tier: 2 - - parameters: - - get_configuration: - type: fixture - brief: Get configurations from the module. - - mock_agent_module: - type: fixture - brief: Create mock agent and get agent_id - - configure_custom_rules: - type: fixture - brief: Copy custom rules to test. - - restart_analysisd: - type: fixture - brief: Restart analysisd daemon and truncate related log files. - - wait_for_analysisd_startup: - type: fixture - brief: Wait until analysisd is ready. - - connect_to_sockets_function: - type: fixture - brief: Connect to analysisd event queue. - - file_monitoring: - type: fixture - brief: Handle the monitoring of a specified file. - - assertions: - - Verify that specific syscollector deltas trigger specific custom alert with certain values. - - input_description: - Input dataset (defined as event_header + event_payload in syscollector.yaml) - cover, in most of the cases, INSERTED, MODIFIED and DELETED deltas - for each of the available scan; osinfo, hwinfo, processes, packages, network_interface, - network_address, network_protocol, ports and hotfixes. 
- - expected_output: - Expected output (defined as alert_expected_values in syscollector.yaml) - - tags: - - rules - ''' - - # Get mock agent_id to create syscollector header - agent_id = mock_agent_module - event_header = f"d:[{agent_id}] {test_case['event_header']}" - - for stage in test_case['test_case']: - - # Add agent_id alert check - alert_expected_values = stage['alert_expected_values'] - alert_expected_values['agent.id'] = agent_id - - # Create full message by header and payload concatenation - test_msg = event_header + stage['event_payload'] - - # Send delta to analysisd queue - receiver_sockets[0].send(test_msg) - - # Set callback according to stage parameters - alert_callback = CallbackWithContext(callback_check_syscollector_alert, alert_expected_values) - - # Find expected outputs - log_monitor.start(timeout=alert_timeout, - callback=alert_callback, - error_message=f"Timeout expecting {stage['description']} message.") diff --git a/tests/integration/test_analysisd/test_syscollector/test_syscollector_integration.py b/tests/integration/test_analysisd/test_syscollector/test_syscollector_integration.py new file mode 100644 index 0000000000..be9ad033e2 --- /dev/null +++ b/tests/integration/test_analysisd/test_syscollector/test_syscollector_integration.py @@ -0,0 +1,137 @@ +''' +copyright: Copyright (C) 2015-2023, Wazuh Inc. + Created by Wazuh, Inc. . + This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 + +type: integration + +brief: These tests will check if Analysisd handle Syscollector deltas + properly by generating alerts. + +components: + - analysisd + +suite: syscollector + +targets: + - manager + +daemons: + - wazuh-analysisd + +os_platform: + - linux + +os_version: + - Amazon Linux 2 + - Amazon Linux 1 + - CentOS 8 + - CentOS 7 + - Ubuntu Focal + - Ubuntu Bionic + +references: + - https://documentation.wazuh.com/current/user-manual/capabilities/syscollector.html\ + #using-syscollector-information-to-trigger-alerts +''' +import os +import pytest + +from wazuh_testing.tools.configuration import get_test_cases_data +from wazuh_testing.tools import ANALYSISD_QUEUE_SOCKET_PATH, ALERT_FILE_PATH +from wazuh_testing.analysis import CallbackWithContext, callback_check_syscollector_alert + +pytestmark = [pytest.mark.server] + +# Generic vars +TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') +TEST_CASES_PATH = os.path.join(TEST_DATA_PATH, 'test_cases') +TEST_RULES_PATH = os.path.join(TEST_DATA_PATH, 'rules') + +local_internal_options = {'analysisd.debug': '2'} +receiver_sockets_params = [(ANALYSISD_QUEUE_SOCKET_PATH, 'AF_UNIX', 'UDP')] +receiver_sockets = None +alert_timeout = 5 +file_to_monitor = ALERT_FILE_PATH + +# ---------------------------------------- TEST_SYSCOLLECTOR_EVENTS ------------------------------------- +# Configuration and cases data +cases_path = os.path.join(TEST_CASES_PATH, 'cases_syscollector_integration.yaml') +rule_file = "syscollector_rules.xml" + +# Enabled test configurations +_, configuration_metadata, case_ids = get_test_cases_data(cases_path) + + +@pytest.mark.tier(level=2) +@pytest.mark.parametrize('metadata', configuration_metadata, ids=case_ids) +def test_syscollector_integration(metadata, configure_local_internal_options_module, mock_agent_module, + configure_custom_rules, restart_analysisd, wait_for_analysisd_startup, + connect_to_sockets_function, file_monitoring): + """ + description: Check if Analysisd handle Syscollector deltas properly by generating alerts. 
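
On the verification side, each alert_expected_values block from the case file, plus the agent.id injected at runtime, is matched against the alerts that analysisd writes to alerts.json. The framework does this through CallbackWithContext and callback_check_syscollector_alert; the following is only a rough, self-contained sketch of the dotted-key comparison they are assumed to perform, with the alerts path and the sample expected values taken from the cases above:

    import json

    def alert_matches(alert, expected):
        """Return True when every dotted key in `expected` resolves in `alert` to the same value."""
        for dotted_key, expected_value in expected.items():
            node = alert
            for part in dotted_key.split('.'):
                if not isinstance(node, dict) or part not in node:
                    return False
                node = node[part]
            if str(node) != str(expected_value):
                return False
        return True

    # Shaped like the hotfix deletion case above; agent.id is appended at runtime.
    expected = {'rule.id': '100383', 'agent.id': '001'}
    with open('/var/ossec/logs/alerts/alerts.json') as alerts:
        assert any(alert_matches(json.loads(line), expected)
                   for line in alerts if line.strip())

The data field in the case files is a serialized JSON fragment, so a faithful matcher would also parse and compare it field by field; that part is omitted here.
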
+ + wazuh_min_version: 4.4.0 + + tier: 2 + + parameters: + - metadata: + type: dict + brief: Get metadata from the module. + - mock_agent_module: + type: fixture + brief: Create mock agent and get agent_id + - configure_custom_rules: + type: fixture + brief: Copy custom rules to test. + - restart_analysisd: + type: fixture + brief: Restart analysisd daemon and truncate related log files. + - wait_for_analysisd_startup: + type: fixture + brief: Wait until analysisd is ready. + - connect_to_sockets_function: + type: fixture + brief: Connect to analysisd event queue. + - file_monitoring: + type: fixture + brief: Handle the monitoring of a specified file. + + assertions: + - Verify that specific syscollector deltas trigger specific custom alert with certain values. + + input_description: + Input dataset (defined as event_header + event_payload in cases_syscollector_integration.yaml) + cover, in most of the cases, INSERTED, MODIFIED and DELETED deltas + for each of the available scan; osinfo, hwinfo, processes, packages, network_interface, + network_address, network_protocol, ports and hotfixes. + + expected_output: + Expected output (defined as alert_expected_values in cases_syscollector_integration.yaml) + + tags: + - rules + """ + + # Get mock agent_id to create syscollector header + agent_id = mock_agent_module + event_header = f"d:[{agent_id}] {metadata['event_header']}" + + # Add agent_id alert check + alert_expected_values = metadata['alert_expected_values'] + alert_expected_values['agent.id'] = agent_id + + # Create full message by header and payload concatenation + test_msg = event_header + metadata['event_payload'] + + # Send delta to analysisd queue + receiver_sockets[0].send(test_msg) + + # Set callback according to stage parameters + alert_callback = CallbackWithContext(callback_check_syscollector_alert, alert_expected_values) + + # Find expected outputs + log_monitor.start(timeout=alert_timeout, + callback=alert_callback, + error_message=f"Timeout expecting {metadata['description']} message.") From 46c4f86001939fdc4f80f4b580cc859d59b9c2b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Andr=C3=A9s=20Carmelo=20Micalizzi=20Casali?= Date: Fri, 2 Jun 2023 13:01:43 -0300 Subject: [PATCH 07/10] Fix FIM test_large_changes module (#4219) * docs(#3860): update changelog * feat(#3860): add new fixtures * refactor(#3860): update test_large_changes * docs(#3860): remove typo * docs(#3860): updated docu and cases description --- CHANGELOG.md | 1 + .../modules/fim/event_monitor.py | 35 ++++ .../configuration_large_changes.yaml | 32 +++ .../data/test_cases/cases_large_changes.yaml | 155 ++++++++++++++ .../test_report_changes/test_large_changes.py | 193 ++++++------------ 5 files changed, 289 insertions(+), 127 deletions(-) create mode 100644 tests/integration/test_fim/test_files/test_report_changes/data/configuration_template/configuration_large_changes.yaml create mode 100644 tests/integration/test_fim/test_files/test_report_changes/data/test_cases/cases_large_changes.yaml diff --git a/CHANGELOG.md b/CHANGELOG.md index 21c1b5bdba..b1320511a4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ All notable changes to this project will be documented in this file. 
### Changed - Skip `test_authd_ssl_options` cases that use TLS 1.1 causing errors on several OpenSSL versions \- (Tests) +- Fix FIM test_large_changes test suite ([#3948](https://github.com/wazuh/wazuh-qa/pull/3948)) \- (Tests) - Update `get_test_cases_data` function so it handles fim_mode parameter ([#4185](https://github.com/wazuh/wazuh-qa/pull/4185)) \- (Framework) - Change FIM `regular_file_cud` and `EventChecker` file modification steps ([#4183](https://github.com/wazuh/wazuh-qa/pull/4183)) \- (Framework + Tests) - Refactor library to change the environment ([#4145](https://github.com/wazuh/wazuh-qa/pull/4145)) \- (Framework) diff --git a/deps/wazuh_testing/wazuh_testing/modules/fim/event_monitor.py b/deps/wazuh_testing/wazuh_testing/modules/fim/event_monitor.py index 131a51a14c..a972ee78da 100644 --- a/deps/wazuh_testing/wazuh_testing/modules/fim/event_monitor.py +++ b/deps/wazuh_testing/wazuh_testing/modules/fim/event_monitor.py @@ -388,6 +388,19 @@ def callback_detect_file_deleted_event(line): return None +def callback_detect_file_more_changes(line): + """ Callback that detects if a line in a log contains 'More changes' in content_changes. + Args: + line (String): string line to be checked by callback in FileMonitor. + Returns: + returns JSON string from log. + """ + json_event = callback_detect_event(line) + if json_event is not None and 'content_changes' in json_event['data']: + if 'More changes' in json_event['data']['content_changes']: + return json_event + + def callback_audit_cannot_start(line): """ Callback that detects if a line shows whodata engine could not start and monitoring switched to realtime. @@ -501,3 +514,25 @@ def detect_windows_whodata_mode_change(file_monitor, file='.*'): file_monitor.start(timeout=T_60, callback=generate_monitoring_callback(pattern), error_message=ERR_MSG_WHODATA_REALTIME_MODE_CHANGE_EVENT) + + +def get_fim_event(file_monitor=None, callback='', error_message=None, update_position=True, + timeout=T_60, accum_results=1, file_to_monitor=LOG_FILE_PATH): + """ Check if FIM event occurs and return it according to the callback. + Args: + file_monitor (FileMonitor): FileMonitor object to monitor the file content. + callback (str): log regex to check in Wazuh log + error_message (str): error message to show in case of expected event does not occur + update_position (boolean): filter configuration parameter to search in Wazuh log + timeout (str): timeout to check the event in Wazuh log + accum_results (int): Accumulation of matches. + Returns: + returns the value given by the callback used. Default None. 
+ """ + file_monitor = FileMonitor(file_to_monitor) if file_monitor is None else file_monitor + error_message = f"Could not find this event in {file_to_monitor}: {callback}" if error_message is None else \ + error_message + + result = file_monitor.start(timeout=timeout, update_position=update_position, accum_results=accum_results, + callback=callback, error_message=error_message).result() + return result diff --git a/tests/integration/test_fim/test_files/test_report_changes/data/configuration_template/configuration_large_changes.yaml b/tests/integration/test_fim/test_files/test_report_changes/data/configuration_template/configuration_large_changes.yaml new file mode 100644 index 0000000000..7c2542c201 --- /dev/null +++ b/tests/integration/test_fim/test_files/test_report_changes/data/configuration_template/configuration_large_changes.yaml @@ -0,0 +1,32 @@ +- sections: + - section: syscheck + elements: + - disabled: + value: 'no' + - frequency: + value: INTERVAL + - directories: + value: TEST_DIRECTORIES + attributes: + - check_all: 'yes' + - realtime: REALTIME + - whodata: WHODATA + - report_changes: 'yes' + - diff_size_limit: 200KB + + - section: sca + elements: + - enabled: + value: 'no' + + - section: rootcheck + elements: + - disabled: + value: 'yes' + + - section: wodle + attributes: + - name: syscollector + elements: + - disabled: + value: 'yes' diff --git a/tests/integration/test_fim/test_files/test_report_changes/data/test_cases/cases_large_changes.yaml b/tests/integration/test_fim/test_files/test_report_changes/data/test_cases/cases_large_changes.yaml new file mode 100644 index 0000000000..f0eb994182 --- /dev/null +++ b/tests/integration/test_fim/test_files/test_report_changes/data/test_cases/cases_large_changes.yaml @@ -0,0 +1,155 @@ +- name: Test changes smaller than limit (Scheduled mode) + description: Test that changes are smaller than limit, 'More changes' does not appear in content_changes + configuration_parameters: + INTERVAL: 4 + REALTIME: 'no' + WHODATA: 'no' + metadata: + filename: regular_1 + original_size: 500 + modified_size: 500 + has_more_changes: false + fim_mode: scheduled + +- name: Test changes smaller than limit (Realtime mode) + description: Test that changes are smaller than limit, 'More changes' does not appear in content_changes + configuration_parameters: + INTERVAL: 10000 + REALTIME: 'yes' + WHODATA: 'no' + metadata: + filename: regular_1 + original_size: 500 + modified_size: 500 + has_more_changes: false + fim_mode: realtime + +- name: Test changes smaller than limit (Whodata mode) + description: Test that changes are smaller than limit, 'More changes' does not appear in content_changes + configuration_parameters: + INTERVAL: 10000 + REALTIME: 'no' + WHODATA: 'yes' + metadata: + filename: regular_1 + original_size: 500 + modified_size: 500 + has_more_changes: false + fim_mode: whodata + +- name: Test large changes - Same size (Scheduled mode) + description: Test when changes are same size of set limit, 'More changes' appears in content_changes + configuration_parameters: + INTERVAL: 4 + REALTIME: 'no' + WHODATA: 'no' + metadata: + filename: regular_2 + original_size: 200000 + modified_size: 200000 + has_more_changes: true + fim_mode: scheduled + +- name: Test large changes - Same size (Realtime mode) + description: Test when changes are same size of set limit, 'More changes' appears in content_changes + configuration_parameters: + INTERVAL: 10000 + REALTIME: 'yes' + WHODATA: 'no' + metadata: + filename: regular_2 + original_size: 200000 + modified_size: 
200000 + has_more_changes: true + fim_mode: realtime + +- name: Test large changes - Same size (Whodata mode) + description: Test when changes are same size of set limit, 'More changes' appears in content_changes. + configuration_parameters: + INTERVAL: 10000 + REALTIME: 'no' + WHODATA: 'yes' + metadata: + filename: regular_2 + original_size: 200000 + modified_size: 200000 + has_more_changes: true + fim_mode: whodata + +- name: Test large changes - File bigger after change (Scheduled mode) + description: Test that changes are bigger than limit, 'More changes' appears in content_changes. + configuration_parameters: + INTERVAL: 4 + REALTIME: 'no' + WHODATA: 'no' + metadata: + filename: regular_3 + original_size: 10 + modified_size: 200000 + has_more_changes: true + fim_mode: scheduled + +- name: Test large changes - File bigger after change (Realtime mode) + description: Test that changes are bigger than limit, 'More changes' appears in content_changes. + configuration_parameters: + INTERVAL: 10000 + REALTIME: 'yes' + WHODATA: 'no' + metadata: + filename: regular_3 + original_size: 10 + modified_size: 200000 + has_more_changes: true + fim_mode: realtime + +- name: Test large changes - File bigger after change (Whodata mode) + description: Test that changes are bigger than limit, 'More changes' appears in content_changes. + configuration_parameters: + INTERVAL: 10000 + REALTIME: 'no' + WHODATA: 'yes' + metadata: + filename: regular_3 + original_size: 10 + modified_size: 200000 + has_more_changes: true + fim_mode: whodata + +- name: Test large changes - File smaller after change (Scheduled mode) + description: Test when file is smaller after change, 'More changes' appears in content_changes. + configuration_parameters: + INTERVAL: 4 + REALTIME: 'no' + WHODATA: 'no' + metadata: + filename: regular_4 + original_size: 200000 + modified_size: 10 + has_more_changes: true + fim_mode: scheduled + +- name: Test large changes - File smaller after change (Realtime mode) + description: Test when file is smaller after change, 'More changes' appears in content_changes. + configuration_parameters: + INTERVAL: 10000 + REALTIME: 'yes' + WHODATA: 'no' + metadata: + filename: regular_4 + original_size: 200000 + modified_size: 10 + has_more_changes: true + fim_mode: realtime + +- name: Test large changes - File smaller after change (Whodata mode) + description: Test when file is smaller after change, 'More changes' appears in content_changes. 
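
Each case above, and the Whodata variant that continues below, pairs an original and a modified file size with a has_more_changes flag, and the rewritten test consumes them through the helpers added earlier in this patch. A short usage sketch of those helpers, with the timeout chosen arbitrarily and the names taken from the event_monitor diff above:

    from wazuh_testing.modules.fim.event_monitor import (callback_detect_file_more_changes,
                                                         get_fim_event)

    # Wait up to 20 seconds (arbitrary) for a FIM event whose content_changes
    # field carries the 'More changes' tag, then inspect the returned JSON event.
    event = get_fim_event(timeout=20,
                          callback=callback_detect_file_more_changes,
                          error_message='Did not find an event with "More changes"')
    assert 'More changes' in event['data']['content_changes']
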
+ configuration_parameters: + INTERVAL: 10000 + REALTIME: 'no' + WHODATA: 'yes' + metadata: + filename: regular_4 + original_size: 200000 + modified_size: 10 + has_more_changes: true + fim_mode: whodata diff --git a/tests/integration/test_fim/test_files/test_report_changes/test_large_changes.py b/tests/integration/test_fim/test_files/test_report_changes/test_large_changes.py index bdc6c3c619..2a58e62cae 100644 --- a/tests/integration/test_fim/test_files/test_report_changes/test_large_changes.py +++ b/tests/integration/test_fim/test_files/test_report_changes/test_large_changes.py @@ -64,81 +64,50 @@ tags: - fim_report_changes ''' -import gzip import os -import shutil -import subprocess import sys import pytest from wazuh_testing.tools import PREFIX -from wazuh_testing.tools.configuration import load_wazuh_configurations +from wazuh_testing.tools.configuration import get_test_cases_data, load_configuration_template from wazuh_testing.tools.monitoring import FileMonitor -from wazuh_testing import global_parameters, LOG_FILE_PATH, REGULAR -from wazuh_testing.modules.fim import FIM_DEFAULT_LOCAL_INTERNAL_OPTIONS -from wazuh_testing.modules.fim.event_monitor import callback_detect_event -from wazuh_testing.modules.fim.utils import create_file, generate_params -from test_fim.common import generate_string, make_diff_file_path +from wazuh_testing import global_parameters, LOG_FILE_PATH, REGULAR, T_20 +from wazuh_testing.modules.fim import TEST_DIR_1 +from wazuh_testing.modules.fim import FIM_DEFAULT_LOCAL_INTERNAL_OPTIONS as local_internal_options +from wazuh_testing.modules.fim.event_monitor import (callback_detect_event, get_fim_event, + callback_detect_file_more_changes) +from wazuh_testing.modules.fim.utils import create_file +from test_fim.common import generate_string -# Marks +# Marks pytestmark = pytest.mark.tier(level=1) +# Reference paths +TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') +CONFIGURATIONS_PATH = os.path.join(TEST_DATA_PATH, 'configuration_template') +TEST_CASES_PATH = os.path.join(TEST_DATA_PATH, 'test_cases') -# variables -local_internal_options = FIM_DEFAULT_LOCAL_INTERNAL_OPTIONS -test_directories = [os.path.join(PREFIX, 'testdir')] -nodiff_file = os.path.join(PREFIX, 'testdir_nodiff', 'regular_file') -directory_str = ','.join(test_directories) -testdir = test_directories[0] -unzip_diff_dir = os.path.join(PREFIX, 'unzip_diff') - -wazuh_log_monitor = FileMonitor(LOG_FILE_PATH) -test_data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') -configurations_path = os.path.join(test_data_path, 'wazuh_conf.yaml') - -# configurations - -conf_params, conf_metadata = generate_params(extra_params={'REPORT_CHANGES': {'report_changes': 'yes'}, - 'TEST_DIRECTORIES': directory_str, - 'NODIFF_FILE': nodiff_file}) -configurations = load_wazuh_configurations(configurations_path, __name__, params=conf_params, metadata=conf_metadata) - - -# fixtures - -@pytest.fixture(scope='module', params=configurations) -def get_configuration(request): - """Get configurations from the module.""" - return request.param - - -# Functions - - -def extra_configuration_before_yield(): - """Create a folder to store diff files unzipped""" - os.makedirs(unzip_diff_dir, exist_ok=True) +# Configuration and cases data +test_cases_path = os.path.join(TEST_CASES_PATH, 'cases_large_changes.yaml') +configurations_path = os.path.join(CONFIGURATIONS_PATH, 'configuration_large_changes.yaml') +# Variables +test_directories = [os.path.join(PREFIX, TEST_DIR_1)] +testdir 
= test_directories[0] -def extra_configuration_after_yield(): - """Delete the folder after the test""" - shutil.rmtree(unzip_diff_dir, ignore_errors=True) +# Test configurations +configuration_parameters, configuration_metadata, test_case_ids = get_test_cases_data(test_cases_path) +for count, value in enumerate(configuration_parameters): + configuration_parameters[count]['TEST_DIRECTORIES'] = testdir +configurations = load_configuration_template(configurations_path, configuration_parameters, configuration_metadata) # Tests -@pytest.mark.skip('Test skipped for flaky behavior, after it is fixed by Issue wazuh/wazuh#3783, it will be unblocked') -@pytest.mark.parametrize('filename, folder, original_size, modified_size', [ - ('regular_0', testdir, 500, 500), - ('regular_1', testdir, 30000, 30000), - ('regular_2', testdir, 70000, 70000), - ('regular_3', testdir, 10, 20000), - ('regular_4', testdir, 10, 70000), - ('regular_5', testdir, 20000, 10), - ('regular_6', testdir, 70000, 10), -]) -def test_large_changes(filename, folder, original_size, modified_size, get_configuration, configure_environment, - configure_local_internal_options_module, restart_syscheckd, wait_for_fim_start): +@pytest.mark.parametrize('test_folders', [test_directories], scope="module", ids='') +@pytest.mark.parametrize('configuration, metadata', zip(configurations, configuration_metadata), ids=test_case_ids) +def test_large_changes(configuration, metadata, set_wazuh_configuration, configure_local_internal_options_function, + create_monitored_folders_module, restart_syscheck_function, wait_syscheck_start): ''' description: Check if the 'wazuh-syscheckd' daemon detects the character limit in the file changes is reached showing the 'More changes' tag in the 'content_changes' field of the generated events. For this @@ -147,38 +116,35 @@ def test_large_changes(filename, folder, original_size, modified_size, get_confi the test will verify that the generated FIM event contains in its 'content_changes' field the proper value depending on the test case. - wazuh_min_version: 4.2.0 + wazuh_min_version: 4.5.0 tier: 1 parameters: - - filename: - type: str - brief: Name of the testing file to be created. - - folder: - type: str - brief: Path to the directory where the testing files will be created. - - original_size: - type: int - brief: Size of the testing file in bytes before being modified. - - modified_size: - type: int - brief: Size of the testing file in bytes after being modified. - - get_configuration: + - configuration: + type: dict + brief: Configuration values. + - metadata: + type: dict + brief: Test case data. + - test_folders: + type: dict + brief: List of folders to be created for monitoring. + - set_wazuh_configuration: type: fixture - brief: Get configurations from the module. - - configure_environment: + brief: Set wazuh configuration. + - create_monitored_folders_module: type: fixture - brief: Configure a custom environment for testing. - - configure_local_internal_options_module: + brief: Create a given list of folders when the module starts. Delete the folders at the end of the module. + - configure_local_internal_options_function: type: fixture - brief: Configure the local internal options file. - - restart_syscheckd: + brief: Set the local internal options file. + - restart_syscheck_function: type: fixture - brief: Clear the 'ossec.log' file and start a new monitor. - - wait_for_fim_start: + brief: restart syscheckd daemon, and truncate the logs file. 
+ - wait_syscheck_start: type: fixture - brief: Wait for realtime start, whodata start, or end of initial FIM scan. + brief: check that the starting FIM scan is detected. assertions: - Verify that FIM events are generated when adding and modifying the testing file. @@ -190,68 +156,41 @@ def test_large_changes(filename, folder, original_size, modified_size, get_confi of the monitored file when the old content is lower than the allowed limit or the testing platform is Windows. - input_description: A test case (ossec_conf_report) is contained in external YAML file (wazuh_conf.yaml) - which includes configuration settings for the 'wazuh-syscheckd' daemon and, these - are combined with the testing directory and files to be monitored defined in the module. + input_description: The file 'configuration_large_changes.yaml' provides the configuration template. + The file 'cases_large_changes.yaml' provides the test cases configuration + details for each test case. expected_output: - r'.*Sending FIM event: (.+)$' ('added' and 'modified' events) - - The length of the testing file content by running the diff/fc command. - - tags: - - diff - - scheduled + - The 'More changes' message appears in content_changes when the changes size is bigger than the set limit. ''' - limit = 59391 - has_more_changes = False - original_file = os.path.join(folder, filename) - unzip_diff_file = os.path.join(unzip_diff_dir, filename + '-old') - diff_file_path = make_diff_file_path(folder, filename) - - fim_mode = get_configuration['metadata']['fim_mode'] + wazuh_log_monitor = FileMonitor(LOG_FILE_PATH) + limit = 50000 # Create the file and and capture the event. - original_string = generate_string(original_size, '0') - create_file(REGULAR, folder, filename, content=original_string) - - wazuh_log_monitor.start(timeout=global_parameters.default_timeout, callback=callback_detect_event).result() + original_string = generate_string(metadata['original_size'], '0') + create_file(REGULAR, testdir, metadata['filename'], content=original_string) - # Store uncompressed diff file in backup folder - with gzip.open(diff_file_path, 'rb') as f_in: - with open(unzip_diff_file, 'wb') as f_out: - shutil.copyfileobj(f_in, f_out) + wazuh_log_monitor.start(timeout=T_20, callback=callback_detect_event, + error_message="Did not receive the expected FIM event").result() # Modify the file with new content - modified_string = generate_string(modified_size, '1') - create_file(REGULAR, folder, filename, content=modified_string) - - event = wazuh_log_monitor.start(timeout=global_parameters.default_timeout, callback=callback_detect_event).result() - - # Run the diff/fc command and get the output length - try: - if sys.platform == 'win32': - subprocess.check_output(['fc', '/n', original_file, unzip_diff_file]) - else: - subprocess.check_output(['diff', original_file, unzip_diff_file]) - except subprocess.CalledProcessError as e: - # Inputs are different - if e.returncode == 1: - if sys.platform == 'win32' and b'*' not in e.output.split(b'\r\n')[1]: - has_more_changes = True - else: - if len(e.output) > limit: - has_more_changes = True + modified_string = generate_string(metadata['modified_size'], '1') + create_file(REGULAR, testdir, metadata['filename'], content=modified_string) # Assert 'More changes' is shown when the command returns more than 'limit' characters - if has_more_changes: - assert 'More changes' in event['data']['content_changes'], '"More changes" not found within content_changes.' 
+ if metadata['has_more_changes']: + event = get_fim_event(timeout=T_20, callback=callback_detect_file_more_changes, + error_message='Did not find event with "More changes" within content_changes.') else: + event = wazuh_log_monitor.start(timeout=T_20, callback=callback_detect_event, + error_message="Did not receive the expected FIM event").result() assert 'More changes' not in event['data']['content_changes'], '"More changes" found within content_changes.' # Assert old content is shown in content_changes assert '0' in event['data']['content_changes'], '"0" is the old value but it is not found within content_changes' # Assert new content is shown when old content is lower than the limit or platform is Windows - if original_size < limit or sys.platform == 'win32': + if metadata['original_size'] < limit or sys.platform == 'win32': assert '1' in event['data']['content_changes'], '"1" is the new value but it is not found ' \ 'within content_changes' From f36a43dae566582208a794d076febc6cea343618 Mon Sep 17 00:00:00 2001 From: quebim Date: Tue, 6 Jun 2023 12:05:31 -0300 Subject: [PATCH 08/10] style(#4188): fix linting errors --- .../data/enroll_ssl_options_tests.yaml | 124 +++++++++--------- .../test_authd/test_authd_ssl_options.py | 8 +- 2 files changed, 66 insertions(+), 66 deletions(-) diff --git a/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml b/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml index b07d936ca0..109a9c3ec6 100644 --- a/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml +++ b/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml @@ -1,86 +1,86 @@ - - name: "SSL - Default" - description: "Default ssl configuration" + name: SSL - Default + description: Default ssl configuration test_case: - expect: "output" - input: "OSSEC A:'user1'" - output: "OSSEC K:'" - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_2" + expect: output + input: OSSEC A:'user1' + output: OSSEC K:' + ciphers: HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH + protocol: ssl_tlsv1_2 - - name: "SSL - Wrong ciphers" - description: "Send a message with low encryption cypher suites" + name: SSL - Wrong ciphers + description: Send a message with low encryption cypher suites test_case: - expect: "open_error" - input: "OSSEC A:'user1'" - error: "handshake failure" - ciphers: "CAMELLIA" - protocol: "ssl_tlsv1_2" + expect: open_error + input: OSSEC A:'user1' + error: handshake failure + ciphers: CAMELLIA + protocol: ssl_tlsv1_2 - - name: "SSL - Incompatible ciphers from Agent" - description: "Send a message with low encryption cypher suites" + name: SSL - Incompatible ciphers from Agent + description: Send a message with low encryption cypher suites test_case: - ciphers: "CAMELLIA" - protocol: "ssl_tlsv1_2" - expect: "open_error" - error: "handshake failure" + ciphers: CAMELLIA + protocol: ssl_tlsv1_2 + expect: open_error + error: handshake failure - - name: "SSL - Incompatible ciphers from Manger" - description: "Send a message with low encryption cypher suites" + name: SSL - Incompatible ciphers from Manger + description: Send a message with low encryption cypher suites test_case: - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_2" - expect: "open_error" - error: "handshake failure" + ciphers: HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH + protocol: ssl_tlsv1_2 + expect: open_error + error: handshake failure # Override ossec.conf - CIPHERS: "CAMELLIA" + CIPHERS: CAMELLIA - - name: 
"SSL - Compatible ciphers from Agent" - description: "Send a message with a compatible yet not default cypher" + name: SSL - Compatible ciphers from Agent + description: Send a message with a compatible yet not default cypher test_case: - expect: "output" - input: "OSSEC A:'user1'" - output: "OSSEC K:'" - ciphers: "SHA256" - protocol: "ssl_tlsv1_2" + expect: output + input: OSSEC A:'user1' + output: OSSEC K:' + ciphers: SHA256 + protocol: ssl_tlsv1_2 - - name: "SSL - Wrong TLS version (TLSV1_1)" - description: "Send a message with a different TLS version with Auto negotiate disabled" + name: SSL - Wrong TLS version (TLSV1_1) + description: Send a message with a different TLS version with Auto negotiate disabled test_case: - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_1" - expect: "open_error" - error: "alert protocol version" + ciphers: HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH + protocol: ssl_tlsv1_1 + expect: open_error + error: alert protocol version # Override ossec.conf SSL_AUTO_NEGOTIATE: 'no' - - name: "SSL - Auto Negotiate TLS version (TLSV1_1)" - description: "Send a message with a different TLS version with Auto negotiate enabled" + name: SSL - Auto Negotiate TLS version (TLSV1_1) + description: Send a message with a different TLS version with Auto negotiate enabled test_case: - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_1" - expect: "output" - input: "OSSEC A:'user1'" - output: "OSSEC K:'" + ciphers: HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH + protocol: ssl_tlsv1_1 + expect: output + input: OSSEC A:'user1' + output: OSSEC K:' # Override ossec.conf SSL_AUTO_NEGOTIATE: 'yes' - - name: "SSL - Compatible ciphers from Manger" - description: "Send a message with a compatible yet not default cypher" + name: SSL - Compatible ciphers from Manger + description: Send a message with a compatible yet not default cypher test_case: - expect: "output" - input: "OSSEC A:'user1'" - output: "OSSEC K:'" - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_2" + expect: output + input: OSSEC A:'user1' + output: OSSEC K:' + ciphers: HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH + protocol: ssl_tlsv1_2 # Override ossec.conf - CIPHERS: "SHA256" + CIPHERS: SHA256 - - name: "Valid Certificates - Manager verification without host" - description: "Enables CA Certificate and validates that conneciton is acepted when valid certs are provided" + name: Valid Certificates - Manager verification without host + description: Enables CA Certificate and validates that conneciton is acepted when valid certs are provided test_case: - ciphers: "HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH" - protocol: "ssl_tlsv1_2" - expect: "output" - input: "OSSEC A:'user1'" - output: "OSSEC K:'" + ciphers: HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH + protocol: ssl_tlsv1_2 + expect: output + input: OSSEC A:'user1' + output: OSSEC K:' diff --git a/tests/integration/test_authd/test_authd_ssl_options.py b/tests/integration/test_authd/test_authd_ssl_options.py index 8b30415d8a..d469d234d3 100644 --- a/tests/integration/test_authd/test_authd_ssl_options.py +++ b/tests/integration/test_authd/test_authd_ssl_options.py @@ -186,16 +186,16 @@ def test_ossec_auth_configurations(get_configuration, configure_environment, con ciphers = config['ciphers'] protocol = config['protocol'] expect = config['expect'] - + if protocol == 'ssl_tlsv1_1': pytest.skip('TLS 1.1 is deprecated and not working on 
several pyOpenSSL versions.') - + override_wazuh_conf(get_configuration) - + address, family, connection_protocol = receiver_sockets_params[0] SSL_socket = SocketController(address, family=family, connection_protocol=connection_protocol, open_at_start=False) - + SSL_socket.set_ssl_configuration(ciphers=ciphers, connection_protocol=protocol) try: From f040899c85b033d72895732c2d6bfa4cb3a46fd7 Mon Sep 17 00:00:00 2001 From: quebim Date: Tue, 6 Jun 2023 16:22:00 -0300 Subject: [PATCH 09/10] style(#4188): fix typo --- .../integration/test_authd/data/enroll_ssl_options_tests.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml b/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml index 109a9c3ec6..f25af41f7f 100644 --- a/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml +++ b/tests/integration/test_authd/data/enroll_ssl_options_tests.yaml @@ -25,7 +25,7 @@ expect: open_error error: handshake failure - - name: SSL - Incompatible ciphers from Manger + name: SSL - Incompatible ciphers from Manager description: Send a message with low encryption cypher suites test_case: ciphers: HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH @@ -65,7 +65,7 @@ # Override ossec.conf SSL_AUTO_NEGOTIATE: 'yes' - - name: SSL - Compatible ciphers from Manger + name: SSL - Compatible ciphers from Manager description: Send a message with a compatible yet not default cypher test_case: expect: output From 49b52bacabc8db4ba150298075ff0a2bfdb876a1 Mon Sep 17 00:00:00 2001 From: quebim Date: Fri, 30 Jun 2023 09:40:34 -0300 Subject: [PATCH 10/10] merge(#4188): merge 4.6.0 into 4188-fix-openssl-errors --- .github/workflows/changelog.yaml | 1 + .github/workflows/code_analysis.yaml | 1 + CHANGELOG.md | 50 ++- deps/wazuh_testing/setup.py | 15 +- deps/wazuh_testing/wazuh_testing/__init__.py | 2 + .../modules/analysisd/__init__.py | 5 + .../modules/aws/event_monitor.py | 2 +- .../modules/integratord/__init__.py | 35 +- .../modules/integratord/event_monitor.py | 103 ++++- .../wazuh_testing/modules/sca/__init__.py | 11 + .../modules/sca/event_monitor.py | 138 +++++++ .../wazuh_testing/qa_docs/schema.yaml | 4 + .../wazuh_testing/tools/__init__.py | 3 +- .../wazuh_testing/wazuh_testing/tools/file.py | 25 +- .../wazuh_testing/tools/services.py | 2 +- tests/integration/conftest.py | 80 +++- .../test_analysisd/test_os_exec.py | 2 +- .../test_config/test_cors/test_cors.py | 1 - .../test_max_upload_size.py | 4 +- .../test_authd_use_password_invalid.py | 3 +- tests/integration/test_aws/test_basic.py | 6 +- .../test_aws/test_discard_regex.py | 2 +- tests/integration/test_aws/test_log_groups.py | 2 +- .../test_aws/test_only_logs_after.py | 16 +- tests/integration/test_aws/test_parser.py | 16 +- tests/integration/test_aws/test_path.py | 2 +- .../integration/test_aws/test_path_suffix.py | 2 +- tests/integration/test_aws/test_regions.py | 6 +- .../test_aws/test_remove_from_bucket.py | 4 +- .../test_agent_auth_enrollment.py | 2 +- ...est_agentd_server_address_configuration.py | 4 +- .../test_basic_usage_realtime_unsupported.py | 4 +- .../test_file_limit_capacity_alerts.py | 2 +- .../test_file_limit_default.py | 2 +- .../test_file_limit_delete_full.py | 2 +- .../test_file_limit/test_file_limit_full.py | 2 +- .../test_file_limit_no_limit.py | 2 +- .../test_file_limit/test_file_limit_values.py | 2 +- .../test_files/test_max_eps/test_max_eps.py | 2 +- .../test_diff_size_limit_configured.py | 2 +- .../test_diff_size_limit_default.py | 2 +- 
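
The SSL cases reorganized above all reduce to the same probe: open a TLS connection to authd with a pinned protocol and cipher list, send the enrollment request, and classify the outcome as 'output' or 'open_error'. A minimal standalone sketch of that probe; the host, port and the use of PROTOCOL_TLS_CLIENT are assumptions for illustration, not the framework's SocketController implementation:

    import socket
    import ssl

    def probe(host='localhost', port=1515,
              ciphers='HIGH:!ADH:!EXP:!MD5:!RC4:!3DES:!CAMELLIA:@STRENGTH'):
        # Pin the cipher list; certificate checks are disabled because the cases
        # above exercise negotiation, not identity verification.
        context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
        context.check_hostname = False
        context.verify_mode = ssl.CERT_NONE
        context.set_ciphers(ciphers)
        try:
            with socket.create_connection((host, port)) as raw:
                with context.wrap_socket(raw, server_hostname=host) as tls:
                    tls.sendall(b"OSSEC A:'user1'")
                    # A successful case expects a reply starting with "OSSEC K:'".
                    return 'output', tls.recv(4096).decode(errors='replace')
        except ssl.SSLError as error:
            # Failure cases match a substring such as 'handshake failure'.
            return 'open_error', str(error)

Cases pinned to ssl_tlsv1_1 are skipped in the test change above because, as the skip message notes, TLS 1.1 is deprecated and fails outright on several OpenSSL/pyOpenSSL builds, so the probe would never reach the cipher negotiation those cases intend to exercise.
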
.../test_disk_quota_default.py | 2 +- .../test_disk_quota_disabled.py | 2 +- .../test_file_size_default.py | 2 +- .../test_file_size_disabled.py | 2 +- .../test_file_size_values.py | 2 +- .../test_report_changes/test_large_changes.py | 2 +- .../test_report_changes_and_diff.py | 2 +- .../test_report_deleted_diff.py | 4 +- .../test_whodata_policy_change.py | 2 +- .../test_windows_system_folder_redirection.py | 2 +- ...basic_usage_registry_duplicated_entries.py | 2 +- .../test_registry_key_limit_full.py | 2 +- .../test_registry_limit_capacity_alerts.py | 2 +- .../test_registry_limit_values.py | 2 +- .../test_registry_value_limit_full.py | 2 +- .../test_synchronization/test_sync_overlap.py | 2 +- .../test_synchronization/test_sync_time.py | 2 +- .../test_configuration/test_invalid.py | 7 +- .../test_remote_configuration.py | 5 +- .../test_configuration/test_schedule.py | 10 +- .../test_functionality/test_day_wday.py | 92 ++--- .../test_functionality/test_interval.py | 6 +- .../test_functionality/test_logging.py | 13 +- .../test_functionality/test_max_messages.py | 3 +- .../test_functionality/test_pull_on_start.py | 2 +- .../test_functionality/test_rules.py | 4 +- .../integration/test_integratord/conftest.py | 19 +- .../configuration_alerts_reading.yaml} | 30 +- .../cases_integratord_change_inode_alert.yaml | 33 +- ..._integratord_read_invalid_json_alerts.yaml | 66 ++-- ...es_integratord_read_json_file_deleted.yaml | 17 - ...es_integratord_read_valid_json_alerts.yaml | 33 +- .../test_integratord/test_alerts_reading.py | 357 ++++++++++++++++++ .../test_integratord_change_inode_alert.py | 166 -------- .../test_integratord_read_json_alerts.py | 212 ----------- ...test_integratord_read_json_file_deleted.py | 154 -------- .../test_invalid_agent_localfile_config.py | 11 +- .../test_localfile/test_invalid_wazuh_conf.py | 12 +- .../test_ignore_regex.py | 2 +- .../test_restrict_ignore_regex.py | 2 +- .../test_restrict_regex.py | 2 +- .../test_macos_file_status_basic.py | 4 +- .../test_macos_file_status_predicate.py | 4 +- .../test_macos_file_status_when_no_macos.py | 7 +- .../test_macos/test_macos_format_basic.py | 6 +- .../test_macos_format_only_future_events.py | 4 +- .../test_macos/test_macos_multiline_values.py | 10 +- .../test_statistics/test_statistics_macos.py | 4 +- tests/integration/test_sca/conftest.py | 53 +++ .../configuration_sca.yaml | 30 ++ .../data/policies/cis_centos8_osregex.yaml | 56 +++ .../data/policies/cis_centos8_pcre2.yaml | 57 +++ .../cis_centos8_validate_remediation.yaml | 41 ++ .../data/test_cases/cases_sca_disabled.yaml | 9 + .../data/test_cases/cases_sca_enabled.yaml | 9 + .../data/test_cases/cases_scan_results.yaml | 21 ++ .../cases_validate_remediation.yaml | 25 ++ tests/integration/test_sca/test_basic.py | 194 ++++++++++ .../integration/test_sca/test_scan_results.py | 141 +++++++ .../test_sca/test_validate_remediation.py | 144 +++++++ .../test_cpe_helper/test_cpe_helper.py | 10 +- .../test_feeds/test_download_feeds.py | 11 +- .../test_feeds/test_msu_inventory.py | 2 +- tests/scans/code_analysis/conftest.py | 15 +- tests/scans/conftest.py | 10 +- tests/scans/dependencies/test_dependencies.py | 2 +- .../roles/master-role/tasks/main.yml | 2 +- .../roles/worker-role/tasks/main.yml | 2 +- .../roles/master-role/tasks/main.yml | 2 +- .../roles/worker-role/tasks/main.yml | 2 +- .../master-role/tasks/{main.yaml => main.yml} | 2 +- .../worker-role/tasks/{main.yaml => main.yml} | 2 +- .../test_firewall_alerts.py | 2 +- .../test_firewall_status.py | 2 +- 
.../test_agent_files_deletion.py | 4 +- .../test_agent_info_sync.py | 8 +- version.json | 4 +- 118 files changed, 1864 insertions(+), 902 deletions(-) create mode 100644 deps/wazuh_testing/wazuh_testing/modules/sca/__init__.py create mode 100644 deps/wazuh_testing/wazuh_testing/modules/sca/event_monitor.py rename tests/integration/test_integratord/data/{configuration_template/config_integratord_read_json_alerts.yaml => configuration/configuration_alerts_reading.yaml} (54%) delete mode 100644 tests/integration/test_integratord/data/test_cases/cases_integratord_read_json_file_deleted.yaml create mode 100644 tests/integration/test_integratord/test_alerts_reading.py delete mode 100644 tests/integration/test_integratord/test_integratord_change_inode_alert.py delete mode 100644 tests/integration/test_integratord/test_integratord_read_json_alerts.py delete mode 100644 tests/integration/test_integratord/test_integratord_read_json_file_deleted.py create mode 100644 tests/integration/test_sca/conftest.py create mode 100644 tests/integration/test_sca/data/configuration_template/configuration_sca.yaml create mode 100644 tests/integration/test_sca/data/policies/cis_centos8_osregex.yaml create mode 100644 tests/integration/test_sca/data/policies/cis_centos8_pcre2.yaml create mode 100644 tests/integration/test_sca/data/policies/cis_centos8_validate_remediation.yaml create mode 100644 tests/integration/test_sca/data/test_cases/cases_sca_disabled.yaml create mode 100644 tests/integration/test_sca/data/test_cases/cases_sca_enabled.yaml create mode 100644 tests/integration/test_sca/data/test_cases/cases_scan_results.yaml create mode 100644 tests/integration/test_sca/data/test_cases/cases_validate_remediation.yaml create mode 100644 tests/integration/test_sca/test_basic.py create mode 100644 tests/integration/test_sca/test_scan_results.py create mode 100644 tests/integration/test_sca/test_validate_remediation.py rename tests/system/provisioning/four_manager_disconnected_node/roles/master-role/tasks/{main.yaml => main.yml} (96%) rename tests/system/provisioning/four_manager_disconnected_node/roles/worker-role/tasks/{main.yaml => main.yml} (96%) diff --git a/.github/workflows/changelog.yaml b/.github/workflows/changelog.yaml index 9baad5c564..7a3bd6eda3 100644 --- a/.github/workflows/changelog.yaml +++ b/.github/workflows/changelog.yaml @@ -4,6 +4,7 @@ on: types: - opened - ready_for_review + - synchronize branches: - master - '[0-9]+.[0-9]+' diff --git a/.github/workflows/code_analysis.yaml b/.github/workflows/code_analysis.yaml index 5419dcb918..b2838026b5 100644 --- a/.github/workflows/code_analysis.yaml +++ b/.github/workflows/code_analysis.yaml @@ -4,6 +4,7 @@ on: types: - opened - ready_for_review + - synchronize jobs: Linting: if: ${{ !github.event.pull_request.draft }} diff --git a/CHANGELOG.md b/CHANGELOG.md index b1320511a4..ab17ad264a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,10 +2,17 @@ All notable changes to this project will be documented in this file. +## [4.6.0] - TBD + +Wazuh commit: TBD \ +Release report: TBD + ## [4.5.0] - TBD -### Added +Wazuh commit: TBD \ +Release report: TBD +- New 'SCA' test suite and framework. ([#3566](https://github.com/wazuh/wazuh-qa/pull/3566)) \- (Framework + Tests) - Add integration tests for AWS module. ([#3911](https://github.com/wazuh/wazuh-qa/pull/3911)) \- (Framework + Tests + Documentation) - Add tests for msu patches with no associated CVE . 
([#4009](https://github.com/wazuh/wazuh-qa/pull/4009)) \- (Framework + Tests) - Add tests with new options to avoid FIM synchronization overlapping. ([#3318](https://github.com/wazuh/wazuh-qa/pull/3318)) \- (Framework + tests) @@ -19,7 +26,8 @@ All notable changes to this project will be documented in this file. ### Changed -- Skip `test_authd_ssl_options` cases that use TLS 1.1 causing errors on several OpenSSL versions \- (Tests) +- Skip `test_authd_ssl_options` cases that use TLS 1.1 causing errors on several OpenSSL versions. ([#4229](https://github.com/wazuh/wazuh-qa/pull/4229)) \- (Tests) +- Replace embedded python invocations with generic `python3`. ([#4186](https://github.com/wazuh/wazuh-qa/pull/4186)) - (Tests) - Fix FIM test_large_changes test suite ([#3948](https://github.com/wazuh/wazuh-qa/pull/3948)) \- (Tests) - Update `get_test_cases_data` function so it handles fim_mode parameter ([#4185](https://github.com/wazuh/wazuh-qa/pull/4185)) \- (Framework) - Change FIM `regular_file_cud` and `EventChecker` file modification steps ([#4183](https://github.com/wazuh/wazuh-qa/pull/4183)) \- (Framework + Tests) @@ -47,27 +55,57 @@ All notable changes to this project will be documented in this file. - Fix imports and add windows support for test_report_changes_and_diff IT ([#3548](https://github.com/wazuh/wazuh-qa/issues/3548)) \- (Framework + Tests) - Fix a regex error in the FIM integration tests ([#3061](https://github.com/wazuh/wazuh-qa/issues/3061)) \- (Framework + Tests) - Fix an error in the cluster performance tests related to CSV parser ([#2999](https://github.com/wazuh/wazuh-qa/pull/2999)) \- (Framework + Tests) -- Fix bug in the framework on migration tool ([#2999](https://github.com/wazuh/wazuh-qa/pull/4027)) \- (Framework) +- Fix bug in the framework on migration tool ([#4027](https://github.com/wazuh/wazuh-qa/pull/4027)) \- (Framework) + +## [4.4.5] - TBD + +Wazuh commit: TBD \ +Release report: TBD + +## [4.4.4] - TBD + +Wazuh commit: TBD \ +Release report: TBD + +## Added + +- Change test_python_flaws.py to accept branch or commit in the same argument. ([#4209](https://github.com/wazuh/wazuh-qa/pull/4209)) (Tests) +- Fix test_dependencies.py for the changes in the feature. ([#4210](https://github.com/wazuh/wazuh-qa/pull/4210)) (Tests) + +### Fixed + +- Fix syscollector tests failure (get_configuration fixture has different scope) ([#4154](https://github.com/wazuh/wazuh-qa/pull/4154)) \- (Framework + Tests) ## [4.4.3] - 25-06-2023 -Wazuh commit: https://github.com/wazuh/wazuh/commit/f7080df56081adaeaad94529522233e2f0bbd577 +Wazuh commit: https://github.com/wazuh/wazuh/commit/f7080df56081adaeaad94529522233e2f0bbd577 \ Release report: https://github.com/wazuh/wazuh/issues/17198 +### Fixed + +- Fix missing comma in setup.py. ([#4180](https://github.com/wazuh/wazuh-qa/pull/4180)) (Framework) +- Changed the last uses of 4.4.2 in setup.py and schema.yaml. ([#4172](https://github.com/wazuh/wazuh-qa/pull/4172)) \- (Framework) + ## [4.4.2] - 18-05-2023 -Wazuh commit: https://github.com/wazuh/wazuh/commit/b2901d5086e7a073d89f4f72827e070ce3abd8e8 +Wazuh commit: https://github.com/wazuh/wazuh/commit/b2901d5086e7a073d89f4f72827e070ce3abd8e8 \ Release report: https://github.com/wazuh/wazuh/issues/17004 - ### Added - Add package support for system tests ([#3965](https://github.com/wazuh/wazuh-qa/pull/3966)) \- (Framework) - Add test to check the Syscollector configuration. 
([#3584](https://github.com/wazuh/wazuh-qa/pull/3584)) \- (Framework + Tests) - Add system tests for groups deletion ([#4057](https://github.com/wazuh/wazuh-qa/pull/4057)) \- (Tests) +### Changed + +- Change integratord test to use slack instead of virustotal ([#3540](https://github.com/wazuh/wazuh-qa/pull/3540)) \- (Framework + Tests) + ### Fixed +- Stabilize multiple wday tests (GCloud integration) ([#4176](https://github.com/wazuh/wazuh-qa/pull/4176)) \- (Tests) +- Remove old XFail marker (API suite) ([#4177](https://github.com/wazuh/wazuh-qa/pull/4177)) \- (Tests) +- Mark VD download feeds test as xfail ([#4197](https://github.com/wazuh/wazuh-qa/pull/4197)) \- (Tests) - Skip test_age_datetime_changed ([#4182](https://github.com/wazuh/wazuh-qa/pull/4182)) \- (Tests) - Limit urllib3 major required version ([#4162](https://github.com/wazuh/wazuh-qa/pull/4162)) \- (Framework) - Fix daemons_handler fixture (fix GCP IT) ([#4134](https://github.com/wazuh/wazuh-qa/pull/4134)) \- (Tests) diff --git a/deps/wazuh_testing/setup.py b/deps/wazuh_testing/setup.py index 1dd1b2fe4e..f2b2c21098 100644 --- a/deps/wazuh_testing/setup.py +++ b/deps/wazuh_testing/setup.py @@ -1,6 +1,7 @@ # Copyright (C) 2015-2021, Wazuh Inc. # Created by Wazuh, Inc. . # This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 +import json from setuptools import setup, find_packages import os @@ -43,7 +44,7 @@ 'wazuh-log-metrics=wazuh_testing.scripts.wazuh_log_metrics:main', 'qa-docs=wazuh_testing.scripts.qa_docs:main', 'qa-ctl=wazuh_testing.scripts.qa_ctl:main', - 'check-files=wazuh_testing.scripts.check_files:main' + 'check-files=wazuh_testing.scripts.check_files:main', 'add-agents-client-keys=wazuh_testing.scripts.add_agents_client_keys:main', 'unsync-agents=wazuh_testing.scripts.unsync_agents:main', 'stress_results_comparator=wazuh_testing.scripts.stress_results_comparator:main' @@ -58,11 +59,21 @@ def get_files_from_directory(directory): return paths +def get_version(): + script_path = os.path.dirname(__file__) + rel_path = "../../version.json" + abs_file_path = os.path.join(script_path, rel_path) + f = open(abs_file_path) + data = json.load(f) + version = data['version'] + return version + + package_data_list.extend(get_files_from_directory('wazuh_testing/qa_docs/search_ui')) setup( name='wazuh_testing', - version='4.5.0', + version=get_version(), description='Wazuh testing utilities to help programmers automate tests', url='https://github.com/wazuh', author='Wazuh', diff --git a/deps/wazuh_testing/wazuh_testing/__init__.py b/deps/wazuh_testing/wazuh_testing/__init__.py index bf72e1e136..43ce0de45b 100644 --- a/deps/wazuh_testing/wazuh_testing/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/__init__.py @@ -40,6 +40,7 @@ API_JSON_LOG_FILE_PATH = os.path.join(WAZUH_PATH, 'logs', 'api.json') API_LOG_FOLDER = os.path.join(WAZUH_PATH, 'logs', 'api') WAZUH_TESTING_PATH = os.path.dirname(os.path.abspath(__file__)) +CIS_RULESET_PATH = os.path.join(WAZUH_PATH, 'ruleset', 'sca') WAZUH_TESTING_DATA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') DEFAULT_AUTHD_PASS_PATH = os.path.join(WAZUH_PATH, 'etc', 'authd.pass') TEMPLATE_DIR = 'configuration_template' @@ -79,6 +80,7 @@ # Local internal options +MODULESD_DEBUG = 'wazuh_modules.debug' WINDOWS_DEBUG = 'windows.debug' SYSCHECK_DEBUG = 'syscheck.debug' VERBOSE_DEBUG_OUTPUT = 2 diff --git a/deps/wazuh_testing/wazuh_testing/modules/analysisd/__init__.py 
b/deps/wazuh_testing/wazuh_testing/modules/analysisd/__init__.py index dd2a5835b3..351128ac38 100644 --- a/deps/wazuh_testing/wazuh_testing/modules/analysisd/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/modules/analysisd/__init__.py @@ -8,3 +8,8 @@ 'analysisd.sca_threads': '1', 'analysisd.hostinfo_threads': '1', 'analysisd.winevt_threads': '1', 'analysisd.rule_matching_threads': '1', 'analysisd.dbsync_threads': '1', 'remoted.worker_pool': '1'} +# Callback Messages +CB_ANALYSISD_STARTUP_COMPLETED = fr"DEBUG: Startup completed. Waiting for new messages.*" + +# Error messages +ERR_MSG_STARTUP_COMPLETED_NOT_FOUND = fr'Did not receive the expected "{CB_ANALYSISD_STARTUP_COMPLETED}"' diff --git a/deps/wazuh_testing/wazuh_testing/modules/aws/event_monitor.py b/deps/wazuh_testing/wazuh_testing/modules/aws/event_monitor.py index 757481ee78..f3c9c89788 100644 --- a/deps/wazuh_testing/wazuh_testing/modules/aws/event_monitor.py +++ b/deps/wazuh_testing/wazuh_testing/modules/aws/event_monitor.py @@ -79,7 +79,7 @@ def callback_detect_aws_module_warning(line): Optional[str]: Line if it matches. """ - if re.match(r".*WARNING: No buckets or services definitions found at module 'aws-s3'.", line): + if re.match(r".*WARNING: No buckets, services or subscribers definitions found at module 'aws-s3'.", line): return line diff --git a/deps/wazuh_testing/wazuh_testing/modules/integratord/__init__.py b/deps/wazuh_testing/wazuh_testing/modules/integratord/__init__.py index a417bbf7a5..5895a9aa26 100644 --- a/deps/wazuh_testing/wazuh_testing/modules/integratord/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/modules/integratord/__init__.py @@ -3,29 +3,18 @@ Created by Wazuh, Inc. . This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 ''' +from wazuh_testing.tools import ANALYSISD_DAEMON, DB_DAEMON, INTEGRATOR_DAEMON # Variables -INTEGRATORD_PREFIX = 'wazuh-integratord' +INTEGRATORD_PREFIX = fr".+{INTEGRATOR_DAEMON}" +REQUIRED_DAEMONS = [INTEGRATOR_DAEMON, DB_DAEMON, ANALYSISD_DAEMON] +TIME_TO_DETECT_FILE = 2 -# Callback Messages -CB_VIRUSTOTAL_ENABLED = r".*(wazuh-integratord.*Enabling integration for: 'virustotal').*" -CB_INTEGRATORD_SENDING_ALERT = r'(.*wazuh-integratord.*DEBUG: sending new alert).*' -CB_PROCESSING_ALERT = r'.*(wazuh-integratord.*Processing alert.*)' -CB_INTEGRATORD_THREAD_READY = r'.*(wazuh-integratord.*DEBUG: Local requests thread ready).*' -CB_VIRUSTOTAL_ALERT = r'.*(wazuh-integratord.*alert_id.*\"integration\": \"virustotal\").*' -CB_VIRUSTOTAL_ALERT_JSON = r'.*(VirusTotal: Alert.*\"integration\":\"virustotal\").*' -CB_INVALID_JSON_ALERT_READ = r'.*(wazuh-integratord.*WARNING: Invalid JSON alert read).*' -CB_OVERLONG_JSON_ALERT_READ = r'.*(wazuh-integratord.*WARNING: Overlong JSON alert read).*' -CB_ALERTS_FILE_INODE_CHANGED = r'.*(wazuh-integratord.*DEBUG: jqueue_next.*Alert file inode changed).*' -CB_CANNOT_RETRIEVE_JSON_FILE = r'.*(wazuh-integratord.*WARNING.*Could not retrieve information of file).*'\ - r'alerts\.json.*No such file.*' - -# Error messages -ERR_MSG_VIRUST_TOTAL_ENABLED_NOT_FOUND = r'Did not recieve the expected "Enabling integration for virustotal"' -ERR_MSG_VIRUSTOTAL_ALERT_NOT_DETECTED = r'Did not recieve the expected VirusTotal alert in alerts.json' -ERR_MSG_INVALID_ALERT_NOT_FOUND = r'Did not recieve the expected "...Invalid JSON alert read..." event' -ERR_MSG_OVERLONG_ALERT_NOT_FOUND = r'Did not recieve the expected "...Overlong JSON alert read..."
event' -ERR_MSG_ALERT_INODE_CHANGED_NOT_FOUND = r'Did not recieve the expected "...Alert file inode changed..." event' -ERR_MSG_CANNOT_RETRIEVE_MSG_NOT_FOUND = r'Did not recieve the expected "...Could not retrieve information/open file"' -ERR_MSG_SENDING_ALERT_NOT_FOUND = r'Did not recieve the expected "...sending new alert" event' -ERR_MSG_PROCESSING_ALERT_NOT_FOUND = r'Did not recieve the expected "...Procesing alert" event' +# Callbacks +CB_INVALID_ALERT_READ = r'.*WARNING: Invalid JSON alert read.*' +CB_OVERLONG_ALERT_READ = r'.*WARNING: Overlong JSON alert read.*' +CB_ALERT_JSON_FILE_NOT_FOUND = r'.+WARNING.*Could not retrieve information of file.*alerts\.json.*No such file.*' +CB_THIRD_PARTY_RESPONSE = r'.*' +CB_PROCESSING_ALERT = r'.*Processing alert.*' +CB_INODE_CHANGED = r'.*DEBUG: jqueue_next.*Alert file inode changed.*' +CB_INTEGRATORD_THREAD_IS_READY = r'.*DEBUG: Local requests thread ready.*' diff --git a/deps/wazuh_testing/wazuh_testing/modules/integratord/event_monitor.py b/deps/wazuh_testing/wazuh_testing/modules/integratord/event_monitor.py index 0e9697ab2c..e3682e886d 100644 --- a/deps/wazuh_testing/wazuh_testing/modules/integratord/event_monitor.py +++ b/deps/wazuh_testing/wazuh_testing/modules/integratord/event_monitor.py @@ -3,14 +3,39 @@ Created by Wazuh, Inc. . This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 ''' +import re +from wazuh_testing import T_5, T_20, T_30 from wazuh_testing.tools import LOG_FILE_PATH from wazuh_testing.tools.monitoring import FileMonitor +from wazuh_testing.modules import integratord + + +def make_integratord_callback(pattern, prefix=integratord.INTEGRATORD_PREFIX): + '''Create a callback function from a text pattern. + + It already contains the integratord prefix. + + Args: + pattern (str): String to match on the log. + prefix (str): regular expression used as prefix before the pattern. + + Returns: + lambda: function that returns whether there is a match in the line + + Examples: + >>> callback_thread_ready = make_integratord_callback("Local requests thread ready") + ''' + pattern = r'\s+'.join(pattern.split()) + regex = re.compile(r'{}{}'.format(prefix, pattern)) + + return lambda line: regex.match(line) is not None def check_integratord_event(file_monitor=None, callback='', error_message=None, update_position=True, - timeout=30, accum_results=1, file_to_monitor=LOG_FILE_PATH): - """Check if an event occurs + timeout=T_30, accum_results=1, file_to_monitor=LOG_FILE_PATH, + prefix=integratord.INTEGRATORD_PREFIX): + '''Check if an event occurs Args: file_monitor (FileMonitor): FileMonitor object to monitor the file content. callback (str): log regex to check in Wazuh log @@ -18,10 +43,80 @@ def check_integratord_event(file_monitor=None, callback='', error_message=None, update_position (boolean): filter configuration parameter to search in Wazuh log timeout (str): timeout to check the event in Wazuh log accum_results (int): Accumulation of matches.
- """ + prefix (str): log pattern regex + ''' file_monitor = FileMonitor(file_to_monitor) if file_monitor is None else file_monitor error_message = f"Could not find this event in {file_to_monitor}: {callback}" if error_message is None else \ error_message file_monitor.start(timeout=timeout, update_position=update_position, accum_results=accum_results, - callback=callback, error_message=error_message) + callback=make_integratord_callback(callback, prefix), error_message=error_message) + + +def check_integratord_thread_ready(file_monitor=None, timeout=T_5): + '''Check if the local requests thread is ready in the logs. + + Args: + file_monitor (FileMonitor): Log monitor. + timeout (int): Event timeout. + ''' + check_integratord_event(file_monitor=file_monitor, timeout=timeout, + callback=integratord.CB_INTEGRATORD_THREAD_IS_READY, + error_message='Did not receive the expected "Local requests thread ready"') + + +def check_file_inode_changed(file_monitor=None, timeout=T_20): + '''Check for the Alert file inode changed message in the logs. + + Args: + file_monitor (FileMonitor): Log monitor. + timeout (int): Event timeout. + ''' + check_integratord_event(file_monitor=file_monitor, timeout=timeout, callback=integratord.CB_INODE_CHANGED, + error_message='Did not receive the expected "...Alert file inode changed..." event') + + +def check_alert_processing(file_monitor=None, timeout=T_20): + '''Check for the Processing alert message in the logs. + + Args: + file_monitor (FileMonitor): Log monitor. + timeout (int): Event timeout. + ''' + check_integratord_event(file_monitor=file_monitor, timeout=timeout, callback=integratord.CB_PROCESSING_ALERT, + error_message='Did not receive the expected Slack alert in alerts.json') + + +def check_third_party_response(file_monitor=None, timeout=T_20): + '''Check for the third-party response message in the logs. + + Args: + file_monitor (FileMonitor): Log monitor. + timeout (int): Event timeout. + ''' + check_integratord_event(file_monitor=file_monitor, timeout=timeout, callback=integratord.CB_THIRD_PARTY_RESPONSE, + error_message='Could not send the alert to slack') + + +def check_invalid_alert_read(file_monitor=None, timeout=T_20, callback='', error_message=None): + '''Check for the invalid alert read message in the logs. + + Args: + file_monitor (FileMonitor): Log monitor. + timeout (int): Event timeout. + callback (str): log regex to check in Wazuh log + error_message (str): error message to show in case of expected event does not occur + ''' + check_integratord_event(file_monitor=file_monitor, timeout=timeout, callback=callback, error_message=error_message) + + +def check_file_warning(file_monitor=None, timeout=T_20): + '''Check for the 'Could not retrieve information of file' warning in the logs. + + Args: + file_monitor (FileMonitor): Log monitor. + timeout (int): Event timeout. + ''' + check_integratord_event(file_monitor=file_monitor, timeout=timeout, + callback=integratord.CB_ALERT_JSON_FILE_NOT_FOUND, + error_message='Did not receive the expected "...Could not retrieve information/open file"') diff --git a/deps/wazuh_testing/wazuh_testing/modules/sca/__init__.py b/deps/wazuh_testing/wazuh_testing/modules/sca/__init__.py new file mode 100644 index 0000000000..7b51bea795 --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/modules/sca/__init__.py @@ -0,0 +1,11 @@ +# Copyright (C) 2015-2023, Wazuh Inc. +# Created by Wazuh, Inc. .
+# This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 + +from wazuh_testing import MODULESD_DEBUG, VERBOSE_DEBUG_OUTPUT + +# Variables +TEMP_FILE_PATH = '/tmp' + +# Setting local_internal_options file +SCA_DEFAULT_LOCAL_INTERNAL_OPTIONS = {MODULESD_DEBUG: str(VERBOSE_DEBUG_OUTPUT)} \ No newline at end of file diff --git a/deps/wazuh_testing/wazuh_testing/modules/sca/event_monitor.py b/deps/wazuh_testing/wazuh_testing/modules/sca/event_monitor.py new file mode 100644 index 0000000000..f25a9d95fb --- /dev/null +++ b/deps/wazuh_testing/wazuh_testing/modules/sca/event_monitor.py @@ -0,0 +1,138 @@ +# Copyright (C) 2015-2023, Wazuh Inc. +# Created by Wazuh, Inc. . +# This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 + +import re +import json + +from wazuh_testing import T_60, T_10, T_20 +from wazuh_testing.tools import LOG_FILE_PATH +from wazuh_testing.modules import sca +from wazuh_testing.tools.monitoring import FileMonitor, generate_monitoring_callback + +# Callback Messages +CB_SCA_ENABLED = r".*sca.*INFO: (Module started.)" +CB_SCA_DISABLED = r".*sca.*INFO: (Module disabled). Exiting." +CB_SCA_SCAN_STARTED = r".*sca.*INFO: (Starting Security Configuration Assessment scan)." +CB_SCA_SCAN_ENDED = r".*sca.*INFO: Security Configuration Assessment scan finished. Duration: (\d+) seconds." +CB_SCA_OSREGEX_ENGINE = r".*sca.*DEBUG: SCA will use '(.*)' engine to check the rules." +CB_POLICY_EVALUATION_FINISHED = r".*sca.*INFO: Evaluation finished for policy '(.*)'." +CB_SCAN_DB_DUMP_FINISHED = r".*sca.*DEBUG: Finished dumping scan results to SCA DB for policy '(.*)'.*" +CB_SCAN_RULE_RESULT = r".*sca.*wm_sca_hash_integrity.*DEBUG: ID: (\d+); Result: '(.*)'" +CB_SCA_SCAN_EVENT = r".*sca_send_alert.*Sending event: (.*)" + + +# Error Messages +ERR_MSG_REGEX_ENGINE = "Did not receive the expected 'SCA will use '.*' engine to check the rules' event" +ERR_MSG_ID_RESULTS = 'Expected sca_has_integrity result events not found' +ERR_MSG_SCA_SUMMARY = 'Expected SCA Scan Summary type event not found.' + + +# Callback functions +def callback_scan_id_result(line): + '''Callback that returns the ID and result of an SCA check + Args: + line (str): line string to check for match. + ''' + match = re.match(CB_SCAN_RULE_RESULT, line) + if match: + return [match.group(1), match.group(2)] + + +def callback_detect_sca_scan_summary(line): + '''Callback that returns the JSON from an SCA summary event. + Args: + line (str): line string to check for match. + ''' + match = re.match(CB_SCA_SCAN_EVENT, line) + if match: + if json.loads(match.group(1))['type'] == 'summary': + return json.loads(match.group(1)) + + +# Event check functions
def check_sca_event(file_monitor=None, callback='.*', error_message=None, update_position=False, + timeout=T_60, accum_results=1, file_to_monitor=LOG_FILE_PATH): + """Check if an SCA event occurs + + Args: + file_monitor (FileMonitor): FileMonitor object to monitor the file content. + callback (str): log regex to check in Wazuh log + error_message (str): error message to show in case of expected event does not occur + update_position (boolean): filter configuration parameter to search in Wazuh log + timeout (int): timeout to check the event in Wazuh log + accum_results (int): Accumulation of matches.
+ file_to_monitor (str): Path of the file where to check for the expected events + """ + file_monitor = FileMonitor(file_to_monitor) if file_monitor is None else file_monitor + error_message = f"Expected event to be found in {file_to_monitor}: {callback}" if error_message is None else \ + error_message + + file_monitor.start(timeout=timeout, update_position=update_position, accum_results=accum_results, + callback=generate_monitoring_callback(callback), error_message=error_message) + + +def check_sca_enabled(file_monitor=None): + """Check if the SCA module is enabled + Args: + file_monitor (FileMonitor): FileMonitor object to monitor the file content. + """ + check_sca_event(callback=CB_SCA_ENABLED, timeout=T_10, file_monitor=file_monitor) + + +def check_sca_disabled(file_monitor=None): + """Check if the SCA module is disabled + Args: + file_monitor (FileMonitor): FileMonitor object to monitor the file content. + """ + check_sca_event(callback=CB_SCA_DISABLED, timeout=T_10, file_monitor=file_monitor) + + +def check_sca_scan_started(file_monitor=None): + """Check if the SCA scan has started + Args: + file_monitor (FileMonitor): FileMonitor object to monitor the file content. + """ + check_sca_event(callback=CB_SCA_SCAN_STARTED, timeout=T_10, file_monitor=file_monitor) + + +def check_sca_scan_ended(file_monitor=None): + """Check if the SCA scan has ended + Args: + file_monitor (FileMonitor): FileMonitor object to monitor the file content. + """ + check_sca_event(callback=CB_SCA_SCAN_ENDED, timeout=T_10, file_monitor=file_monitor) + + +def get_scan_regex_engine(file_monitor=None): + """Return the regex engine used in an SCA scan. + Args: + file_monitor (FileMonitor): FileMonitor object to monitor the file content. + """ + file_monitor = FileMonitor(LOG_FILE_PATH) if file_monitor is None else file_monitor + engine = file_monitor.start(callback=generate_monitoring_callback(CB_SCA_OSREGEX_ENGINE), timeout=T_10, + error_message=ERR_MSG_REGEX_ENGINE, update_position=False).result() + return engine + + +def get_sca_scan_rule_id_results(file_monitor=None, results_num=1): + """Check that the expected amount of check results has been received + Args: + file_monitor (FileMonitor): FileMonitor object to monitor the file content. + results_num (int): Amount of rule check results that should be received. + """ + file_monitor = FileMonitor(LOG_FILE_PATH) if file_monitor is None else file_monitor + results = file_monitor.start(callback=callback_scan_id_result, timeout=T_20, accum_results=results_num, + error_message=ERR_MSG_ID_RESULTS).result() + return results + + +def get_sca_scan_summary(file_monitor=None): + """Get the scan summary event + Args: + file_monitor (FileMonitor): FileMonitor object to monitor the file content.
+ """ + file_monitor = FileMonitor(LOG_FILE_PATH) if file_monitor is None else file_monitor + results = file_monitor.start(callback=callback_detect_sca_scan_summary, timeout=T_20, + error_message=ERR_MSG_SCA_SUMMARY).result() + return results diff --git a/deps/wazuh_testing/wazuh_testing/qa_docs/schema.yaml b/deps/wazuh_testing/wazuh_testing/qa_docs/schema.yaml index 9faacf05dd..718ee598ae 100644 --- a/deps/wazuh_testing/wazuh_testing/qa_docs/schema.yaml +++ b/deps/wazuh_testing/wazuh_testing/qa_docs/schema.yaml @@ -183,7 +183,11 @@ predefined_values: - 4.2.0 - 4.3.0 - 4.4.0 + - 4.4.1 + - 4.4.2 + - 4.4.3 - 4.5.0 + - 4.6.0 tags: - active_response - agentd diff --git a/deps/wazuh_testing/wazuh_testing/tools/__init__.py b/deps/wazuh_testing/wazuh_testing/tools/__init__.py index ac69de8484..22154bf6c6 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/__init__.py +++ b/deps/wazuh_testing/wazuh_testing/tools/__init__.py @@ -178,7 +178,8 @@ def get_service(): MODULESD_CONTROL_SOCKET_PATH, MODULESD_KREQUEST_SOCKET_PATH ], - 'wazuh-clusterd': [MODULESD_C_INTERNAL_SOCKET_PATH] + 'wazuh-clusterd': [MODULESD_C_INTERNAL_SOCKET_PATH], + 'wazuh-integratord': [] } # These sockets do not exist with default Wazuh configuration diff --git a/deps/wazuh_testing/wazuh_testing/tools/file.py b/deps/wazuh_testing/wazuh_testing/tools/file.py index 0cf497770a..4faeeded2e 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/file.py +++ b/deps/wazuh_testing/wazuh_testing/tools/file.py @@ -1,4 +1,4 @@ -# Copyright (C) 2015-2022, Wazuh Inc. +# Copyright (C) 2015-2023, Wazuh Inc. # Created by Wazuh, Inc. . # This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 import bz2 @@ -286,6 +286,29 @@ def remove_file(file_path): delete_path_recursively(file_path) +def copy_files_in_folder(src_folder, dst_folder='/tmp', files_to_move=None): + """Copy files from a folder to a target folder + Args: + src_folder (str): directory path from where to copy files. + dst_folder (str): directory path where files will be copied to. + files_to_move (list): List of files to copy from the source folder.
+ """ + file_list = [] + if os.path.isdir(src_folder): + if files_to_move is None: + for file in os.listdir(src_folder): + file_list.append(file) + copy(os.path.join(src_folder, file), dst_folder) + remove_file(os.path.join(src_folder, file)) + else: + for file in files_to_move: + if os.path.isfile(os.path.join(src_folder, file)): + file_list.append(file) + copy(os.path.join(src_folder, file), dst_folder) + remove_file(os.path.join(src_folder, file)) + return file_list + + def modify_all_files_in_folder(folder_path, data): """Write data into all files in a folder Args: diff --git a/deps/wazuh_testing/wazuh_testing/tools/services.py b/deps/wazuh_testing/wazuh_testing/tools/services.py index cb93102270..546a97ea1c 100644 --- a/deps/wazuh_testing/wazuh_testing/tools/services.py +++ b/deps/wazuh_testing/wazuh_testing/tools/services.py @@ -5,8 +5,8 @@ import subprocess import sys import time - import psutil + from wazuh_testing.tools import WAZUH_PATH, get_service, WAZUH_SOCKETS, QUEUE_DB_PATH, WAZUH_OPTIONAL_SOCKETS from wazuh_testing.tools.configuration import write_wazuh_conf from wazuh_testing.modules import WAZUH_SERVICES_START, WAZUH_SERVICES_STOP diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index 42741892d4..2881f40642 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -124,11 +124,39 @@ def restart_wazuh_daemon_function(daemon=None): @pytest.fixture(scope='function') -def restart_wazuh_function(daemon=None): - """Restart all Wazuh daemons""" - control_service("restart", daemon=daemon) +def restart_wazuh_function(request): + """Restart Wazuh daemons before starting a test, and stop them after finishing. + + Args: + request (fixture): Provide information on the executing test function. + """ + # If there is a list of required daemons defined in the test module, restart those daemons, else restart all daemons. + try: + daemons = request.module.REQUIRED_DAEMONS + except AttributeError: + daemons = [] + + if len(daemons) == 0: + logger.debug(f"Restarting all daemons") + control_service('restart') + else: + for daemon in daemons: + logger.debug(f"Restarting {daemon}") + # Restart the daemon instead of starting it due to legacy fixtures used in the test suite. + control_service('restart', daemon=daemon) + yield - control_service('stop', daemon=daemon) + + # Stop all daemons by default (empty daemons list) + if len(daemons) == 0: + logger.debug(f"Stopping all daemons") + control_service('stop') + else: + # Stop the list of daemons in order (as Wazuh does) + daemons.reverse() + for daemon in daemons: + logger.debug(f"Stopping {daemon}") + control_service('stop', daemon=daemon) @pytest.fixture(scope='module') @@ -302,12 +330,12 @@ def pytest_addoption(parser): help="run tests using a specific WPK package path" ) parser.addoption( - "--integration-api-key", + "--slack-webhook-url", action="store", - metavar="integration_api_key", + metavar="slack_webhook_url", default=None, type=str, - help="pass api key required for integratord tests." + help="pass webhook url required for integratord tests."
) @@ -366,10 +394,10 @@ def pytest_configure(config): # Set WPK package version global_parameters.wpk_version = config.getoption("--wpk_version") - # Set integration_api_key if it is passed through command line args - integration_api_key = config.getoption("--integration-api-key") - if integration_api_key: - global_parameters.integration_api_key = integration_api_key + # Set slack_webhook_url if it is passed through command line args + slack_webhook_url = config.getoption("--slack-webhook-url") + if slack_webhook_url: + global_parameters.slack_webhook_url = slack_webhook_url # Set files to add to the HTML report set_report_files(config.getoption("--save-file")) @@ -604,7 +632,7 @@ def configure_local_internal_options_module(request): conf.set_local_internal_options_dict(backup_local_internal_options) -@pytest.fixture(scope='function') +@pytest.fixture() def configure_local_internal_options_function(request): """Fixture to configure the local internal options file. @@ -905,10 +933,8 @@ def create_file_structure_function(get_files_list): delete_file_structure(get_files_list) -@pytest.fixture(scope='module') -def daemons_handler(get_configuration, request): - """Handler of Wazuh daemons. - +def daemons_handler_impl(request): + """Helper function to handle Wazuh daemons. It uses `daemons_handler_configuration` of each module in order to configure the behavior of the fixture. The `daemons_handler_configuration` should be a dictionary with the following keys: daemons (list, optional): List with every daemon to be used by the module. In case of empty a ValueError @@ -918,7 +944,6 @@ def daemons_handler(get_configuration, request): in order to use this fixture along with invalid configuration. Default `False` Args: - get_configuration (fixture): Get configurations from the module. Allows this fixture to be used for each param. request (fixture): Provide information on the executing test function. """ daemons = [] @@ -976,8 +1001,25 @@ def daemons_handler(get_configuration, request): control_service('stop', daemon=daemon) -# Wrapper of `daemons_handler` function to change its scope from `module` to `function` -daemons_handler_function = pytest.fixture(daemons_handler.__wrapped__, scope='function') +@pytest.fixture(scope='module') +def daemons_handler_module(get_configuration, request): + """Wrapper of `daemons_handler_impl` which contains the general implementation. + + Args: + get_configuration (fixture): Get configurations from the module. Allows this fixture to be used for each param. + request (fixture): Provide information on the executing test function. + """ + yield from daemons_handler_impl(request) + + +@pytest.fixture(scope='function') +def daemons_handler_function(request): + """Wrapper of `daemons_handler_impl` which contains the general implementation. + + Args: + request (fixture): Provide information on the executing test function. + """ + yield from daemons_handler_impl(request) @pytest.fixture(scope='function') diff --git a/tests/integration/test_active_response/test_analysisd/test_os_exec.py b/tests/integration/test_active_response/test_analysisd/test_os_exec.py index 0f0e507ef5..d6ef48aeb2 100644 --- a/tests/integration/test_active_response/test_analysisd/test_os_exec.py +++ b/tests/integration/test_active_response/test_analysisd/test_os_exec.py @@ -11,7 +11,7 @@ to an agent from the threat source when certain criteria are met. These tests will check if the 'wazuh-analysisd' daemon processes 'active response' messages correctly. 
-components: +components: - active_response suite: analysisd diff --git a/tests/integration/test_api/test_config/test_cors/test_cors.py b/tests/integration/test_api/test_config/test_cors/test_cors.py index 7fd73ef4b9..c478035da3 100644 --- a/tests/integration/test_api/test_config/test_cors/test_cors.py +++ b/tests/integration/test_api/test_config/test_cors/test_cors.py @@ -76,7 +76,6 @@ def get_configuration(request): # Tests -@pytest.mark.xfail(reason="Error fixed in this issue: https://github.com/wazuh/wazuh/issues/8485") @pytest.mark.parametrize('origin, tags_to_apply', [ ('https://test_url.com', {'cors'}), ('http://other_url.com', {'cors'}), diff --git a/tests/integration/test_api/test_config/test_max_upload_size/test_max_upload_size.py b/tests/integration/test_api/test_config/test_max_upload_size/test_max_upload_size.py index c2ca7cf954..d6fa860c24 100644 --- a/tests/integration/test_api/test_config/test_max_upload_size/test_max_upload_size.py +++ b/tests/integration/test_api/test_config/test_max_upload_size/test_max_upload_size.py @@ -155,7 +155,7 @@ def create_cdb_list(min_length): {'test_upload_size'} ]) def test_max_upload_size(tags_to_apply, get_configuration, configure_api_environment, restart_required_api_wazuh, - file_monitoring, daemons_handler, wait_for_start, get_api_details): + file_monitoring, daemons_handler_module, wait_for_start, get_api_details): ''' description: Check if a '413' HTTP status code ('Payload Too Large') is returned if the response body is bigger than the value of the 'max_upload_size' tag. For this purpose, the test will call to @@ -183,7 +183,7 @@ def test_max_upload_size(tags_to_apply, get_configuration, configure_api_environ - file_monitoring: type: fixture brief: Handle the monitoring of a specified file. - - daemons_handler: + - daemons_handler_module: type: fixture brief: Handler of Wazuh daemons. - wait_for_start: diff --git a/tests/integration/test_authd/test_authd_use_password_invalid.py b/tests/integration/test_authd/test_authd_use_password_invalid.py index d9761ab1c2..4ff86a3458 100644 --- a/tests/integration/test_authd/test_authd_use_password_invalid.py +++ b/tests/integration/test_authd/test_authd_use_password_invalid.py @@ -94,8 +94,7 @@ def test_authd_use_password_invalid(metadata, configuration, truncate_monitored_ to come from the cases yaml, this is done this way to handle easily the different error logs that could be raised from different inputs. - wazuh_min_version: - 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_aws/test_basic.py b/tests/integration/test_aws/test_basic.py index 0fd305f997..2db970b574 100644 --- a/tests/integration/test_aws/test_basic.py +++ b/tests/integration/test_aws/test_basic.py @@ -50,7 +50,7 @@ def test_bucket_defaults( - teardown: - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -146,7 +146,7 @@ def test_service_defaults( - teardown: - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -248,7 +248,7 @@ def test_inspector_defaults( - teardown: - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. 
- wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict diff --git a/tests/integration/test_aws/test_discard_regex.py b/tests/integration/test_aws/test_discard_regex.py index 0b41bb37c2..f18b783061 100644 --- a/tests/integration/test_aws/test_discard_regex.py +++ b/tests/integration/test_aws/test_discard_regex.py @@ -52,7 +52,7 @@ def test_discard_regex( - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - Delete the uploaded file - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict diff --git a/tests/integration/test_aws/test_log_groups.py b/tests/integration/test_aws/test_log_groups.py index fc3a84680a..2002763b15 100644 --- a/tests/integration/test_aws/test_log_groups.py +++ b/tests/integration/test_aws/test_log_groups.py @@ -59,7 +59,7 @@ def test_log_groups( - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - Delete the uploaded file. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict diff --git a/tests/integration/test_aws/test_only_logs_after.py b/tests/integration/test_aws/test_only_logs_after.py index 50d67e894f..0eb73afcbc 100644 --- a/tests/integration/test_aws/test_only_logs_after.py +++ b/tests/integration/test_aws/test_only_logs_after.py @@ -67,7 +67,7 @@ def test_bucket_without_only_logs_after( - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - Delete the uploaded file. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -188,7 +188,7 @@ def test_service_without_only_logs_after( - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - Delete the uploaded file. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -302,7 +302,7 @@ def test_bucket_with_only_logs_after( - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - Delete the uploaded file. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -422,7 +422,7 @@ def test_cloudwatch_with_only_logs_after( - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - Delete the uploaded file. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -540,7 +540,7 @@ def test_inspector_with_only_logs_after( - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - Delete the uploaded file. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -657,7 +657,7 @@ def test_bucket_multiple_calls( - teardown: - Delete the `s3_cloudtrail.db`. - Delete the uploaded files. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - metadata: type: dict @@ -761,7 +761,7 @@ def test_inspector_multiple_calls( were processed, there were no duplicates. - teardown: - Delete the `aws_services.db`. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - metadata: type: dict @@ -848,7 +848,7 @@ def test_cloudwatch_multiple_calls( - teardown: - Delete the `aws_services.db`. - Delete the uploaded files. 
- wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - metadata: type: dict diff --git a/tests/integration/test_aws/test_parser.py b/tests/integration/test_aws/test_parser.py index 25011f87c9..05bc64e482 100644 --- a/tests/integration/test_aws/test_parser.py +++ b/tests/integration/test_aws/test_parser.py @@ -50,7 +50,7 @@ def test_bucket_and_service_missing( - teardown: - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -130,7 +130,7 @@ def test_type_missing_in_bucket( - teardown: - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -201,7 +201,7 @@ def test_type_missing_in_service( - teardown: - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -273,7 +273,7 @@ def test_empty_values_in_bucket( - teardown: - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -344,7 +344,7 @@ def test_empty_values_in_service( - teardown: - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -416,7 +416,7 @@ def test_invalid_values_in_bucket( - teardown: - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -487,7 +487,7 @@ def test_invalid_values_in_service( - teardown: - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -558,7 +558,7 @@ def test_multiple_bucket_and_service_tags( - teardown: - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict diff --git a/tests/integration/test_aws/test_path.py b/tests/integration/test_aws/test_path.py index 6d377217ed..d9f0d1a5a4 100644 --- a/tests/integration/test_aws/test_path.py +++ b/tests/integration/test_aws/test_path.py @@ -57,7 +57,7 @@ def test_path( - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - Delete the uploaded file. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict diff --git a/tests/integration/test_aws/test_path_suffix.py b/tests/integration/test_aws/test_path_suffix.py index f9597edb34..c789e34e33 100644 --- a/tests/integration/test_aws/test_path_suffix.py +++ b/tests/integration/test_aws/test_path_suffix.py @@ -57,7 +57,7 @@ def test_path_suffix( - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - Delete the uploaded file. 
- wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict diff --git a/tests/integration/test_aws/test_regions.py b/tests/integration/test_aws/test_regions.py index 920dca3253..fb041acce4 100644 --- a/tests/integration/test_aws/test_regions.py +++ b/tests/integration/test_aws/test_regions.py @@ -64,7 +64,7 @@ def test_regions( - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - Delete the uploaded file. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -201,7 +201,7 @@ def test_cloudwatch_regions( - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - Delete the uploaded file. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -341,7 +341,7 @@ def test_inspector_regions( - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - Delete the uploaded file. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict diff --git a/tests/integration/test_aws/test_remove_from_bucket.py b/tests/integration/test_aws/test_remove_from_bucket.py index 8bddca8fb1..8729ef4297 100644 --- a/tests/integration/test_aws/test_remove_from_bucket.py +++ b/tests/integration/test_aws/test_remove_from_bucket.py @@ -52,7 +52,7 @@ def test_remove_from_bucket( - teardown: - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict @@ -156,7 +156,7 @@ def test_remove_log_stream( - teardown: - Truncate wazuh logs. - Restore initial configuration, both ossec.conf and local_internal_options.conf. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configuration: type: dict diff --git a/tests/integration/test_enrollment/test_agent_auth_enrollment.py b/tests/integration/test_enrollment/test_agent_auth_enrollment.py index 141e732a44..af2cd692aa 100644 --- a/tests/integration/test_enrollment/test_agent_auth_enrollment.py +++ b/tests/integration/test_enrollment/test_agent_auth_enrollment.py @@ -92,7 +92,7 @@ def test_agent_auth_enrollment(configure_environment, shutdown_agentd, get_curre error log. Agent-auth will be executed using the different parameters and with different keys and password files scenarios as described in the test cases." 
- wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 0 diff --git a/tests/integration/test_enrollment/test_agentd_server_address_configuration.py b/tests/integration/test_enrollment/test_agentd_server_address_configuration.py index c3687b28c6..b75fb0e3f7 100644 --- a/tests/integration/test_enrollment/test_agentd_server_address_configuration.py +++ b/tests/integration/test_enrollment/test_agentd_server_address_configuration.py @@ -145,7 +145,7 @@ def clean_client_keys(get_configuration): def test_agentd_server_address_configuration(configure_local_internal_options_module, clean_client_keys, get_configuration, configure_environment, configure_sockets_environment, configure_socket_listener, - create_certificates, edit_hosts, daemons_handler, file_monitoring): + create_certificates, edit_hosts, daemons_handler_module, file_monitoring): ''' description: Check the messages produced by the agent when introducing @@ -166,7 +166,7 @@ def test_agentd_server_address_configuration(configure_local_internal_options_mo - edit_hosts: type: fixture brief: Edit the hosts file to add custom hostnames for testing. - - daemons_handler: + - daemons_handler_module: type: fixture brief: Restart the agentd daemon for restarting the agent. - file_monitoring: diff --git a/tests/integration/test_fim/test_files/test_basic_usage/test_basic_usage_realtime_unsupported.py b/tests/integration/test_fim/test_files/test_basic_usage/test_basic_usage_realtime_unsupported.py index 48dd185c49..abddccba5c 100644 --- a/tests/integration/test_fim/test_files/test_basic_usage/test_basic_usage_realtime_unsupported.py +++ b/tests/integration/test_fim/test_files/test_basic_usage/test_basic_usage_realtime_unsupported.py @@ -91,7 +91,7 @@ def get_configuration(request): # Tests def test_realtime_unsupported(get_configuration, configure_environment, file_monitoring, - configure_local_internal_options_module, daemons_handler): + configure_local_internal_options_module, daemons_handler_module): ''' description: Check if the current OS platform falls to the 'scheduled' mode when 'realtime' is not available. For this purpose, the test performs a CUD set of operations to a file with 'realtime' mode set as @@ -117,7 +117,7 @@ def test_realtime_unsupported(get_configuration, configure_environment, file_mon - configure_local_internal_options_module: type: fixture brief: Configure the local internal options file. - - daemons_handler: + - daemons_handler_module: type: fixture brief: Handle the Wazuh daemons. diff --git a/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_capacity_alerts.py b/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_capacity_alerts.py index 673d82460f..4e390b8727 100644 --- a/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_capacity_alerts.py +++ b/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_capacity_alerts.py @@ -129,7 +129,7 @@ def test_file_limit_capacity_alert(percentage, configure_local_internal_options_ the total and when the number is less than that percentage. Finally, the test will verify that on the FIM event, inodes and monitored files number match. 
- wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_default.py b/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_default.py index ecc5d41912..6bb5b0b802 100644 --- a/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_default.py +++ b/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_default.py @@ -111,7 +111,7 @@ def test_file_limit_default(configure_local_internal_options_module, get_configu a directory and wait for FIM to start and generate an event indicating the maximum number of files to monitor. Finally, the test will verify that this number matches the default value (100000). - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_delete_full.py b/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_delete_full.py index 425901b694..6447088c0c 100644 --- a/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_delete_full.py +++ b/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_delete_full.py @@ -138,7 +138,7 @@ def test_file_limit_delete_full(folder, file_name, configure_local_internal_opti no FIM events to be generated (file limit reached). Finally, it will delete 'test_file10' and verify that the 'deleted' FIM event matches that file. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_full.py b/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_full.py index a47ab6aa7a..7e453f4672 100644 --- a/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_full.py +++ b/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_full.py @@ -132,7 +132,7 @@ def test_file_limit_full(configure_local_internal_options_module, get_configurat when a new testing file is added to the monitored directory. Finally, the test will verify that on the FIM event, inodes and monitored files number match. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_no_limit.py b/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_no_limit.py index 267ebd3172..0ad9554a86 100644 --- a/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_no_limit.py +++ b/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_no_limit.py @@ -123,7 +123,7 @@ def test_file_limit_no_limit(configure_local_internal_options_module, get_config For this purpose, the test will monitor a testing directory, and finally, it will verify that the FIM event 'no limit' is generated. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_values.py b/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_values.py index 833a15e1d3..efe9d33568 100644 --- a/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_values.py +++ b/tests/integration/test_fim/test_files/test_file_limit/test_file_limit_values.py @@ -131,7 +131,7 @@ def test_file_limit_values(configure_local_internal_options_module, get_configur is generated and has the correct value. Finally, the test will verify that on the FIM event, inodes and monitored files number match. 
- wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_max_eps/test_max_eps.py b/tests/integration/test_fim/test_files/test_max_eps/test_max_eps.py index d473c6c71d..7039233625 100644 --- a/tests/integration/test_fim/test_files/test_max_eps/test_max_eps.py +++ b/tests/integration/test_fim/test_files/test_max_eps/test_max_eps.py @@ -129,7 +129,7 @@ def test_max_eps(configure_local_internal_options_module, get_configuration, con the testing files created. Finally, it will verify the limit of events per second (eps) is not exceeded by checking the creation time of the testing files. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_report_changes/test_diff_size_limit_configured.py b/tests/integration/test_fim/test_files/test_report_changes/test_diff_size_limit_configured.py index 5474e54fb8..38d83429cd 100644 --- a/tests/integration/test_fim/test_files/test_report_changes/test_diff_size_limit_configured.py +++ b/tests/integration/test_fim/test_files/test_report_changes/test_diff_size_limit_configured.py @@ -125,7 +125,7 @@ def test_diff_size_limit_configured(configure_local_internal_options_module, get the test will verify that the value gotten from that FIM event corresponds with the one set in the 'diff_size_limit'. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_report_changes/test_diff_size_limit_default.py b/tests/integration/test_fim/test_files/test_report_changes/test_diff_size_limit_default.py index 8aea598431..5919f7caa2 100644 --- a/tests/integration/test_fim/test_files/test_report_changes/test_diff_size_limit_default.py +++ b/tests/integration/test_fim/test_files/test_report_changes/test_diff_size_limit_default.py @@ -118,7 +118,7 @@ def test_diff_size_limit_default(configure_local_internal_options_module, get_co file size to generate 'diff' information. Finally, the test will verify that the value gotten from that FIM event corresponds with the default value of the 'diff_size_limit' attribute (50MB). - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_report_changes/test_disk_quota_default.py b/tests/integration/test_fim/test_files/test_report_changes/test_disk_quota_default.py index 101bf75634..cbe649d6e8 100644 --- a/tests/integration/test_fim/test_files/test_report_changes/test_disk_quota_default.py +++ b/tests/integration/test_fim/test_files/test_report_changes/test_disk_quota_default.py @@ -112,7 +112,7 @@ def test_disk_quota_default(get_configuration, configure_environment, disk quota to store 'diff' information. Finally, the test will verify that the value gotten from that FIM event corresponds with the default value of the 'disk_quota' tag (1GB). - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_report_changes/test_disk_quota_disabled.py b/tests/integration/test_fim/test_files/test_report_changes/test_disk_quota_disabled.py index 883964128b..2b6916ef37 100644 --- a/tests/integration/test_fim/test_files/test_report_changes/test_disk_quota_disabled.py +++ b/tests/integration/test_fim/test_files/test_report_changes/test_disk_quota_disabled.py @@ -116,7 +116,7 @@ def test_disk_quota_disabled(filename, folder, size, get_configuration, configur 'disk_quota' limit. 
Finally, the test will verify that the FIM event related to the reached disk quota has not been generated. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_report_changes/test_file_size_default.py b/tests/integration/test_fim/test_files/test_report_changes/test_file_size_default.py index 8739367819..068522d288 100644 --- a/tests/integration/test_fim/test_files/test_report_changes/test_file_size_default.py +++ b/tests/integration/test_fim/test_files/test_report_changes/test_file_size_default.py @@ -116,7 +116,7 @@ def test_file_size_default(filename, folder, get_configuration, configure_enviro reached file size limit has been generated, and the compressed file in the 'queue/diff/local' directory does not exist. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_report_changes/test_file_size_disabled.py b/tests/integration/test_fim/test_files/test_report_changes/test_file_size_disabled.py index 14256b783b..8ebee7e194 100644 --- a/tests/integration/test_fim/test_files/test_report_changes/test_file_size_disabled.py +++ b/tests/integration/test_fim/test_files/test_report_changes/test_file_size_disabled.py @@ -117,7 +117,7 @@ def test_file_size_disabled(filename, folder, size, get_configuration, configure 'file_size' tag. Finally, the test will verify that the FIM event related to the reached file size limit has not been generated. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_report_changes/test_file_size_values.py b/tests/integration/test_fim/test_files/test_report_changes/test_file_size_values.py index adacce9eac..cc29f0f18a 100644 --- a/tests/integration/test_fim/test_files/test_report_changes/test_file_size_values.py +++ b/tests/integration/test_fim/test_files/test_report_changes/test_file_size_values.py @@ -143,7 +143,7 @@ def test_file_size_values(filename, folder, get_configuration, configure_environ file size limit has been generated, and the compressed file in the 'queue/diff/local' directory does not exist. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_report_changes/test_large_changes.py b/tests/integration/test_fim/test_files/test_report_changes/test_large_changes.py index 2a58e62cae..2bb4f21934 100644 --- a/tests/integration/test_fim/test_files/test_report_changes/test_large_changes.py +++ b/tests/integration/test_fim/test_files/test_report_changes/test_large_changes.py @@ -116,7 +116,7 @@ def test_large_changes(configuration, metadata, set_wazuh_configuration, configu the test will verify that the generated FIM event contains in its 'content_changes' field the proper value depending on the test case. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_report_changes/test_report_changes_and_diff.py b/tests/integration/test_fim/test_files/test_report_changes/test_report_changes_and_diff.py index 0234d567e6..e959c43787 100644 --- a/tests/integration/test_fim/test_files/test_report_changes/test_report_changes_and_diff.py +++ b/tests/integration/test_fim/test_files/test_report_changes/test_report_changes_and_diff.py @@ -122,7 +122,7 @@ def test_reports_file_and_nodiff(configuration, metadata, set_wazuh_configuratio 'content_changes' field a message indicating that 'diff' is truncated because the 'nodiff' option is used. 
- wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_report_changes/test_report_deleted_diff.py b/tests/integration/test_fim/test_files/test_report_changes/test_report_deleted_diff.py index 7c2d25a2e7..165c440068 100644 --- a/tests/integration/test_fim/test_files/test_report_changes/test_report_deleted_diff.py +++ b/tests/integration/test_fim/test_files/test_report_changes/test_report_deleted_diff.py @@ -202,7 +202,7 @@ def test_report_when_deleted_directories(path, get_configuration, configure_envi will remove the monitored folder, wait for the FIM 'deleted' event, and verify that the corresponding 'diff' folder is deleted. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 @@ -268,7 +268,7 @@ def test_report_changes_after_restart(get_configuration, configure_environment, the directories will not be created again. Finally, the test will restore the backed configuration and verify that the initial scan of FIM is made. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_whodata_policy_change/test_whodata_policy_change.py b/tests/integration/test_fim/test_files/test_whodata_policy_change/test_whodata_policy_change.py index 1f54020337..f5e523267c 100644 --- a/tests/integration/test_fim/test_files/test_whodata_policy_change/test_whodata_policy_change.py +++ b/tests/integration/test_fim/test_files/test_whodata_policy_change/test_whodata_policy_change.py @@ -111,7 +111,7 @@ def test_whodata_policy_change(configuration, metadata, set_wazuh_configuration, - Delete the monitored folders - Restore configuration - Stop wazuh - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_files/test_windows_system_folder_redirection/test_windows_system_folder_redirection.py b/tests/integration/test_fim/test_files/test_windows_system_folder_redirection/test_windows_system_folder_redirection.py index a11639368d..84aa3fa0fa 100644 --- a/tests/integration/test_fim/test_files/test_windows_system_folder_redirection/test_windows_system_folder_redirection.py +++ b/tests/integration/test_fim/test_files/test_windows_system_folder_redirection/test_windows_system_folder_redirection.py @@ -106,7 +106,7 @@ def test_windows_system_monitoring(configuration, metadata, test_folders, set_wa - Restore configuration - Stop wazuh - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_registry/test_registry_basic_usage/test_basic_usage_registry_duplicated_entries.py b/tests/integration/test_fim/test_registry/test_registry_basic_usage/test_basic_usage_registry_duplicated_entries.py index dcea9e48e6..84d3a9c087 100644 --- a/tests/integration/test_fim/test_registry/test_registry_basic_usage/test_basic_usage_registry_duplicated_entries.py +++ b/tests/integration/test_fim/test_registry/test_registry_basic_usage/test_basic_usage_registry_duplicated_entries.py @@ -62,7 +62,7 @@ def get_configuration(request): @pytest.mark.skipif(get_version() != 'v4.2.3', reason="This test fails by wazuh/wazuh#6797, It was fixed on v4.2.3") @pytest.mark.parametrize('key, subkey1, subkey2, arch', [(key, sub_key_1, sub_key_2, fim.KEY_WOW64_32KEY)]) def test_registry_duplicated_entry(key, subkey1, subkey2, arch, get_configuration, configure_environment, - file_monitoring, configure_local_internal_options_module, daemons_handler, + file_monitoring, configure_local_internal_options_module, daemons_handler_module, 
wait_for_fim_start): """Two registries with capital differences must trigger just one modify the event. diff --git a/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_key_limit_full.py b/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_key_limit_full.py index eba5425348..cdd6644174 100644 --- a/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_key_limit_full.py +++ b/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_key_limit_full.py @@ -122,7 +122,7 @@ def test_registry_key_limit_full(configure_local_internal_options_module, get_co Then, it will try to add a new key and it will check if the FIM event 'full' is generated. Finally, the test will verify that, in the FIM 'entries' event, the number of entries and monitored values match. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 parameters: - configure_local_internal_options_module: diff --git a/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_limit_capacity_alerts.py b/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_limit_capacity_alerts.py index 7afb314608..5ec098e1c1 100644 --- a/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_limit_capacity_alerts.py +++ b/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_limit_capacity_alerts.py @@ -115,7 +115,7 @@ def test_registry_limit_capacity_alert(percentage, get_configuration, configure_ the total and when the number is less than that percentage. Finally, the test will verify that, in the FIM 'entries' event, the entries number is one unit more than the number of monitored values. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_limit_values.py b/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_limit_values.py index bd27a86b32..30990d2402 100644 --- a/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_limit_values.py +++ b/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_limit_values.py @@ -122,7 +122,7 @@ def test_registry_limit_values(configure_local_internal_options_module, get_conf the FIM event 'maximum number of entries' is generated and has the correct value. Finally, the test will verify that, in the FIM 'values entries' event, the number of entries and monitored values match. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_value_limit_full.py b/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_value_limit_full.py index e3620d9f0a..166d0ba013 100644 --- a/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_value_limit_full.py +++ b/tests/integration/test_fim/test_registry/test_registry_limit/test_registry_value_limit_full.py @@ -125,7 +125,7 @@ def test_registry_value_limit_full(configure_local_internal_options_module, get_ when a new testing value is added to the monitored key. Finally, the test will verify that, in the FIM 'entries' event, the number of entries and monitored values match. 
- wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_synchronization/test_sync_overlap.py b/tests/integration/test_fim/test_synchronization/test_sync_overlap.py index 0ccd218efd..c238cafb8e 100644 --- a/tests/integration/test_fim/test_synchronization/test_sync_overlap.py +++ b/tests/integration/test_fim/test_synchronization/test_sync_overlap.py @@ -117,7 +117,7 @@ def test_sync_overlap(configuration, metadata, set_wazuh_configuration, configur - Check that next sync is skipped and interval value is doubled - Check that interval value is returned to configured value after successful sync - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_fim/test_synchronization/test_sync_time.py b/tests/integration/test_fim/test_synchronization/test_sync_time.py index 7f51df45a9..042733f8f6 100644 --- a/tests/integration/test_fim/test_synchronization/test_sync_time.py +++ b/tests/integration/test_fim/test_synchronization/test_sync_time.py @@ -108,7 +108,7 @@ def test_sync_time(configuration, metadata, set_wazuh_configuration, configure_l - Get all the integrity state events time. - Assert that the time it took for the sync to complete was less than the configured interval and max_interval. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 2 diff --git a/tests/integration/test_gcloud/test_configuration/test_invalid.py b/tests/integration/test_gcloud/test_configuration/test_invalid.py index eb9ec95cff..943feae947 100644 --- a/tests/integration/test_gcloud/test_configuration/test_invalid.py +++ b/tests/integration/test_gcloud/test_configuration/test_invalid.py @@ -75,7 +75,7 @@ # configurations -daemons_handler_configuration = {'daemons': ['wazuh-modulesd'], 'ignore_errors' : True} +daemons_handler_configuration = {'daemons': ['wazuh-modulesd'], 'ignore_errors': True} monitoring_modes = ['scheduled'] conf_params = {'PROJECT_ID': global_parameters.gcp_project_id, 'SUBSCRIPTION_NAME': global_parameters.gcp_subscription_name, @@ -92,10 +92,11 @@ def get_configuration(request): """Get configurations from the module.""" return request.param + # tests @pytest.mark.skipif(sys.platform == "win32", reason="Windows does not have support for Google Cloud integration.") -def test_invalid(get_configuration, configure_environment, reset_ossec_log, daemons_handler): +def test_invalid(get_configuration, configure_environment, reset_ossec_log, daemons_handler_module): ''' description: Check if the 'gcp-pubsub' module detects invalid configurations. For this purpose, the test will configure 'gcp-pubsub' using invalid configuration settings with different attributes. @@ -115,7 +116,7 @@ def test_invalid(get_configuration, configure_environment, reset_ossec_log, daem - reset_ossec_log: type: fixture brief: Reset the 'ossec.log' file and start a new monitor. - - daemons_handler: + - daemons_handler_module: type: fixture brief: Handler of Wazuh daemons. 
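The gcloud tests above and below now consume the module-scoped `daemons_handler_module` fixture, driven by the module-level `daemons_handler_configuration` dict (for example `{'daemons': ['wazuh-modulesd'], 'ignore_errors': True}`). As a minimal sketch only, and not the actual `wazuh_testing` implementation, such a fixture could read that dict from the requesting test module and start and stop the listed daemons around the module's tests; the `_control_daemon` helper here is a hypothetical placeholder.

import pytest


def _control_daemon(daemon, action):
    # Hypothetical stand-in for the real daemon control helper; it only
    # records the call so the sketch stays self-contained and runnable.
    print(f'{action} {daemon}')


@pytest.fixture(scope='module')
def daemons_handler_module(request):
    # Read the module-level configuration dict used throughout these tests,
    # e.g. {'daemons': ['wazuh-modulesd'], 'ignore_errors': True}.
    config = getattr(request.module, 'daemons_handler_configuration', {})
    daemons = config.get('daemons', [])
    ignore_errors = config.get('ignore_errors', False)

    # Start the requested daemons, optionally tolerating startup failures.
    for daemon in daemons:
        try:
            _control_daemon(daemon, 'start')
        except Exception:
            if not ignore_errors:
                raise

    yield

    # Stop the daemons once every test in the module has finished.
    for daemon in daemons:
        _control_daemon(daemon, 'stop')

Module scope means the daemons are started once per test module rather than once per test function, which is why the configuration dict lives at module level in these files.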
diff --git a/tests/integration/test_gcloud/test_configuration/test_remote_configuration.py b/tests/integration/test_gcloud/test_configuration/test_remote_configuration.py index 2d076bb99c..4971626423 100644 --- a/tests/integration/test_gcloud/test_configuration/test_remote_configuration.py +++ b/tests/integration/test_gcloud/test_configuration/test_remote_configuration.py @@ -81,7 +81,7 @@ # configurations -daemons_handler_configuration = {'daemons': ['wazuh-modulesd'], 'ignore_errors' : True} +daemons_handler_configuration = {'daemons': ['wazuh-modulesd'], 'ignore_errors': True} monitoring_modes = ['scheduled'] conf_params = {'PROJECT_ID': global_parameters.gcp_project_id, 'SUBSCRIPTION_NAME': global_parameters.gcp_subscription_name, @@ -139,7 +139,8 @@ def get_remote_configuration(component_name, config): @pytest.mark.skipif(sys.platform == "win32", reason="Windows does not have support for Google Cloud integration.") -def test_remote_configuration(get_configuration, configure_environment, reset_ossec_log, daemons_handler, wait_for_gcp_start): +def test_remote_configuration(get_configuration, configure_environment, reset_ossec_log, daemons_handler_module, + wait_for_gcp_start): ''' description: Check if the remote configuration matches the local configuration of the 'gcp-pubsub' module. For this purpose, the test will use different settings and get the remote configuration applied. diff --git a/tests/integration/test_gcloud/test_configuration/test_schedule.py b/tests/integration/test_gcloud/test_configuration/test_schedule.py index 166e468f8a..676eed5456 100644 --- a/tests/integration/test_gcloud/test_configuration/test_schedule.py +++ b/tests/integration/test_gcloud/test_configuration/test_schedule.py @@ -96,7 +96,7 @@ def get_configuration(request): # tests @pytest.mark.skipif(sys.platform == "win32", reason="Windows does not have support for Google Cloud integration.") -def test_schedule(get_configuration, configure_environment, reset_ossec_log, daemons_handler): +def test_schedule(get_configuration, configure_environment, reset_ossec_log, daemons_handler_module): ''' description: Check if the 'gcp-pubsub' module is executed in the periods specified in the 'interval' tag. 
For this purpose, the test will use different values for the 'interval' tag (a positive number @@ -137,15 +137,15 @@ def test_schedule(get_configuration, configure_environment, reset_ossec_log, dae str_interval = get_configuration['sections'][0]['elements'][3]['interval']['value'] time_interval = int(''.join(filter(str.isdigit, str_interval))) tags_to_apply = get_configuration['tags'][0] - + # Warning log must appear in log (cause interval is not compatible with ) if (tags_to_apply == 'schedule_day' and 'M' not in str_interval) or \ - (tags_to_apply == 'schedule_wday' and 'w' not in str_interval) or \ - (tags_to_apply == 'schedule_time' and ('d' not in str_interval and 'w' not in str_interval)): + (tags_to_apply == 'schedule_wday' and 'w' not in str_interval) or \ + (tags_to_apply == 'schedule_time' and ('d' not in str_interval and 'w' not in str_interval)): wazuh_log_monitor.start(timeout=global_parameters.default_timeout + time_interval, callback=callback_detect_schedule_validate_parameters_warn, error_message='Did not receive expected ' - 'at _sched_scan_validate_parameters(): WARNING:').result() + 'at _sched_scan_validate_parameters(): WARNING:').result() # Warning is not suppose to appear else: with pytest.raises(TimeoutError): diff --git a/tests/integration/test_gcloud/test_functionality/test_day_wday.py b/tests/integration/test_gcloud/test_functionality/test_day_wday.py index a84e45ca45..7a41bee9a4 100644 --- a/tests/integration/test_gcloud/test_functionality/test_day_wday.py +++ b/tests/integration/test_gcloud/test_functionality/test_day_wday.py @@ -59,11 +59,10 @@ import pytest from wazuh_testing import global_parameters from wazuh_testing.fim import generate_params -from wazuh_testing.gcloud import callback_detect_start_fetching_logs, callback_detect_start_gcp_sleep +from wazuh_testing.gcloud import callback_detect_start_gcp_sleep from wazuh_testing.tools import LOG_FILE_PATH from wazuh_testing.tools.configuration import load_wazuh_configurations, check_apply_test from wazuh_testing.tools.monitoring import FileMonitor -from wazuh_testing.tools.time import TimeMachine # Marks @@ -71,26 +70,29 @@ # variables +wazuh_log_monitor = FileMonitor(LOG_FILE_PATH) +test_data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') +configurations_path = os.path.join(test_data_path, 'wazuh_schedule_conf.yaml') +force_restart_after_restoring = False interval = '1h' pull_on_start = 'no' max_messages = 100 logging = "info" - -today = datetime.date.today() -day = today.day - -weekDays = ("Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday") monthDays = {"1": 31, "2": 28, "3": 31, "4": 30, "5": 31, "6": 30, "7": 31, "8": 31, "9": 30, "10": 31, "11": 30, "12": 31} -wday = weekDays[today.weekday()] -now = datetime.datetime.now() -day_time = now.strftime("%H:%M") -wazuh_log_monitor = FileMonitor(LOG_FILE_PATH) -test_data_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') -configurations_path = os.path.join(test_data_path, 'wazuh_schedule_conf.yaml') -force_restart_after_restoring = False +def set_datetime_info(): + """Set datetime info globally.""" + global today, day, wday, day_time + + today = datetime.datetime.today() + day = today.day + wday = today.strftime('%A') + day_time = today.strftime("%H:%M") + + +set_datetime_info() # configurations @@ -125,7 +127,7 @@ def get_configuration(request): ({'ossec_time_conf'}) ]) @pytest.mark.skipif(sys.platform == "win32", reason="Windows does not have support for Google Cloud integration.") -def 
test_day_wday(tags_to_apply, get_configuration, configure_environment, reset_ossec_log, daemons_handler, +def test_day_wday(tags_to_apply, get_configuration, configure_environment, reset_ossec_log, daemons_handler_module, wait_for_gcp_start): ''' description: Check if the 'gcp-pubsub' module starts to pull logs according to the day of the week, @@ -174,36 +176,20 @@ def test_day_wday(tags_to_apply, get_configuration, configure_environment, reset - scheduled - time_travel ''' - def get_next_scan(next_scan_time: str): - next_scan_time = next_scan_time_log.split() - date = next_scan_time[0].split('/') - hour = next_scan_time[1].split(':') - - date_before = datetime.datetime.now() - - date_after = datetime.datetime(int(date[0]), int(date[1]), int(date[2]), - int(hour[0]), int(hour[1]), int(hour[2])) - diff_time = (date_after - date_before).total_seconds() - - return int(diff_time) - check_apply_test(tags_to_apply, get_configuration['tags']) - next_scan_time_log = wazuh_log_monitor.start(timeout=global_parameters.default_timeout, - callback=callback_detect_start_gcp_sleep, - error_message='Did not receive expected ' - '"Sleeping until ..." event').result() + wazuh_log_monitor.start(timeout=global_parameters.default_timeout, callback=callback_detect_start_gcp_sleep, + error_message='Did not receive expected "Sleeping until ..." event').result() @pytest.mark.parametrize('tags_to_apply', [ ({'ossec_day_multiple_conf'}), - ({'ossec_wday_multiple_conf'}), + pytest.param({'ossec_wday_multiple_conf'}, marks=pytest.mark.xfail(reason="Unstable because of wazuh/wazuh#15255")), ({'ossec_time_multiple_conf'}) ]) -@pytest.mark.xfail(reason="Blocked by wazuh/wazuh#15255") @pytest.mark.skipif(sys.platform == "win32", reason="Windows does not have support for Google Cloud integration.") -def test_day_wday_multiple(tags_to_apply, get_configuration, configure_environment, reset_ossec_log, daemons_handler, - wait_for_gcp_start): +def test_day_wday_multiple(tags_to_apply, get_configuration, configure_environment, reset_ossec_log, + daemons_handler_module, wait_for_gcp_start): ''' description: Check if the 'gcp-pubsub' module calculates the next scan correctly using time intervals greater than one month, one week, or one day. For this purpose, the test will use different @@ -249,8 +235,13 @@ def test_day_wday_multiple(tags_to_apply, get_configuration, configure_environme ''' check_apply_test(tags_to_apply, get_configuration['tags']) - str_interval = get_configuration['sections'][0]['elements'][4]['interval']['value'] - time_interval = int(''.join(filter(str.isdigit, str_interval))) + interval, unit = get_configuration['sections'][0]['elements'][4]['interval']['value'] + interval = int(interval) + kwargs = {'days': 0 if unit != 'd' else interval, 'weeks': 0 if unit != 'w' else interval} + # Update datetime info globally + set_datetime_info() + # Get the expected date before the test run to avoid a day difference with Wazuh's scheduled scan + expected_next_scan_date = today + datetime.timedelta(**kwargs) next_scan_time_log = wazuh_log_monitor.start(timeout=global_parameters.default_timeout + 60, callback=callback_detect_start_gcp_sleep, @@ -258,26 +249,23 @@ def test_day_wday_multiple(tags_to_apply, get_configuration, configure_environme '"Sleeping until ..." 
event').result() next_scan_time_spl = next_scan_time_log.split(" ") - date = next_scan_time_spl[0].split("/") - hour = next_scan_time_spl[1].split(":") + year, month, day = [int(_) for _ in next_scan_time_spl[0].split("/")] + hour, minute, second = [int(_) for _ in next_scan_time_spl[1].split(":")] - next_scan_time = datetime.datetime(int(date[0]), int(date[1]), int(date[2]), int(hour[0]), int(hour[1]), - int(hour[2])) + next_scan_time = datetime.datetime(year, month, day, hour, minute, second) + next_scan_time_weekday = next_scan_time.strftime('%A') if tags_to_apply == {'ossec_day_multiple_conf'}: - if today.month + time_interval <= 12: - expected_month = today.month + time_interval + if today.month + interval <= 12: + expected_month = today.month + interval else: - expected_month = (today.month + time_interval) % 12 + expected_month = (today.month + interval) % 12 if today.day > monthDays[str(expected_month)]: expected_month = expected_month + 1 assert next_scan_time.month == expected_month - - if tags_to_apply == {'ossec_wday_multiple_conf'}: - assert weekDays[next_scan_time.weekday()] == wday - assert next_scan_time.day == (today + datetime.timedelta(weeks=time_interval)).day - - if tags_to_apply == {'ossec_time_multiple_conf'}: - assert next_scan_time.day == (today + datetime.timedelta(days=time_interval)).day + else: + assert next_scan_time.day == expected_next_scan_date.day + if tags_to_apply == {'ossec_wday_multiple_conf'}: + assert next_scan_time_weekday == wday diff --git a/tests/integration/test_gcloud/test_functionality/test_interval.py b/tests/integration/test_gcloud/test_functionality/test_interval.py index c239bcf048..657a4dc4b0 100644 --- a/tests/integration/test_gcloud/test_functionality/test_interval.py +++ b/tests/integration/test_gcloud/test_functionality/test_interval.py @@ -97,6 +97,7 @@ truncate_file(LOG_FILE_PATH) + # fixtures @pytest.fixture(scope='module', params=configurations) @@ -108,7 +109,8 @@ def get_configuration(request): # tests @pytest.mark.skipif(sys.platform == "win32", reason="Windows does not have support for Google Cloud integration.") -def test_interval(get_configuration, configure_environment, reset_ossec_log, daemons_handler, wait_for_gcp_start): +def test_interval(get_configuration, configure_environment, reset_ossec_log, daemons_handler_module, + wait_for_gcp_start): ''' description: Check if the 'gcp-pubsub' module starts to pull logs at the periods set in the configuration by the 'interval' tag. 
For this purpose, the test will use different intervals and check if @@ -176,4 +178,4 @@ def test_interval(get_configuration, configure_environment, reset_ossec_log, dae '"Starting fetching of logs" event').result() end_time = time.time() diff_time = int(end_time - start_time) - assert time_interval - diff_time <= 10 \ No newline at end of file + assert time_interval - diff_time <= 10 diff --git a/tests/integration/test_gcloud/test_functionality/test_logging.py b/tests/integration/test_gcloud/test_functionality/test_logging.py index 92f9f5a291..7121d8d7f9 100644 --- a/tests/integration/test_gcloud/test_functionality/test_logging.py +++ b/tests/integration/test_gcloud/test_functionality/test_logging.py @@ -89,14 +89,14 @@ modes=monitoring_modes) configurations = conf.load_wazuh_configurations(configurations_path, __name__, - params=p, metadata=m) + params=p, metadata=m) # fixtures -@pytest.fixture(scope='module', params= [ +@pytest.fixture(scope='module', params=[ {'wazuh_modules.debug': 0, - 'monitord.rotate_log': 0, 'monitord.day_wait': 0, - 'monitord.keep_log_days': 0, 'monitord.size_rotate': 0}, + 'monitord.rotate_log': 0, 'monitord.day_wait': 0, + 'monitord.keep_log_days': 0, 'monitord.size_rotate': 0}, {'wazuh_modules.debug': 1, 'monitord.rotate_log': 0, 'monitord.day_wait': 0, 'monitord.keep_log_days': 0, 'monitord.size_rotate': 0}, @@ -125,7 +125,6 @@ def configure_local_internal_options_module(get_local_internal_options): import wazuh_testing.tools.services as services services.restart_wazuh_daemon('wazuh-modulesd') - yield conf.set_local_internal_options_dict(backup_local_internal_options) @@ -145,7 +144,7 @@ def get_configuration(request): ], indirect=True) def test_logging(get_configuration, configure_environment, reset_ossec_log, publish_messages, configure_local_internal_options_module, - daemons_handler, wait_for_gcp_start): + daemons_handler_module, wait_for_gcp_start): ''' description: Check if the 'gcp-pubsub' module generates logs according to the debug level set for wazuh_modules. For this purpose, the test will use different debug levels (depending on the test case) and @@ -195,7 +194,7 @@ def test_logging(get_configuration, configure_environment, reset_ossec_log, ''' str_interval = get_configuration['sections'][0]['elements'][4]['interval']['value'] logging_opt = int([x[-2] for x in conf.get_wazuh_local_internal_options() - if x.startswith('wazuh_modules.debug')][0]) + if x.startswith('wazuh_modules.debug')][0]) time_interval = int(''.join(filter(str.isdigit, str_interval))) mandatory_keywords = {} if logging_opt == 0: diff --git a/tests/integration/test_gcloud/test_functionality/test_max_messages.py b/tests/integration/test_gcloud/test_functionality/test_max_messages.py index 5ca6ba1b2f..b8040a73cc 100644 --- a/tests/integration/test_gcloud/test_functionality/test_max_messages.py +++ b/tests/integration/test_gcloud/test_functionality/test_max_messages.py @@ -121,8 +121,9 @@ def get_configuration(request): ['- DEBUG - GCP message' for _ in range(100)], ['- DEBUG - GCP message' for _ in range(120)] ], indirect=True) +@pytest.mark.xfail(reason='Unstable, further information in wazuh/wazuh#17245') def test_max_messages(get_configuration, configure_environment, reset_ossec_log, publish_messages, - daemons_handler, wait_for_gcp_start): + daemons_handler_module, wait_for_gcp_start): ''' description: Check if the 'gcp-pubsub' module pulls a message number less than or equal to the limit set in the 'max_messages' tag. 
For this purpose, the test will use a fixed limit and generate a diff --git a/tests/integration/test_gcloud/test_functionality/test_pull_on_start.py b/tests/integration/test_gcloud/test_functionality/test_pull_on_start.py index 9175628aa6..916bbcdc64 100644 --- a/tests/integration/test_gcloud/test_functionality/test_pull_on_start.py +++ b/tests/integration/test_gcloud/test_functionality/test_pull_on_start.py @@ -104,7 +104,7 @@ def get_configuration(request): @pytest.mark.skipif(sys.platform == "win32", reason="Windows does not have support for Google Cloud integration.") def test_pull_on_start(get_configuration, configure_environment, - daemons_handler, wait_for_gcp_start): + daemons_handler_module, wait_for_gcp_start): ''' description: Check if the 'gcp-pubsub' module pulls messages when starting if the 'pull_on_start' is set to 'yes', or sleeps up to the next interval if that one is set to 'no'. For this diff --git a/tests/integration/test_gcloud/test_functionality/test_rules.py b/tests/integration/test_gcloud/test_functionality/test_rules.py index a6441fa587..e28dcf3d32 100644 --- a/tests/integration/test_gcloud/test_functionality/test_rules.py +++ b/tests/integration/test_gcloud/test_functionality/test_rules.py @@ -95,6 +95,7 @@ truncate_file(LOG_FILE_PATH) + # fixtures @pytest.fixture(scope='module', params=configurations) @@ -105,9 +106,10 @@ def get_configuration(request): # tests +@pytest.mark.xfail(reason='Unstable, further information in wazuh/wazuh#17245') @pytest.mark.skipif(sys.platform == "win32", reason="Windows does not have support for Google Cloud integration.") def test_rules(get_configuration, configure_environment, - daemons_handler, wait_for_gcp_start): + daemons_handler_module, wait_for_gcp_start): ''' description: Check if the 'gcp-pubsub' module gets messages matching the GCP rules. It also checks if the triggered alerts contain the proper rule ID. For this purpose, the test will diff --git a/tests/integration/test_integratord/conftest.py b/tests/integration/test_integratord/conftest.py index 1243d020fa..64db4af575 100644 --- a/tests/integration/test_integratord/conftest.py +++ b/tests/integration/test_integratord/conftest.py @@ -3,20 +3,23 @@ Created by Wazuh, Inc. . 
This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 ''' - - import pytest +from wazuh_testing import T_5 from wazuh_testing.tools import LOG_FILE_PATH -from wazuh_testing.tools.monitoring import FileMonitor, generate_monitoring_callback -from wazuh_testing.modules import integratord as integrator -from wazuh_testing.modules.integratord.event_monitor import check_integratord_event +from wazuh_testing.tools.monitoring import FileMonitor +from wazuh_testing.modules import analysisd +from wazuh_testing.modules.analysisd.event_monitor import check_analysisd_event +from wazuh_testing.modules.integratord import event_monitor as evm @pytest.fixture(scope='function') def wait_for_start_module(request): # Wait for integratord thread to start file_monitor = FileMonitor(LOG_FILE_PATH) - check_integratord_event(file_monitor=file_monitor, timeout=20, - callback=generate_monitoring_callback(integrator.CB_INTEGRATORD_THREAD_READY), - error_message=integrator.ERR_MSG_VIRUST_TOTAL_ENABLED_NOT_FOUND) + evm.check_integratord_thread_ready(file_monitor=file_monitor) + + # Wait for analysisd to start successfully (to detect changes in the alerts.json file) + check_analysisd_event(file_monitor=file_monitor, timeout=T_5, + callback=analysisd.CB_ANALYSISD_STARTUP_COMPLETED, + error_message=analysisd.ERR_MSG_STARTUP_COMPLETED_NOT_FOUND) diff --git a/tests/integration/test_integratord/data/configuration_template/config_integratord_read_json_alerts.yaml b/tests/integration/test_integratord/data/configuration/configuration_alerts_reading.yaml similarity index 54% rename from tests/integration/test_integratord/data/configuration_template/config_integratord_read_json_alerts.yaml rename to tests/integration/test_integratord/data/configuration/configuration_alerts_reading.yaml index acf8a61bfb..f68eba1317 100644 --- a/tests/integration/test_integratord/data/configuration_template/config_integratord_read_json_alerts.yaml +++ b/tests/integration/test_integratord/data/configuration/configuration_alerts_reading.yaml @@ -1,39 +1,35 @@ -- tags: - - all - apply_to_modules: - - test_integratord_read_json_alerts - - test_integratord_change_inode_alert - - test_integratord_read_json_file_deleted - sections: +- sections: - section: integration elements: - name: - value: virustotal - - api_key: - value: API_KEY + value: slack + - hook_url: + value: WEBHOOK_URL - rule_id: - value: '554' + value: 5712 + - level: + value: 10 - alert_format: value: json + - section: sca elements: - enabled: value: 'no' - - section: rootcheck - elements: - - disabled: - value: 'yes' + - section: syscheck elements: - disabled: value: 'yes' + - section: wodle attributes: - - name: syscollector + - name: vulnerability-detector elements: - disabled: value: 'yes' - - section: auth + + - section: rootcheck elements: - disabled: value: 'yes' diff --git a/tests/integration/test_integratord/data/test_cases/cases_integratord_change_inode_alert.yaml b/tests/integration/test_integratord/data/test_cases/cases_integratord_change_inode_alert.yaml index 58b53b1702..c3b3691da9 100644 --- a/tests/integration/test_integratord/data/test_cases/cases_integratord_change_inode_alert.yaml +++ b/tests/integration/test_integratord/data/test_cases/cases_integratord_change_inode_alert.yaml @@ -1,17 +1,22 @@ -- name: Cannot read alerts - Inode changed +- name: cannot_read_alerts_file_inode_changed description: The alerts.json file inode has changed and it cannot read alerts from it until it reloads. 
configuration_parameters: - API_KEY: Insert using --integration-api-key parameter + WEBHOOK_URL: Insert using --slack-webhook-url parameter metadata: - alert_sample: '{"timestamp":"2022-07-20T14:53:16.482+0000","rule":{"level":5,"description": - "File added to the system.", "id":"554","firedtimes":1,"mail":false,"groups":["ossec","syscheck", - "syscheck_entry_added","syscheck_file"],"pci_dss":["11.5"],"gpg13":["4.11"],"gdpr":["II_5.1.f"], - "hipaa":["164.312.c.1","164.312.c.2"],"nist_800_53":["SI.7"],"tsc":["PI1.4","PI1.5","CC6.1","CC6.8", - "CC7.2","CC7.3"]},"agent":{"id":"000","name":"padding_input"},"manager":{"name":"c3"},"id": - "1657551196.2754","full_log":"File /test_folder/TEST_CHANGED_INODE_ALERT.txt added\nMode: - scheduled\n","syscheck":{"path":"/test_folder/TEST_CHANGED_INODE_ALERT.txt","mode":"scheduled", - "size_after":"16","perm_after":"rw-r--r--","uid_after":"0","gid_after":"0","md5_after": - "2982666f29e2736e7ca0e12dd638d433","sha1_after":"49999430cc5652dedd26352b0342097e8fa3affe", - "sha256_after":"32bc19c9406a98ab21e5ec79fbd5bba2cb79755607a9f382c662d37b5bf5d8ea","uname_after": - "root","gname_after":"root","mtime_after":"2022-07-11T14:53:07","inode_after":9793,"event":"added"}, - "decoder":{"name":"syscheck_new_entry"},"location":"syscheck"}' + alert_sample: '{"timestamp":"2022-05-11T12:29:19.905+0000","rule":{"level":10,"description": + "sshd: brute force trying to get access to the system. Non existent user.","id":"5712", + "mitre":{"id":["T1110"],"tactic":["Credential Access"],"technique":["Brute Force"]},"frequency":8, + "firedtimes":1,"mail":false,"groups":["syslog","sshd","authentication_failures"],"gdpr": + ["IV_35.7.d","IV_32.2"],"hipaa":["164.312.b"],"nist_800_53":["SI.4","AU.14","AC.7"],"pci_dss": + ["11.4","10.2.4","10.2.5"],"tsc":["CC6.1","CC6.8","CC7.2","CC7.3"]},"agent":{"id":"000","name": + "localhost.localdomain"},"manager":{"name":"localhost.localdomain"},"id":"1652272159.1549653", + "previous_output":"May 11 12:29:16 localhost sshd[17582]: Invalid user paco from 172.17.1.1 port + 56402\nMay 11 12:29:14 localhost sshd[17580]: Invalid user paco from 172.17.1.1 port 56400\nMay + 11 12:29:11 localhost sshd[17578]: Invalid user paco from 172.17.1.1 port 56398\nMay 11 12:29:09 + localhost sshd[17576]: Invalid user paco from 172.17.1.1 port 56396\nMay 11 12:29:07 localhost + sshd[17574]: Invalid user paco from 172.17.1.1 port 56394\nMay 11 12:29:04 localhost sshd[17572]: + Invalid user paco from 172.17.1.1 port 56392\nMay 11 12:29:00 localhost sshd[17570]: Invalid user + paco from 172.17.1.1 port 56390","full_log":"May 11 12:29:18 localhost sshd[17584]: Invalid user + paco from 172.17.1.1 port 56404","predecoder":{"program_name":"sshd","timestamp":"May 11 12:29:18", + "hostname":"localhost"},"decoder":{"parent":"sshd","name":"sshd"},"data":{"srcip":"172.17.1.1", + "srcport":"56404","srcuser":"paco"},"location":"/var/log/secure"}' diff --git a/tests/integration/test_integratord/data/test_cases/cases_integratord_read_invalid_json_alerts.yaml b/tests/integration/test_integratord/data/test_cases/cases_integratord_read_invalid_json_alerts.yaml index d8fa3d4196..65c2c501f3 100644 --- a/tests/integration/test_integratord/data/test_cases/cases_integratord_read_invalid_json_alerts.yaml +++ b/tests/integration/test_integratord/data/test_cases/cases_integratord_read_invalid_json_alerts.yaml @@ -1,37 +1,47 @@ -- name: Read invalid json alert +- name: read_invalid_json_alert description: Read a invalid alert from alerts.json - removed rule key name - Integration fails 
configuration_parameters: - API_KEY: Insert using --integration-api-key parameter + WEBHOOK_URL: Insert using --slack-webhook-url parameter metadata: - alert_sample: '{"timestamp":"2022-07-20T14:53:16.482+0000",:{"level":5,"description":"File added to the system.", - "id":"554","firedtimes":1,"mail":false,"groups":["ossec","syscheck","syscheck_entry_added", - "syscheck_file"],"pci_dss":["11.5"],"gpg13":["4.11"],"gdpr":["II_5.1.f"],"hipaa":["164.312.c.1", - "164.312.c.2"],"nist_800_53":["SI.7"],"tsc":["PI1.4","PI1.5","CC6.1","CC6.8","CC7.2","CC7.3"]}, - "agent":{"id":"000","name":"padding_input"},"manager":{"name":"c3"},"id":"1657551196.2754", - "full_log":"File /test_folder/TEST_INVALID_ALERT.txt added\nMode: scheduled\n","syscheck":{"path": - "/test_folder/TEST_INVALID_ALERT.txt","mode":"scheduled","size_after":"16","perm_after":"rw-r--r--" - ,"uid_after":"0","gid_after":"0","md5_after":"2982666f29e2736e7ca0e12dd638d433", - "sha1_after":"49999430cc5652dedd26352b0342097e8fa3affe", - "sha256_after":"32bc19c9406a98ab21e5ec79fbd5bba2cb79755607a9f382c662d37b5bf5d8ea","uname_after": - "root","gname_after":"root","mtime_after":"2022-07-11T14:53:07","inode_after":9793,"event":"added"}, - "decoder":{"name":"syscheck_new_entry"},"location":"syscheck"}' + alert_sample: '{"timestamp":"2022-05-11T12:29:19.905+0000",:{"level":10,"description": + "sshd: brute force trying to get access to the system. Non existent user.","id":"5712", + "mitre":{"id":["T1110"],"tactic":["Credential Access"],"technique":["Brute Force"]},"frequency":8, + "firedtimes":1,"mail":false,"groups":["syslog","sshd","authentication_failures"],"gdpr": + ["IV_35.7.d","IV_32.2"],"hipaa":["164.312.b"],"nist_800_53":["SI.4","AU.14","AC.7"],"pci_dss": + ["11.4","10.2.4","10.2.5"],"tsc":["CC6.1","CC6.8","CC7.2","CC7.3"]},"agent":{"id":"000","name": + "localhost.localdomain"},"manager":{"name":"localhost.localdomain"},"id":"1652272159.1549653", + "previous_output":"May 11 12:29:16 localhost sshd[17582]: Invalid user paco from 172.17.1.1 port + 56402\nMay 11 12:29:14 localhost sshd[17580]: Invalid user paco from 172.17.1.1 port 56400\nMay + 11 12:29:11 localhost sshd[17578]: Invalid user paco from 172.17.1.1 port 56398\nMay 11 12:29:09 + localhost sshd[17576]: Invalid user paco from 172.17.1.1 port 56396\nMay 11 12:29:07 localhost + sshd[17574]: Invalid user paco from 172.17.1.1 port 56394\nMay 11 12:29:04 localhost sshd[17572]: + Invalid user paco from 172.17.1.1 port 56392\nMay 11 12:29:00 localhost sshd[17570]: Invalid user + paco from 172.17.1.1 port 56390","full_log":"May 11 12:29:18 localhost sshd[17584]: Invalid user + paco from 172.17.1.1 port 56404","predecoder":{"program_name":"sshd","timestamp":"May 11 12:29:18", + "hostname":"localhost"},"decoder":{"parent":"sshd","name":"sshd"},"data":{"srcip":"172.17.1.1", + "srcport":"56404","srcuser":"paco"},"location":"/var/log/secure"}' alert_type: invalid -- name: Read Overlong json alert +- name: read_overlong_json_alert description: Read a an alert that is over 64kb alert from alerts.json - Integration fails configuration_parameters: - API_KEY: Insert using --integration-api-key parameter + WEBHOOK_URL: Insert using --slack-webhook-url parameter metadata: - alert_sample: '{"timestamp":"2022-07-20T14:53:16.482+0000","rule":{"level":5,"description": - "File added to the system.","id":"554","firedtimes":1,"mail":false,"groups":["ossec","syscheck", - "syscheck_entry_added","syscheck_file"],"pci_dss":["11.5"],"gpg13":["4.11"],"gdpr":["II_5.1.f"], - 
"hipaa":["164.312.c.1","164.312.c.2"],"nist_800_53":["SI.7"],"tsc":["PI1.4","PI1.5","CC6.1","CC6.8", - "CC7.2","CC7.3"]},"agent":{"id":"000","name":"padding_input"},"manager":{"name":"c3"},"id": - "1657551196.2754","full_log":"File /test_folder/TEST_INVALID_ALERT.txt added\nMode: scheduled\n", - "syscheck":{"path":"/test_folder/TEST_INVALID_ALERT.txt","mode":"scheduled","size_after":"16", - "perm_after":"rw-r--r--","uid_after":"0","gid_after":"0","md5_after": - "2982666f29e2736e7ca0e12dd638d433","sha1_after":"49999430cc5652dedd26352b0342097e8fa3affe", - "sha256_after":"32bc19c9406a98ab21e5ec79fbd5bba2cb79755607a9f382c662d37b5bf5d8ea","uname_after": - "root","gname_after":"root","mtime_after":"2022-07-11T14:53:07","inode_after":9793,"event":"added"}, - "decoder":{"name":"syscheck_new_entry"},"location":"syscheck"}' + alert_sample: '{"timestamp":"2022-05-11T12:29:19.905+0000","rule":{"level":10,"description": + "sshd: brute force trying to get access to the system. Non existent user.","id":"5712", + "mitre":{"id":["T1110"],"tactic":["Credential Access"],"technique":["Brute Force"]},"frequency":8, + "firedtimes":1,"mail":false,"groups":["syslog","sshd","authentication_failures"],"gdpr": + ["IV_35.7.d","IV_32.2"],"hipaa":["164.312.b"],"nist_800_53":["SI.4","AU.14","AC.7"],"pci_dss": + ["11.4","10.2.4","10.2.5"],"tsc":["CC6.1","CC6.8","CC7.2","CC7.3"]},"agent":{"id":"000","name": + "padding_input"},"manager":{"name":"localhost.localdomain"},"id":"1652272159.1549653", + "previous_output":"May 11 12:29:16 localhost sshd[17582]: Invalid user paco from 172.17.1.1 port + 56402\nMay 11 12:29:14 localhost sshd[17580]: Invalid user paco from 172.17.1.1 port 56400\nMay + 11 12:29:11 localhost sshd[17578]: Invalid user paco from 172.17.1.1 port 56398\nMay 11 12:29:09 + localhost sshd[17576]: Invalid user paco from 172.17.1.1 port 56396\nMay 11 12:29:07 localhost + sshd[17574]: Invalid user paco from 172.17.1.1 port 56394\nMay 11 12:29:04 localhost sshd[17572]: + Invalid user paco from 172.17.1.1 port 56392\nMay 11 12:29:00 localhost sshd[17570]: Invalid user + paco from 172.17.1.1 port 56390","full_log":"May 11 12:29:18 localhost sshd[17584]: Invalid user + paco from 172.17.1.1 port 56404","predecoder":{"program_name":"sshd","timestamp":"May 11 12:29:18", + "hostname":"localhost"},"decoder":{"parent":"sshd","name":"sshd"},"data":{"srcip":"172.17.1.1", + "srcport":"56404","srcuser":"paco"},"location":"/var/log/secure"}' alert_type: overlong diff --git a/tests/integration/test_integratord/data/test_cases/cases_integratord_read_json_file_deleted.yaml b/tests/integration/test_integratord/data/test_cases/cases_integratord_read_json_file_deleted.yaml deleted file mode 100644 index 330f623cde..0000000000 --- a/tests/integration/test_integratord/data/test_cases/cases_integratord_read_json_file_deleted.yaml +++ /dev/null @@ -1,17 +0,0 @@ -- name: Cannot read alerts - Json File Deleted - description: The alerts.json file is missing and it cannot read alerts from it. 
- configuration_parameters: - API_KEY: Insert using --integration-api-key parameter - metadata: - alert_sample: '{"timestamp":"2022-07-20T14:53:16.482+0000","rule":{"level":5,"description": - "File added to the system.","id":"554","firedtimes":1,"mail":false,"groups":["ossec","syscheck", - "syscheck_entry_added","syscheck_file"],"pci_dss":["11.5"],"gpg13":["4.11"],"gdpr":["II_5.1.f"], - "hipaa":["164.312.c.1","164.312.c.2"],"nist_800_53":["SI.7"],"tsc":["PI1.4","PI1.5","CC6.1","CC6.8", - "CC7.2","CC7.3"]},"agent":{"id":"000","name":"padding_input"},"manager":{"name":"c3"},"id": - "1657551196.2754","full_log":"File /test_folder/TEST_FILE_DELETED_ALERT.txt added\nMode: - scheduled\n","syscheck":{"path":"/test_folder/TEST_FILE_DELETED_ALERT.txt","mode":"scheduled", - "size_after":"16","perm_after":"rw-r--r--","uid_after":"0","gid_after":"0","md5_after": - "2982666f29e2736e7ca0e12dd638d433","sha1_after":"49999430cc5652dedd26352b0342097e8fa3affe", - "sha256_after":"32bc19c9406a98ab21e5ec79fbd5bba2cb79755607a9f382c662d37b5bf5d8ea","uname_after": - "root","gname_after":"root","mtime_after":"2022-07-11T14:53:07","inode_after":9793,"event":"added"}, - "decoder":{"name":"syscheck_new_entry"},"location":"syscheck"}' diff --git a/tests/integration/test_integratord/data/test_cases/cases_integratord_read_valid_json_alerts.yaml b/tests/integration/test_integratord/data/test_cases/cases_integratord_read_valid_json_alerts.yaml index f1fabd521c..8ee984321a 100644 --- a/tests/integration/test_integratord/data/test_cases/cases_integratord_read_valid_json_alerts.yaml +++ b/tests/integration/test_integratord/data/test_cases/cases_integratord_read_valid_json_alerts.yaml @@ -1,17 +1,22 @@ -- name: Read valid json alert +- name: read_valid_json_alert description: Read a valid alert from alerts.json configuration_parameters: - API_KEY: Insert using --integration-api-key parameter + WEBHOOK_URL: Insert using --slack-webhook-url parameter metadata: - alert_sample: '{"timestamp":"2022-07-20T14:53:16.482+0000","rule":{"level":5,"description": - "File added to the system.","id":"554","firedtimes":1,"mail":false,"groups":["ossec","syscheck", - "syscheck_entry_added","syscheck_file"],"pci_dss":["11.5"],"gpg13":["4.11"],"gdpr":["II_5.1.f"], - "hipaa":["164.312.c.1","164.312.c.2"],"nist_800_53":["SI.7"],"tsc":["PI1.4","PI1.5","CC6.1","CC6.8", - "CC7.2","CC7.3"]},"agent":{"id":"000","name":"padding_input"},"manager":{"name":"c3"},"id": - "1657551196.2754","full_log":"File /test_folder/TEST_VALID_ALERT.txt added\nMode: scheduled\n", - "syscheck":{"path":"/test_folder/TEST_VALID_ALERT.txt","mode":"scheduled","size_after":"16", - "perm_after":"rw-r--r--","uid_after":"0","gid_after":"0","md5_after": - "2982666f29e2736e7ca0e12dd638d433","sha1_after":"49999430cc5652dedd26352b0342097e8fa3affe", - "sha256_after":"32bc19c9406a98ab21e5ec79fbd5bba2cb79755607a9f382c662d37b5bf5d8ea","uname_after": - "root","gname_after":"root","mtime_after":"2022-07-11T14:53:07","inode_after":9793,"event":"added"}, - "decoder":{"name":"syscheck_new_entry"},"location":"syscheck"}' + alert_sample: '{"timestamp":"2022-05-11T12:29:19.905+0000","rule":{"level":10,"description": + "sshd: brute force trying to get access to the system. 
Non existent user.","id":"5712", + "mitre":{"id":["T1110"],"tactic":["Credential Access"],"technique":["Brute Force"]},"frequency":8, + "firedtimes":1,"mail":false,"groups":["syslog","sshd","authentication_failures"],"gdpr": + ["IV_35.7.d","IV_32.2"],"hipaa":["164.312.b"],"nist_800_53":["SI.4","AU.14","AC.7"],"pci_dss": + ["11.4","10.2.4","10.2.5"],"tsc":["CC6.1","CC6.8","CC7.2","CC7.3"]},"agent":{"id":"000","name": + "localhost.localdomain"},"manager":{"name":"localhost.localdomain"},"id":"1652272159.1549653", + "previous_output":"May 11 12:29:16 localhost sshd[17582]: Invalid user paco from 172.17.1.1 port + 56402\nMay 11 12:29:14 localhost sshd[17580]: Invalid user paco from 172.17.1.1 port 56400\nMay + 11 12:29:11 localhost sshd[17578]: Invalid user paco from 172.17.1.1 port 56398\nMay 11 12:29:09 + localhost sshd[17576]: Invalid user paco from 172.17.1.1 port 56396\nMay 11 12:29:07 localhost + sshd[17574]: Invalid user paco from 172.17.1.1 port 56394\nMay 11 12:29:04 localhost sshd[17572]: + Invalid user paco from 172.17.1.1 port 56392\nMay 11 12:29:00 localhost sshd[17570]: Invalid user + paco from 172.17.1.1 port 56390","full_log":"May 11 12:29:18 localhost sshd[17584]: Invalid user + paco from 172.17.1.1 port 56404","predecoder":{"program_name":"sshd","timestamp":"May 11 12:29:18", + "hostname":"localhost"},"decoder":{"parent":"sshd","name":"sshd"},"data":{"srcip":"172.17.1.1", + "srcport":"56404","srcuser":"paco"},"location":"/var/log/secure"}' diff --git a/tests/integration/test_integratord/test_alerts_reading.py b/tests/integration/test_integratord/test_alerts_reading.py new file mode 100644 index 0000000000..c3507f9ca9 --- /dev/null +++ b/tests/integration/test_integratord/test_alerts_reading.py @@ -0,0 +1,357 @@ +''' +copyright: Copyright (C) 2015-2022, Wazuh Inc. + Created by Wazuh, Inc. . + This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 + +type: integration + +brief: Integratord manages Wazuh integrations with other applications such as Yara or Slack, by feeding +the integrated aplications with the alerts located in alerts.json file. This test module aims to validate that +given a specific alert, the expected response is recieved, depending if it is a valid/invalid json alert, an +overlong alert (64kb+) or what happens when it cannot read the file because it is missing. + +components: + - integratord + +suite: test_integratord + +targets: + - manager + +daemons: + - wazuh-integratord + +os_platform: + - Linux + +os_version: + - Centos 8 + - Ubuntu Focal + +references: + - https://documentation.wazuh.com/current/user-manual/manager/manual-integration.html#slack + - https://documentation.wazuh.com/current/user-manual/reference/daemons/wazuh-integratord.html + +pytest_args: + - tier: + 0: Only level 0 tests are performed, they check basic functionalities and are quick to perform. + 1: Only level 1 tests are performed, they check functionalities of medium complexity. + 2: Only level 2 tests are performed, they check advanced functionalities and are slow to perform. 
+ +tags: + - slack +''' +import os +import time + +import pytest +from wazuh_testing import global_parameters +from wazuh_testing.tools import WAZUH_PATH, LOG_FILE_PATH, ALERT_FILE_PATH +from wazuh_testing.tools.file import remove_file, copy +from wazuh_testing.tools.local_actions import run_local_command_returning_output +from wazuh_testing.modules import integratord as integrator +from wazuh_testing.tools.configuration import get_test_cases_data, load_configuration_template +from wazuh_testing.tools.monitoring import FileMonitor +from wazuh_testing.modules.integratord import event_monitor as evm + + +def replace_webhook_url(ids, configurations): + '''Replace the Webhook URL in each test case configuration parameters. + + Args: + ids (list): List of ids of test cases. + configurations (list): List of test's configuration parameters. + + Returns: + configurations (list): List of configurations. + ''' + for i in range(0, len(ids)): + configurations[i]['WEBHOOK_URL'] = global_parameters.slack_webhook_url + + return configurations + + +# Marks +pytestmark = [pytest.mark.server] + +# Reference paths +TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') +CONFIGURATIONS_PATH = os.path.join(TEST_DATA_PATH, 'configuration') +TEST_CASES_PATH = os.path.join(TEST_DATA_PATH, 'test_cases') + +# Configuration and test cases paths +configurations_template = os.path.join(CONFIGURATIONS_PATH, 'configuration_alerts_reading.yaml') +t1_cases_path = os.path.join(TEST_CASES_PATH, 'cases_integratord_change_inode_alert.yaml') +t2_cases_path = os.path.join(TEST_CASES_PATH, 'cases_integratord_read_valid_json_alerts.yaml') +t3_cases_path = os.path.join(TEST_CASES_PATH, 'cases_integratord_read_invalid_json_alerts.yaml') + +# Get configurations and test cases +t1_config_params, t1_metadata, t1_cases_ids = get_test_cases_data(t1_cases_path) +t2_config_params, t2_metadata, t2_cases_ids = get_test_cases_data(t2_cases_path) +t3_config_params, t3_metadata, t3_cases_ids = get_test_cases_data(t3_cases_path) + +t1_config_params = replace_webhook_url(t1_cases_ids, t1_config_params) +t2_config_params = replace_webhook_url(t2_cases_ids, t2_config_params) +t3_config_params = replace_webhook_url(t3_cases_ids, t3_config_params) + +# Load tests configurations +t1_config = load_configuration_template(configurations_template, t1_config_params, t1_metadata) +t2_config = load_configuration_template(configurations_template, t2_config_params, t2_metadata) +t3_config = load_configuration_template(configurations_template, t3_config_params, t3_metadata) + +# Variables +TEMP_FILE_PATH = os.path.join(WAZUH_PATH, 'logs/alerts/alerts.json.tmp') +daemons_handler_configuration = {'daemons': integrator.REQUIRED_DAEMONS} +local_internal_options = {'integrator.debug': '2', 'analysisd.debug': '1', 'monitord.rotate_log': '0'} + + +# Tests +@pytest.mark.tier(level=1) +@pytest.mark.parametrize('configuration, metadata', zip(t1_config, t1_metadata), ids=t1_cases_ids) +def test_integratord_change_json_inode(configuration, metadata, set_wazuh_configuration, truncate_monitored_files, + configure_local_internal_options_module, daemons_handler_function, + wait_for_start_module): + ''' + description: Check that wazuh-integratord detects a change in the inode of the alerts.json and continues reading + alerts. + + test_phases: + - setup: + - Apply ossec.conf configuration changes according to the configuration template and use case. + - Truncate Wazuh's logs. + - Configure internal options. 
+ - Restart the daemons defined in `daemons_handler_configuration`. + - Wait for the restarted modules to start correctly. + - test: + - Wait until integratord is ready to read alerts. + - Insert an alert in the `alerts.json` file. + - Check if the alert was received by Slack. + - Replace the `alerts.json` file while wazuh-integratord is reading it. + - Wait for the inode change to be detected by wazuh-integratord. + - Check if wazuh-integratord detects that the file's inode has changed. + - Insert an alert in the `alerts.json` file. + - Check if the alert is processed. + - Check alert was received by Slack. + - teardown: + - Truncate Wazuh's logs. + - Restore initial configuration, both `ossec.conf` and `local_internal_options.conf`. + + wazuh_min_version: 4.3.5 + + tier: 1 + + parameters: + - configuration: + type: dict + brief: Configuration loaded from `configuration_template`. + - metadata: + type: dict + brief: Test case metadata. + - set_wazuh_configuration: + type: fixture + brief: Set wazuh configuration. + - truncate_monitored_files: + type: fixture + brief: Truncate all the log files and json alerts files before and after the test execution. + - configure_local_internal_options_module: + type: fixture + brief: Configure the local internal options file. + - daemons_handler_function: + type: fixture + brief: Handler of Wazuh daemons. + - wait_for_start_module: + type: fixture + brief: Detect the start of the Integratord module in the ossec.log + + assertions: + - Verify the expected response with for a given alert is recieved + + input_description: + - The `configuration_integratord_read_json_alerts.yaml` file provides the module configuration for this test. + - The `cases_integratord_read_json_alerts` file provides the test cases. + + expected_output: + - r'.+wazuh-integratord.*DEBUG: jqueue_next.*Alert file inode changed.*' + - r'.+wazuh-integratord.*Processing alert.*' + - r'.+wazuh-integratord.*' + ''' + wazuh_monitor = FileMonitor(LOG_FILE_PATH) + command = f"echo '{metadata['alert_sample']}' >> {ALERT_FILE_PATH}" + + # Wait until integratord is ready to read alerts + time.sleep(integrator.TIME_TO_DETECT_FILE) + + # Insert a new alert + run_local_command_returning_output(command) + + evm.check_third_party_response(file_monitor=wazuh_monitor, timeout=global_parameters.default_timeout) + + # Change file to change inode + copy(ALERT_FILE_PATH, TEMP_FILE_PATH) + remove_file(ALERT_FILE_PATH) + copy(TEMP_FILE_PATH, ALERT_FILE_PATH) + + # Wait for Inode change to be detected + # The `integratord` library tries to read alerts from the file every 1 second. So, the test waits 1 second + 1 + # until the file is reloaded. 
+ time.sleep(integrator.TIME_TO_DETECT_FILE) + + evm.check_file_inode_changed(file_monitor=wazuh_monitor, timeout=global_parameters.default_timeout) + + # Insert a new alert + run_local_command_returning_output(command) + + evm.check_alert_processing(file_monitor=wazuh_monitor, timeout=global_parameters.default_timeout) + + # Check if the alert was correctly sent to Slack + evm.check_third_party_response(file_monitor=wazuh_monitor, timeout=global_parameters.default_timeout) + + +@pytest.mark.tier(level=1) +@pytest.mark.parametrize('configuration, metadata', zip(t2_config, t2_metadata), ids=t2_cases_ids) +def test_integratord_read_valid_alerts(configuration, metadata, set_wazuh_configuration, truncate_monitored_files, + configure_local_internal_options_module, daemons_handler_function, + wait_for_start_module): + ''' + description: Check that when a given alert is inserted into alerts.json, integratord works as expected. In case + of a valid alert, a slack integration alert is expected in the alerts.json file. + + test_phases: + - setup: + - Apply ossec.conf configuration changes according to the configuration template and use case. + - Truncate Wazuh's logs. + - Configure internal options. + - Restart the daemons defined in `daemons_handler_configuration`. + - Wait for the restarted modules to start correctly. + - test: + - Insert a valid alert in the alerts.json file. + - Check if the alert was received by Slack correctly (HTTP response status code: 200) + - teardown: + - Truncate Wazuh's logs. + - Restore initial configuration, both `ossec.conf` and `local_internal_options.conf`. + + wazuh_min_version: 4.3.7 + + tier: 1 + + parameters: + - configuration: + type: dict + brief: Configuration loaded from `configuration_template`. + - metadata: + type: dict + brief: Test case metadata. + - set_wazuh_configuration: + type: fixture + brief: Set wazuh configuration. + - truncate_monitored_files: + type: fixture + brief: Truncate all the log files and json alerts files before and after the test execution. + - configure_local_internal_options_module: + type: fixture + brief: Configure the local internal options file. + - daemons_handler_function: + type: fixture + brief: Handler of Wazuh daemons. + - wait_for_start_module: + type: fixture + brief: Detect the start of the Integratord module in the ossec.log + + assertions: + - Verify the expected response with for a given alert is recieved + + input_description: + - The `configuration_integratord_read_json_alerts.yaml` file provides the module configuration for this test. + - The `cases_integratord_read_valid_json_alerts` file provides the test cases. + + expected_output: + - r'.+wazuh-integratord.*alert_id.*\"integration\": \"slack\".*' + ''' + sample = metadata['alert_sample'] + wazuh_monitor = FileMonitor(LOG_FILE_PATH) + + run_local_command_returning_output(f"echo '{sample}' >> {ALERT_FILE_PATH}") + + # Read Response in ossec.log + evm.check_third_party_response(file_monitor=wazuh_monitor, timeout=global_parameters.default_timeout) + + +@pytest.mark.tier(level=1) +@pytest.mark.parametrize('configuration, metadata', zip(t3_config, t3_metadata), ids=t3_cases_ids) +def test_integratord_read_invalid_alerts(configuration, metadata, set_wazuh_configuration, truncate_monitored_files, + configure_local_internal_options_module, daemons_handler_function, + wait_for_start_module): + ''' + description: Check that when a given alert is inserted into alerts.json, integratord works as expected. 
If the alert + is invalid, broken, or overlong a message will appear in the ossec.log file. + + test_phases: + - setup: + - Apply ossec.conf configuration changes according to the configuration template and use case. + - Truncate Wazuh's logs. + - Configure internal options. + - Restart the daemons defined in `daemons_handler_configuration`. + - Wait for the restarted modules to start correctly. + - test: + - Insert an invalid alert in the alerts.json file. + - Check if wazuh-integratord process the alert and report an error. + - teardown: + - Truncate Wazuh's logs. + - Restore initial configuration, both `ossec.conf` and `local_internal_options.conf`. + + wazuh_min_version: 4.3.7 + + tier: 1 + + parameters: + - configuration: + type: dict + brief: Configuration loaded from `configuration_template`. + - metadata: + type: dict + brief: Test case metadata. + - set_wazuh_configuration: + type: fixture + brief: Set wazuh configuration. + - truncate_monitored_files: + type: fixture + brief: Truncate all the log files and json alerts files before and after the test execution. + - configure_local_internal_options_module: + type: fixture + brief: Configure the local internal options file. + - daemons_handler_function: + type: fixture + brief: Handler of Wazuh daemons. + - wait_for_start_module: + type: fixture + brief: Detect the start of the Integratord module in the ossec.log + + assertions: + - Verify the expected response with for a given alert is recieved + + input_description: + - The `configuration_integratord_read_json_alerts.yaml` file provides the module configuration for this test. + - The `cases_integratord_read_invalid_json_alerts` file provides the test cases. + + expected_output: + - r'.+wazuh-integratord.*WARNING: Invalid JSON alert read.*' + - r'.+wazuh-integratord.*WARNING: Overlong JSON alert read.*' + + ''' + sample = metadata['alert_sample'] + wazuh_monitor = FileMonitor(LOG_FILE_PATH) + + if metadata['alert_type'] == 'invalid': + callback = integrator.CB_INVALID_ALERT_READ + else: + callback = integrator.CB_OVERLONG_ALERT_READ + # Add 90kb of padding to alert to make it go over the allowed value of 64KB. + padding = "0" * 90000 + sample = sample.replace("padding_input", "agent_" + padding) + + run_local_command_returning_output(f"echo '{sample}' >> {ALERT_FILE_PATH}") + + # Read Response in ossec.log + evm.check_invalid_alert_read(file_monitor=wazuh_monitor, timeout=global_parameters.default_timeout, + callback=callback, + error_message=f"Did not recieve the expected '{callback}' event") diff --git a/tests/integration/test_integratord/test_integratord_change_inode_alert.py b/tests/integration/test_integratord/test_integratord_change_inode_alert.py deleted file mode 100644 index 4c35546427..0000000000 --- a/tests/integration/test_integratord/test_integratord_change_inode_alert.py +++ /dev/null @@ -1,166 +0,0 @@ -''' -copyright: Copyright (C) 2015-2022, Wazuh Inc. - Created by Wazuh, Inc. . - This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 - - -type: integration - -brief: Integratord manages wazuh integrations with other applications such as Yara or Virustotal, by feeding -the integrated aplications with the alerts located in alerts.json file. This test module aims to validate that -given a specific alert, the expected response is recieved, depending if it is a valid/invalid json alert, an -overlong alert (64kb+) or what happens when it cannot read the file because it is missing. 
- -components: - - integratord - -suite: integratord_change_inode_alert - -targets: - - agent - -daemons: - - wazuh-integratord - -os_platform: - - Linux - -os_version: - - Centos 8 - - Ubuntu Focal - -references: - - https://documentation.wazuh.com/current/user-manual/capabilities/virustotal-scan/integration.html - - https://documentation.wazuh.com/current/user-manual/reference/daemons/wazuh-integratord.htm - -pytest_args: - - tier: - 0: Only level 0 tests are performed, they check basic functionalities and are quick to perform. - 1: Only level 1 tests are performed, they check functionalities of medium complexity. - 2: Only level 2 tests are performed, they check advanced functionalities and are slow to perform. - -tags: - - virustotal -''' -import os -import time -import pytest -from wazuh_testing import global_parameters -from wazuh_testing.tools import WAZUH_PATH, LOG_FILE_PATH, ALERT_FILE_PATH -from wazuh_testing.tools.file import remove_file, copy -from wazuh_testing.tools.local_actions import run_local_command_returning_output -from wazuh_testing.modules import integratord as integrator -from wazuh_testing.modules.integratord.event_monitor import check_integratord_event -from wazuh_testing.tools.configuration import get_test_cases_data, load_configuration_template -from wazuh_testing.tools.monitoring import FileMonitor, generate_monitoring_callback - - -# Marks -pytestmark = [pytest.mark.server] - -# Reference paths -TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') -CONFIGURATIONS_PATH = os.path.join(TEST_DATA_PATH, 'configuration_template') -TEST_CASES_PATH = os.path.join(TEST_DATA_PATH, 'test_cases') - -# Configuration and cases data -configurations_path = os.path.join(CONFIGURATIONS_PATH, 'config_integratord_read_json_alerts.yaml') -cases_path = os.path.join(TEST_CASES_PATH, 'cases_integratord_change_inode_alert.yaml') - -# Configurations -configuration_parameters, configuration_metadata, case_ids = get_test_cases_data(cases_path) -configuration_parameters[0]['API_KEY'] = global_parameters.integration_api_key -configurations = load_configuration_template(configurations_path, configuration_parameters, - configuration_metadata) -local_internal_options = {'integrator.debug': '2'} - -# Variables -TEMP_FILE_PATH = os.path.join(WAZUH_PATH, 'logs/alerts/alerts.json.tmp') - - -# Tests -@pytest.mark.tier(level=1) -@pytest.mark.parametrize('configuration, metadata', - zip(configurations, configuration_metadata), ids=case_ids) -def test_integratord_change_json_inode(configuration, metadata, set_wazuh_configuration, truncate_monitored_files, - configure_local_internal_options_module, restart_wazuh_daemon_function, - wait_for_start_module): - ''' - description: Check that if when reading the alerts.json file, the inode for the file changes, integratord will - reload the file and continue reading from it. - - test_phases: - - Insert an alert alerts.json file. - - Replace the alerts.json file while it being read. - - Check integratord detects the file's inode has changed. - - Wait for integratord to start reading from the file again. - - Insert an alert - - Check virustotal response is added in ossec.log - - wazuh_min_version: 4.3.7 - - tier: 1 - - parameters: - - configuration: - type: dict - brief: Configuration loaded from `configuration_template`. - - metadata: - type: dict - brief: Test case metadata. - - set_wazuh_configuration: - type: fixture - brief: Set wazuh configuration. 
- - truncate_monitored_files: - type: fixture - brief: Truncate all the log files and json alerts files before and after the test execution. - - configure_local_internal_options_module: - type: fixture - brief: Configure the local internal options file. - - restart_wazuh_daemon_function: - type: fixture - brief: Restart wazuh's daemon before starting a test. - - wait_for_start_module: - type: fixture - brief: Detect the start of the Integratord module in the ossec.log - - assertions: - - Verify the expected response with for a given alert is recieved - - input_description: - - The `config_integratord_read_json_alerts.yaml` file provides the module configuration for this test. - - The `cases_integratord_read_json_alerts` file provides the test cases. - - expected_output: - - r'.*(wazuh-integratord.*DEBUG: jqueue_next.*Alert file inode changed).*' - - ''' - wazuh_monitor = FileMonitor(LOG_FILE_PATH) - command = f"echo '{metadata['alert_sample']}' >> {ALERT_FILE_PATH}" - # Insert Alerts - run_local_command_returning_output(command) - - # Get that alert is read - check_integratord_event(file_monitor=wazuh_monitor, timeout=global_parameters.default_timeout, - callback=generate_monitoring_callback(integrator.CB_INTEGRATORD_SENDING_ALERT), - error_message=integrator.ERR_MSG_SENDING_ALERT_NOT_FOUND, - update_position=False) - - # Change file to change inode - copy(ALERT_FILE_PATH, TEMP_FILE_PATH) - remove_file(ALERT_FILE_PATH) - copy(TEMP_FILE_PATH, ALERT_FILE_PATH) - - # Wait for Inode change to be detected and insert new alert - time.sleep(3) - run_local_command_returning_output(command) - - # Monitor Inode Changed - check_integratord_event(file_monitor=wazuh_monitor, timeout=global_parameters.default_timeout * 2, - callback=generate_monitoring_callback(integrator.CB_ALERTS_FILE_INODE_CHANGED), - error_message=integrator.ERR_MSG_ALERT_INODE_CHANGED_NOT_FOUND) - - # Read Response in ossec.log - check_integratord_event(file_monitor=wazuh_monitor, timeout=global_parameters.default_timeout, - callback=generate_monitoring_callback(integrator.CB_PROCESSING_ALERT), - error_message=integrator.ERR_MSG_VIRUSTOTAL_ALERT_NOT_DETECTED) diff --git a/tests/integration/test_integratord/test_integratord_read_json_alerts.py b/tests/integration/test_integratord/test_integratord_read_json_alerts.py deleted file mode 100644 index 6a3a3e3dff..0000000000 --- a/tests/integration/test_integratord/test_integratord_read_json_alerts.py +++ /dev/null @@ -1,212 +0,0 @@ -''' -copyright: Copyright (C) 2015-2022, Wazuh Inc. - Created by Wazuh, Inc. . - This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 - -type: integration - -brief: Integratord manages wazuh integrations with other applications such as Yara or Virustotal, by feeding -the integrated aplications with the alerts located in alerts.json file. This test module aims to validate that -given a specific alert, the expected response is recieved, depending if it is a valid/invalid json alert, an -overlong alert (64kb+) or what happens when it cannot read the file because it is missing. 
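The removed inode test above exercises integratord's jqueue_next() reload path: copying alerts.json aside, deleting it, and copying it back leaves the content intact but normally assigns a new inode, which is what the 'Alert file inode changed' debug line reports. A small sketch of the same file swap, with the paths assumed from the test variables:

import os
import shutil

ALERTS_JSON = '/var/ossec/logs/alerts/alerts.json'    # assumed ALERT_FILE_PATH
TEMP_COPY = '/var/ossec/logs/alerts/alerts.json.tmp'  # mirrors TEMP_FILE_PATH above

inode_before = os.stat(ALERTS_JSON).st_ino

# Copy aside, delete, copy back: same content, normally a fresh inode.
shutil.copy2(ALERTS_JSON, TEMP_COPY)
os.remove(ALERTS_JSON)
shutil.copy2(TEMP_COPY, ALERTS_JSON)

inode_after = os.stat(ALERTS_JSON).st_ino
# integratord keys on this change and logs 'Alert file inode changed' before reopening the file.
print(inode_before, '->', inode_after)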
- -components: - - integratord - -suite: integratord_read_json_alerts - -targets: - - agent - -daemons: - - wazuh-integratord - -os_platform: - - Linux - -os_version: - - Centos 8 - - Ubuntu Focal - -references: - - https://documentation.wazuh.com/current/user-manual/capabilities/virustotal-scan/integration.html - - https://documentation.wazuh.com/current/user-manual/reference/daemons/wazuh-integratord.htm - -pytest_args: - - tier: - 0: Only level 0 tests are performed, they check basic functionalities and are quick to perform. - 1: Only level 1 tests are performed, they check functionalities of medium complexity. - 2: Only level 2 tests are performed, they check advanced functionalities and are slow to perform. - -tags: - - virustotal -''' -import os -import pytest -import time - -from wazuh_testing import global_parameters -from wazuh_testing.tools import LOG_FILE_PATH, ALERT_FILE_PATH -from wazuh_testing.modules import integratord as integrator -from wazuh_testing.modules.integratord.event_monitor import check_integratord_event -from wazuh_testing.tools.local_actions import run_local_command_returning_output -from wazuh_testing.tools.configuration import get_test_cases_data, load_configuration_template -from wazuh_testing.tools.monitoring import FileMonitor, generate_monitoring_callback - - -# Marks -pytestmark = [pytest.mark.server] - -# Reference paths -TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') -CONFIGURATIONS_PATH = os.path.join(TEST_DATA_PATH, 'configuration_template') -TEST_CASES_PATH = os.path.join(TEST_DATA_PATH, 'test_cases') - -# Configuration and cases data -configurations_path = os.path.join(CONFIGURATIONS_PATH, 'config_integratord_read_json_alerts.yaml') -t1_cases_path = os.path.join(TEST_CASES_PATH, 'cases_integratord_read_valid_json_alerts.yaml') -t2_cases_path = os.path.join(TEST_CASES_PATH, 'cases_integratord_read_invalid_json_alerts.yaml') - - -# Configurations -t1_configuration_parameters, t1_configuration_metadata, t1_case_ids = get_test_cases_data(t1_cases_path) -t1_configuration_parameters[0]['API_KEY'] = global_parameters.integration_api_key -t1_configurations = load_configuration_template(configurations_path, t1_configuration_parameters, - t1_configuration_metadata) -t2_configuration_parameters, t2_configuration_metadata, t2_case_ids = get_test_cases_data(t2_cases_path) -t2_configuration_parameters[0]['API_KEY'] = global_parameters.integration_api_key -t2_configurations = load_configuration_template(configurations_path, t2_configuration_parameters, - t2_configuration_metadata) -local_internal_options = {'integrator.debug': '2'} - - -# Tests -@pytest.mark.tier(level=1) -@pytest.mark.parametrize('configuration, metadata', - zip(t1_configurations, t1_configuration_metadata), ids=t1_case_ids) -def test_integratord_read_valid_alerts(configuration, metadata, set_wazuh_configuration, truncate_monitored_files, - configure_local_internal_options_module, restart_wazuh_daemon_function, - wait_for_start_module): - ''' - description: Check that when a given alert is inserted into alerts.json, integratord works as expected. In case - of a valid alert, a virustotal integration alert is expected in the alerts.json file. - wazuh_min_version: 4.3.7 - - test_phases: - - Insert an alert alerts.json file. - - Check virustotal response is added in ossec.log - - tier: 1 - - parameters: - - configuration: - type: dict - brief: Configuration loaded from `configuration_template`. - - metadata: - type: dict - brief: Test case metadata. 
- - set_wazuh_configuration: - type: fixture - brief: Set wazuh configuration. - - truncate_monitored_files: - type: fixture - brief: Truncate all the log files and json alerts files before and after the test execution. - - configure_local_internal_options_module: - type: fixture - brief: Configure the local internal options file. - - restart_wazuh_daemon_function: - type: fixture - brief: Restart wazuh daemon before starting a test. - - wait_for_start_module: - type: fixture - brief: Detect the start of the Integratord module in the ossec.log - - assertions: - - Verify the expected response with for a given alert is recieved - - input_description: - - The `config_integratord_read_json_alerts.yaml` file provides the module configuration for this test. - - The `cases_integratord_read_valid_json_alerts` file provides the test cases. - - expected_output: - - r'.*wazuh-integratord.*alert_id.*\"integration\": \"virustotal\".*' - ''' - - sample = metadata['alert_sample'] - wazuh_monitor = FileMonitor(LOG_FILE_PATH) - run_local_command_returning_output(f"echo '{sample}' >> {ALERT_FILE_PATH}") - - # Read Response in ossec.log - check_integratord_event(file_monitor=wazuh_monitor, timeout=global_parameters.default_timeout, - callback=generate_monitoring_callback(integrator.CB_VIRUSTOTAL_ALERT), - error_message=integrator.ERR_MSG_VIRUSTOTAL_ALERT_NOT_DETECTED) - - -@pytest.mark.tier(level=1) -@pytest.mark.parametrize('configuration, metadata', - zip(t2_configurations, t2_configuration_metadata), ids=t2_case_ids) -def test_integratord_read_invalid_alerts(configuration, metadata, set_wazuh_configuration, truncate_monitored_files, - configure_local_internal_options_module, restart_wazuh_daemon_function, - wait_for_start_module): - ''' - description: Check that when a given alert is inserted into alerts.json, integratord works as expected. In case - of a valid alert, a virustotal integration alert is expected in the alerts.json file. If the alert is invalid or - broken, or overly long a message will appear in the ossec.log file. - wazuh_min_version: 4.3.7 - - test_phases: - - Insert an alert alerts.json file. - - Check that the expected response message is given for an invalid alert. - - tier: 1 - parameters: - - configuration: - type: dict - brief: Configuration loaded from `configuration_template`. - - metadata: - type: dict - brief: Test case metadata. - - set_wazuh_configuration: - type: fixture - brief: Set wazuh configuration. - - truncate_monitored_files: - type: fixture - brief: Truncate all the log files and json alerts files before and after the test execution. - - configure_local_internal_options_module: - type: fixture - brief: Configure the local internal options file. - - restart_wazuh_daemon_function: - type: fixture - brief: Restart wazuh daemon before starting a test. - - wait_for_start_module: - type: fixture - brief: Detect the start of the Integratord module in the ossec.log - assertions: - - Verify the expected response with for a given alert is recieved - input_description: - - The `config_integratord_read_json_alerts.yaml` file provides the module configuration for this test. - - The `cases_integratord_read_invalid_json_alerts` file provides the test cases. 
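The removed valid-alert test above boils down to appending an alert and then waiting for the virustotal integration line in ossec.log. A rough stand-in for that monitoring step, not the wazuh_testing FileMonitor itself, with the log path taken as an assumption:

import re
import time

OSSEC_LOG = '/var/ossec/logs/ossec.log'   # assumed default LOG_FILE_PATH
VIRUSTOTAL_EVENT = re.compile(r'.*wazuh-integratord.*alert_id.*"integration": "virustotal".*')

def wait_for_virustotal_event(timeout=60):
    # Tail ossec.log until the virustotal integration event shows up or the timeout expires.
    deadline = time.time() + timeout
    with open(OSSEC_LOG) as log:
        log.seek(0, 2)                     # start from the end, like tail -f
        while time.time() < deadline:
            line = log.readline()
            if not line:
                time.sleep(0.5)
                continue
            if VIRUSTOTAL_EVENT.match(line):
                return line
    raise TimeoutError('virustotal integration event not found in ossec.log')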
- expected_output: - - r'.*wazuh-integratord.*WARNING: Invalid JSON alert read.*' - - r'.*wazuh-integratord.*WARNING: Overlong JSON alert read.*' - - ''' - sample = metadata['alert_sample'] - wazuh_monitor = FileMonitor(LOG_FILE_PATH) - - if metadata['alert_type'] == 'invalid': - callback = integrator.CB_INVALID_JSON_ALERT_READ - error_message = integrator.ERR_MSG_INVALID_ALERT_NOT_FOUND - - elif metadata['alert_type'] == 'overlong': - callback = integrator.CB_OVERLONG_JSON_ALERT_READ - error_message = integrator. ERR_MSG_OVERLONG_ALERT_NOT_FOUND - # Add 90kb of padding to alert to make it go over the allowed value of 64KB. - padding = "0"*90000 - sample = sample.replace("padding_input", "agent_" + padding) - - run_local_command_returning_output(f"echo '{sample}' >> {ALERT_FILE_PATH}") - - # Read Response in ossec.log - check_integratord_event(file_monitor=wazuh_monitor, timeout=global_parameters.default_timeout, - callback=generate_monitoring_callback(callback), error_message=error_message) diff --git a/tests/integration/test_integratord/test_integratord_read_json_file_deleted.py b/tests/integration/test_integratord/test_integratord_read_json_file_deleted.py deleted file mode 100644 index d9dfc68c56..0000000000 --- a/tests/integration/test_integratord/test_integratord_read_json_file_deleted.py +++ /dev/null @@ -1,154 +0,0 @@ -''' -copyright: Copyright (C) 2015-2022, Wazuh Inc. - Created by Wazuh, Inc. . - This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 - - -type: integration - -brief: Integratord manages wazuh integrations with other applications such as Yara or Virustotal, by feeding -the integrated aplications with the alerts located in alerts.json file. This test module aims to validate that -given a specific alert, the expected response is recieved, depending if it is a valid/invalid json alert, an -overlong alert (64kb+) or what happens when it cannot read the file because it is missing. - -components: - - integratord - -suite: integratord_read_json_file_deleted - -targets: - - agent - -daemons: - - wazuh-integratord - -os_platform: - - Linux - -os_version: - - Centos 8 - - Ubuntu Focal - -references: - - https://documentation.wazuh.com/current/user-manual/capabilities/virustotal-scan/integration.html - - https://documentation.wazuh.com/current/user-manual/reference/daemons/wazuh-integratord.htm - -pytest_args: - - tier: - 0: Only level 0 tests are performed, they check basic functionalities and are quick to perform. - 1: Only level 1 tests are performed, they check functionalities of medium complexity. - 2: Only level 2 tests are performed, they check advanced functionalities and are slow to perform. 
- -tags: - - virustotal -''' -import os -import time -import pytest - -from wazuh_testing import global_parameters -from wazuh_testing.tools import LOG_FILE_PATH, ALERT_FILE_PATH -from wazuh_testing.tools.file import remove_file -from wazuh_testing.modules import integratord as integrator -from wazuh_testing.modules.integratord.event_monitor import check_integratord_event -from wazuh_testing.tools.local_actions import run_local_command_returning_output -from wazuh_testing.tools.configuration import get_test_cases_data, load_configuration_template -from wazuh_testing.tools.monitoring import FileMonitor, generate_monitoring_callback - - -# Marks -pytestmark = [pytest.mark.server] - -# Reference paths -TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') -CONFIGURATIONS_PATH = os.path.join(TEST_DATA_PATH, 'configuration_template') -TEST_CASES_PATH = os.path.join(TEST_DATA_PATH, 'test_cases') - -# Configuration and cases data -configurations_path = os.path.join(CONFIGURATIONS_PATH, 'config_integratord_read_json_alerts.yaml') -cases_path = os.path.join(TEST_CASES_PATH, 'cases_integratord_read_json_file_deleted.yaml') - -# Configurations -configuration_parameters, configuration_metadata, case_ids = get_test_cases_data(cases_path) -configuration_parameters[0]['API_KEY'] = global_parameters.integration_api_key -configurations = load_configuration_template(configurations_path, configuration_parameters, - configuration_metadata) -local_internal_options = {'integrator.debug': '2'} - - -# Tests -@pytest.mark.tier(level=1) -@pytest.mark.parametrize('configuration, metadata', - zip(configurations, configuration_metadata), ids=case_ids) -def test_integratord_read_json_file_deleted(configuration, metadata, set_wazuh_configuration, truncate_monitored_files, - configure_local_internal_options_module, restart_wazuh_daemon_function, - wait_for_start_module): - ''' - description: Check that if while integratord is reading from the alerts.json file, it is deleted, the expected - error message is displayed, and if the file is created again and alerts are inserted, integratord continues - working and alerts are read. - wazuh_min_version: 4.3.7 - - test_phases: - - Remove alerts.json file. - - Wait for integratord to detect the file was removed. - - Create new alerts.json file. - - Wait for the new file to be detected. - - Insert an alert - - Check virustotal response is added in ossec.log - - tier: 1 - - parameters: - - configuration: - type: dict - brief: Configuration loaded from `configuration_template`. - - metadata: - type: dict - brief: Test case metadata. - - set_wazuh_configuration: - type: fixture - brief: Set wazuh configuration. - - truncate_monitored_files: - type: fixture - brief: Truncate all the log files and json alerts files before and after the test execution. - - configure_local_internal_options_module: - type: fixture - brief: Configure the local internal options file. - - restart_wazuh_daemon_function: - type: fixture - brief: Restart wazuh daemon before starting a test. - - wait_for_start_module: - type: fixture - brief: Detect the start of the Integratord module in the ossec.log - - assertions: - - Verify the expected response with for a given alert is recieved - - input_description: - - The `config_integratord_read_json_alerts.yaml` file provides the module configuration for this test. - - The `cases_integratord_read_json_file_deleted` file provides the test cases. 
- - expected_output: - - r'.*wazuh-integratord.*ERROR.*Could not retrieve information of file.*alerts.json.*No such file.*' - - r'.*wazuh-integratord.*alert_id.*\"integration\": \"virustotal\".*' - ''' - wazuh_monitor = FileMonitor(LOG_FILE_PATH) - command = f"touch {ALERT_FILE_PATH} && chmod 640 {ALERT_FILE_PATH} && chown wazuh:wazuh {ALERT_FILE_PATH}" - - remove_file(ALERT_FILE_PATH) - check_integratord_event(file_monitor=wazuh_monitor, timeout=global_parameters.default_timeout*2, - callback=generate_monitoring_callback(integrator.CB_CANNOT_RETRIEVE_JSON_FILE), - error_message=integrator.ERR_MSG_CANNOT_RETRIEVE_MSG_NOT_FOUND) - - # Create file new alerts.json file. - run_local_command_returning_output(command) - - # Wait for Integratord to detect the file before the inserting the alert - time.sleep(2) - run_local_command_returning_output(f"echo '{metadata['alert_sample']}' >> {ALERT_FILE_PATH}") - - # Read Response in ossec.log - check_integratord_event(file_monitor=wazuh_monitor, timeout=global_parameters.default_timeout*2, - callback=generate_monitoring_callback(integrator.CB_VIRUSTOTAL_ALERT), - error_message=integrator.ERR_MSG_VIRUSTOTAL_ALERT_NOT_DETECTED) diff --git a/tests/integration/test_logcollector/test_localfile/test_invalid_agent_localfile_config.py b/tests/integration/test_logcollector/test_localfile/test_invalid_agent_localfile_config.py index bcd8d09ee1..4dd041fea9 100644 --- a/tests/integration/test_logcollector/test_localfile/test_invalid_agent_localfile_config.py +++ b/tests/integration/test_logcollector/test_localfile/test_invalid_agent_localfile_config.py @@ -111,12 +111,12 @@ ] parameters = [ - { 'LOCATION': os.path.join(temp_dir, 'wazuh-testing', files[0]), 'LOG_FORMAT': None }, - { 'LOCATION': None, 'LOG_FORMAT': 'syslog' }, + {'LOCATION': os.path.join(temp_dir, 'wazuh-testing', files[0]), 'LOG_FORMAT': None}, + {'LOCATION': None, 'LOG_FORMAT': 'syslog'}, ] metadata = lower_case_key_dictionary_array(parameters) -tcase_ids = [f"location_{'None' if param['LOCATION'] is None else files[0]}_" \ +tcase_ids = [f"location_{'None' if param['LOCATION'] is None else files[0]}_" f"logformat_{'None' if param['LOG_FORMAT'] is None else param['LOG_FORMAT']}" for param in parameters] configurations_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data', 'invalid_agent_conf.yaml') configurations = load_wazuh_configurations(configurations_path, __name__, params=parameters, metadata=metadata) @@ -133,10 +133,11 @@ def get_configuration(request): """Get configurations from the module.""" return request.param + # Tests def test_invalid_agent_localfile_config(get_files_list, create_file_structure_module, get_configuration, set_agent_conf, - daemons_handler): + daemons_handler_module): ''' description: Check if the expected message is present in the ossec.log when an invalid configuration is set and if the Wazuh continues running. @@ -156,7 +157,7 @@ def test_invalid_agent_localfile_config(get_files_list, create_file_structure_mo - set_agent_conf: type: fixture brief: Set a new configuration in 'agent.conf' file. - - daemons_handler: + - daemons_handler_module: type: fixture brief: Handler of Wazuh daemons. 
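Going back to the removed json-file-deleted test above: it recreates alerts.json with a shell one-liner (touch, chmod 640, chown wazuh:wazuh) before inserting the next alert. The same recreation step in plain Python, assuming the default alerts path and that the script runs as root on a host where the wazuh user and group exist:

import os
import shutil
from pathlib import Path

ALERTS_JSON = Path('/var/ossec/logs/alerts/alerts.json')   # assumed default ALERT_FILE_PATH

# Recreate the file with the same mode and ownership the shell one-liner sets.
ALERTS_JSON.touch(exist_ok=True)
os.chmod(ALERTS_JSON, 0o640)
shutil.chown(ALERTS_JSON, user='wazuh', group='wazuh')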
diff --git a/tests/integration/test_logcollector/test_localfile/test_invalid_wazuh_conf.py b/tests/integration/test_logcollector/test_localfile/test_invalid_wazuh_conf.py index 894acda4b6..ab94e7f6dd 100644 --- a/tests/integration/test_logcollector/test_localfile/test_invalid_wazuh_conf.py +++ b/tests/integration/test_logcollector/test_localfile/test_invalid_wazuh_conf.py @@ -116,12 +116,12 @@ ] parameters = [ - { 'LOCATION': os.path.join(temp_dir, 'wazuh-testing', files[0]), 'LOG_FORMAT': None }, - { 'LOCATION': None, 'LOG_FORMAT': 'syslog' }, + {'LOCATION': os.path.join(temp_dir, 'wazuh-testing', files[0]), 'LOG_FORMAT': None}, + {'LOCATION': None, 'LOG_FORMAT': 'syslog'}, ] metadata = lower_case_key_dictionary_array(parameters) -tcase_ids = [f"location_{'None' if param['LOCATION'] is None else files[0]}_" \ +tcase_ids = [f"location_{'None' if param['LOCATION'] is None else files[0]}_" f"logformat_{'None' if param['LOG_FORMAT'] is None else param['LOG_FORMAT']}" for param in parameters] configurations_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data', 'invalid_wazuh_conf.yaml') configurations = load_wazuh_configurations(configurations_path, __name__, params=parameters, metadata=metadata) @@ -151,7 +151,7 @@ def remove_empty_options(get_configuration): def test_invalid_wazuh_conf(get_files_list, create_file_structure_module, get_configuration, remove_empty_options, - configure_environment, daemons_handler): + configure_environment, daemons_handler_module): ''' description: Check if the expected message is present in the ossec.log when an invalid configuration is set and if Wazuh refuses to restart. @@ -174,7 +174,7 @@ def test_invalid_wazuh_conf(get_files_list, create_file_structure_module, get_co - configure_environment: type: fixture brief: Configure a custom environment for testing. Restart Wazuh is needed for applying the configuration. - - daemons_handler: + - daemons_handler_module: type: fixture brief: Handler of Wazuh daemons. 
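As a quick illustration of the reflowed tcase_ids comprehension in the hunk above, the two parameter sets produce one readable pytest id each; temp_dir and files below are placeholders for the module-level values that are outside this hunk:

import os

temp_dir = '/tmp'                 # placeholder; the real module resolves this elsewhere
files = ['test.txt']              # placeholder for the module-level files list

parameters = [
    {'LOCATION': os.path.join(temp_dir, 'wazuh-testing', files[0]), 'LOG_FORMAT': None},
    {'LOCATION': None, 'LOG_FORMAT': 'syslog'},
]

tcase_ids = [f"location_{'None' if param['LOCATION'] is None else files[0]}_"
             f"logformat_{'None' if param['LOG_FORMAT'] is None else param['LOG_FORMAT']}"
             for param in parameters]

print(tcase_ids)   # ['location_test.txt_logformat_None', 'location_None_logformat_syslog']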
@@ -191,7 +191,7 @@ def test_invalid_wazuh_conf(get_files_list, create_file_structure_module, get_co - logcollector ''' wazuh_log_monitor = FileMonitor(LOG_FILE_PATH) - + check_daemon_status(target_daemon=LOGCOLLECTOR_DAEMON, running_condition=False) wazuh_log_monitor.start(timeout=LOG_COLLECTOR_GLOBAL_TIMEOUT, callback=callback_missing_element_error, diff --git a/tests/integration/test_logcollector/test_log_filter_options/test_ignore_regex.py b/tests/integration/test_logcollector/test_log_filter_options/test_ignore_regex.py index f83b572aed..b4bf2cba53 100644 --- a/tests/integration/test_logcollector/test_log_filter_options/test_ignore_regex.py +++ b/tests/integration/test_logcollector/test_log_filter_options/test_ignore_regex.py @@ -109,7 +109,7 @@ def test_ignore_multiple_regex(configuration, metadata, new_file_path, create_fi - Restore ossec.conf and local_internal_options.conf - Stop Wazuh - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_logcollector/test_log_filter_options/test_restrict_ignore_regex.py b/tests/integration/test_logcollector/test_log_filter_options/test_restrict_ignore_regex.py index 2d9347bb19..72545f960d 100644 --- a/tests/integration/test_logcollector/test_log_filter_options/test_restrict_ignore_regex.py +++ b/tests/integration/test_logcollector/test_log_filter_options/test_restrict_ignore_regex.py @@ -111,7 +111,7 @@ def test_restrict_ignore_regex_values(configuration, metadata, new_file_path, cr - Restore ossec.conf and local_internal_options.conf - Stop Wazuh - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_logcollector/test_log_filter_options/test_restrict_regex.py b/tests/integration/test_logcollector/test_log_filter_options/test_restrict_regex.py index 6aace6b990..ed2564a350 100644 --- a/tests/integration/test_logcollector/test_log_filter_options/test_restrict_regex.py +++ b/tests/integration/test_logcollector/test_log_filter_options/test_restrict_regex.py @@ -109,7 +109,7 @@ def test_restrict_multiple_regex(configuration, metadata, new_file_path, create_ - Restore ossec.conf and local_internal_options.conf - Stop Wazuh - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_logcollector/test_macos/test_macos_file_status_basic.py b/tests/integration/test_logcollector/test_macos/test_macos_file_status_basic.py index c823af513e..dd6f7c9f4c 100644 --- a/tests/integration/test_logcollector/test_macos/test_macos_file_status_basic.py +++ b/tests/integration/test_logcollector/test_macos/test_macos_file_status_basic.py @@ -95,7 +95,7 @@ def get_configuration(request): def test_macos_file_status_basic(restart_logcollector_required_daemons_package, truncate_log_file, delete_file_status_json, configure_local_internal_options_module, - get_configuration, configure_environment, file_monitoring, daemons_handler): + get_configuration, configure_environment, file_monitoring, daemons_handler_module): ''' description: Check if the 'wazuh-logcollector' builds and updates the 'file_status.json' file from ULS events. For this purpose, the test will configure a 'localfile' section using the macOS settings. @@ -130,7 +130,7 @@ def test_macos_file_status_basic(restart_logcollector_required_daemons_package, - file_monitoring: type: fixture brief: Handle the monitoring of a specified file. - - daemons_handler: + - daemons_handler_module: type: fixture brief: Handler of Wazuh daemons. 
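The invalid-configuration case above asserts that wazuh-logcollector stays down (check_daemon_status with running_condition=False) before looking for the missing-element error. A rough standalone equivalent of that daemon check, assuming pgrep is available on the host:

import subprocess

def daemon_is_running(daemon='wazuh-logcollector'):
    # pgrep -f matches the full command line, so long daemon names are not truncated.
    result = subprocess.run(['pgrep', '-f', daemon], capture_output=True)
    return result.returncode == 0

# After an invalid localfile configuration the daemon is expected to stay down.
assert not daemon_is_running('wazuh-logcollector')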
diff --git a/tests/integration/test_logcollector/test_macos/test_macos_file_status_predicate.py b/tests/integration/test_logcollector/test_macos/test_macos_file_status_predicate.py index 204fa1aa8a..878c293525 100644 --- a/tests/integration/test_logcollector/test_macos/test_macos_file_status_predicate.py +++ b/tests/integration/test_logcollector/test_macos/test_macos_file_status_predicate.py @@ -86,7 +86,7 @@ def test_macos_file_status_predicate(restart_logcollector_required_daemons_packa delete_file_status_json, configure_local_internal_options_module, get_configuration, configure_environment, - file_monitoring, daemons_handler): + file_monitoring, daemons_handler_module): """ description: Check if the 'wazuh-logcollector' does not update the 'file_status.json' file from logging events when using an invalid predicate in the 'query' tag of the 'localfile' section. @@ -124,7 +124,7 @@ def test_macos_file_status_predicate(restart_logcollector_required_daemons_packa - file_monitoring: type: fixture brief: Handle the monitoring of a specified file. - - daemons_handler: + - daemons_handler_module: type: fixture brief: Handler of Wazuh daemons. diff --git a/tests/integration/test_logcollector/test_macos/test_macos_file_status_when_no_macos.py b/tests/integration/test_logcollector/test_macos/test_macos_file_status_when_no_macos.py index f360726463..bc4ff2d14c 100644 --- a/tests/integration/test_logcollector/test_macos/test_macos_file_status_when_no_macos.py +++ b/tests/integration/test_logcollector/test_macos/test_macos_file_status_when_no_macos.py @@ -52,7 +52,7 @@ from os import remove if sys.platform != 'win32': from wazuh_testing.tools import LOGCOLLECTOR_FILE_STATUS_PATH - + # Marks pytestmark = [pytest.mark.darwin, pytest.mark.tier(level=0)] @@ -98,7 +98,8 @@ def get_configuration(request): def test_macos_file_status_when_no_macos(restart_logcollector_required_daemons_package, truncate_log_file, handle_files, delete_file_status_json, configure_local_internal_options_module, - get_configuration, configure_environment, file_monitoring, daemons_handler): + get_configuration, configure_environment, file_monitoring, + daemons_handler_module): ''' description: Check if the 'wazuh-logcollector' does not store and removes if exists, previous macos-formatted localfile data in the 'file_status.json' file when the macOS localfile @@ -139,7 +140,7 @@ def test_macos_file_status_when_no_macos(restart_logcollector_required_daemons_p - file_monitoring: type: fixture brief: Handle the monitoring of a specified file. - - daemons_handler: + - daemons_handler_module: type: fixture brief: Handler of Wazuh daemons. 
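Both file_status tests above come down to the same observable: whether wazuh-logcollector writes, or refrains from updating, file_status.json. A minimal polling helper for that observable; the status-file location is an assumption, the real tests import it as LOGCOLLECTOR_FILE_STATUS_PATH:

import os
import time

FILE_STATUS = '/var/ossec/queue/logcollector/file_status.json'  # assumed location

def wait_for_file_status(path=FILE_STATUS, timeout=60):
    # Poll until logcollector has written a non-empty status file.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if os.path.isfile(path) and os.path.getsize(path) > 0:
            return True
        time.sleep(1)
    return False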
diff --git a/tests/integration/test_logcollector/test_macos/test_macos_format_basic.py b/tests/integration/test_logcollector/test_macos/test_macos_format_basic.py index abfb5d20ce..bb6391efe5 100644 --- a/tests/integration/test_logcollector/test_macos/test_macos_format_basic.py +++ b/tests/integration/test_logcollector/test_macos/test_macos_format_basic.py @@ -99,8 +99,8 @@ def restart_logcollector_function(): @pytest.mark.parametrize('macos_message', macos_log_messages, ids=[log_message['id'] for log_message in macos_log_messages]) def test_macos_format_basic(restart_logcollector_required_daemons_package, get_configuration, configure_environment, - configure_local_internal_options_module, macos_message, file_monitoring, daemons_handler, - restart_logcollector_function): + configure_local_internal_options_module, macos_message, file_monitoring, + daemons_handler_module, restart_logcollector_function): ''' description: Check if the 'wazuh-logcollector' gathers properly macOS unified logging system (ULS) events. For this purpose, the test will configure a 'localfile' section using the macOS settings. @@ -133,7 +133,7 @@ def test_macos_format_basic(restart_logcollector_required_daemons_package, get_c - file_monitoring: type: fixture brief: Handle the monitoring of a specified file. - - daemons_handler: + - daemons_handler_module: type: fixture brief: Handler of Wazuh daemons. - restart_logcollector_function: diff --git a/tests/integration/test_logcollector/test_macos/test_macos_format_only_future_events.py b/tests/integration/test_logcollector/test_macos/test_macos_format_only_future_events.py index d9551432f2..76bf4ee36a 100644 --- a/tests/integration/test_logcollector/test_macos/test_macos_format_only_future_events.py +++ b/tests/integration/test_logcollector/test_macos/test_macos_format_only_future_events.py @@ -99,7 +99,7 @@ def get_connection_configuration(): def test_macos_format_only_future_events(restart_logcollector_required_daemons_package, get_configuration, configure_environment, configure_local_internal_options_module, - file_monitoring, daemons_handler): + file_monitoring, daemons_handler_module): """ description: Check if the 'only-future-events' option is used properly by the 'wazuh-logcollector' when using the macOS unified logging system (ULS) events. For this purpose, the test will configure @@ -128,7 +128,7 @@ def test_macos_format_only_future_events(restart_logcollector_required_daemons_p - configure_local_internal_options_module: type: fixture brief: Set internal configuration for testing. - - daemons_handler: + - daemons_handler_module: type: fixture brief: Handler of Wazuh daemons. 
- file_monitoring: diff --git a/tests/integration/test_logcollector/test_macos/test_macos_multiline_values.py b/tests/integration/test_logcollector/test_macos/test_macos_multiline_values.py index e6e531faa4..7ad92ced85 100644 --- a/tests/integration/test_logcollector/test_macos/test_macos_multiline_values.py +++ b/tests/integration/test_logcollector/test_macos/test_macos_multiline_values.py @@ -84,9 +84,9 @@ def get_connection_configuration(): @pytest.mark.parametrize('macos_message', macos_log_messages) -def test_macos_multiline_values(configure_local_internal_options_module, restart_logcollector_required_daemons_package, - get_configuration, configure_environment, macos_message, file_monitoring, - daemons_handler): +def test_macos_multiline_values(configure_local_internal_options_module, restart_logcollector_required_daemons_package, + get_configuration, configure_environment, macos_message, file_monitoring, + daemons_handler_module): ''' description: Check if the 'wazuh-logcollector' daemon collects multiline events from the macOS ULS (unified logging system). For this purpose, the test will configure a 'localfile' section @@ -115,7 +115,7 @@ def test_macos_multiline_values(configure_local_internal_options_module, restart - macos_message: type: dict brief: Dictionary with the testing macOS ULS event. - - daemons_handler: + - daemons_handler_module: type: fixture brief: Handler of Wazuh daemons. - file_monitoring: @@ -146,7 +146,7 @@ def test_macos_multiline_values(configure_local_internal_options_module, restart multiline_message = macos_message['message'].split('\n')[:-1] multiline_logger = f"\"$(printf \"{macos_message['message']}\")\"" logcollector.generate_macos_logger_log(multiline_logger) - + for line in multiline_message: log_monitor.start(timeout=logcollector.LOG_COLLECTOR_GLOBAL_TIMEOUT, callback=logcollector.callback_read_macos_message(line), diff --git a/tests/integration/test_logcollector/test_statistics/test_statistics_macos.py b/tests/integration/test_logcollector/test_statistics/test_statistics_macos.py index bbc672cea4..f06af3c2e2 100644 --- a/tests/integration/test_logcollector/test_statistics/test_statistics_macos.py +++ b/tests/integration/test_logcollector/test_statistics/test_statistics_macos.py @@ -82,7 +82,7 @@ def get_configuration(request): def test_options_state_interval_no_file(configure_local_internal_options_module, get_configuration, - configure_environment, daemons_handler): + configure_environment, daemons_handler_module): ''' description: Check if the 'wazuh-logcollector' daemon updates the statistic file 'wazuh-logcollector.state' when using the macOS unified logging system (ULS). For this purpose, the test will configure @@ -105,7 +105,7 @@ def test_options_state_interval_no_file(configure_local_internal_options_module, - configure_environment: type: fixture brief: Configure a custom environment for testing. - - daemons_handler: + - daemons_handler_module: type: fixture brief: Handler of Wazuh daemons. 
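The multiline test above wraps the message in $(printf ...) so that the embedded \n sequences reach logger as real line breaks and end up as one multiline ULS entry. A small sketch of that invocation, assuming it runs on a macOS host with the stock logger tool:

import subprocess

# Two-line message; printf expands the literal \n so `logger` receives real line breaks,
# mirroring the $(printf ...) wrapper built by the test above.
message = 'wazuh multiline test, line 1\\nwazuh multiline test, line 2'
subprocess.run(f'logger "$(printf "{message}")"', shell=True, check=True)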
diff --git a/tests/integration/test_sca/conftest.py b/tests/integration/test_sca/conftest.py new file mode 100644 index 0000000000..b174c7bea5 --- /dev/null +++ b/tests/integration/test_sca/conftest.py @@ -0,0 +1,53 @@ +import os +import pytest + +from wazuh_testing import LOG_FILE_PATH, CIS_RULESET_PATH +from wazuh_testing.modules import sca +from wazuh_testing.modules.sca import event_monitor as evm +from wazuh_testing.tools.file import copy, delete_file, copy_files_in_folder, delete_path_recursively +from wazuh_testing.tools.monitoring import FileMonitor + + +# Variables +TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') + + +# Fixtures +@pytest.fixture() +def wait_for_sca_enabled(): + ''' + Wait for the sca module to start. + ''' + wazuh_monitor = FileMonitor(LOG_FILE_PATH) + evm.check_sca_enabled(wazuh_monitor) + + +@pytest.fixture() +def prepare_cis_policies_file(metadata): + ''' + Copies policy file from named by metadata into agent's ruleset path. Deletes file after test. + Args: + metadata (dict): contains the test metadata. Must contain policy_file key with file name. + ''' + files_to_restore = copy_files_in_folder(src_folder=CIS_RULESET_PATH, dst_folder=sca.TEMP_FILE_PATH) + filename = metadata['policy_file'] + filepath = os.path.join(TEST_DATA_PATH, 'policies', filename) + copy(filepath, CIS_RULESET_PATH) + yield + copy_files_in_folder(src_folder=sca.TEMP_FILE_PATH, dst_folder=CIS_RULESET_PATH, files_to_move=files_to_restore) + delete_file(os.path.join(CIS_RULESET_PATH, filename)) + + +@pytest.fixture() +def prepare_test_folder(folder_path='/testfile', mode=0o666): + ''' + Creates folder with a given mode. + Args: + folder_path (str): path for the folder to create + mode (int): mode to be used for folder creation. + ''' + os.makedirs(folder_path, mode, exist_ok=True) + + yield + + delete_path_recursively(folder_path) diff --git a/tests/integration/test_sca/data/configuration_template/configuration_sca.yaml b/tests/integration/test_sca/data/configuration_template/configuration_sca.yaml new file mode 100644 index 0000000000..554fdc312e --- /dev/null +++ b/tests/integration/test_sca/data/configuration_template/configuration_sca.yaml @@ -0,0 +1,30 @@ +- sections: + - section: sca + elements: + - enabled: + value: ENABLED + - scan_on_start: + value: 'yes' + - interval: + value: INTERVAL + - policies: + elements: + - policy: + value: POLICY_FILE + + - section: rootcheck + elements: + - disabled: + value: 'yes' + + - section: syscheck + elements: + - disabled: + value: 'yes' + + - section: wodle + attributes: + - name: syscollector + elements: + - disabled: + value: 'yes' diff --git a/tests/integration/test_sca/data/policies/cis_centos8_osregex.yaml b/tests/integration/test_sca/data/policies/cis_centos8_osregex.yaml new file mode 100644 index 0000000000..6a5dff9971 --- /dev/null +++ b/tests/integration/test_sca/data/policies/cis_centos8_osregex.yaml @@ -0,0 +1,56 @@ +policy: + id: cis_centos8_osregex + file: cis_centos8_osregex + name: CIS Benchmark for CentOS Linux 8 + description: This is mock file for checking CIS SCA compliance on centos 8 systems + references: + - https://documentation.wazuh.com/current/user-manual/capabilities/sec-config-assessment/index.html + +requirements: + title: Check Centos 8 family platform + description: Requirements for running the policy against CentOS 8 family. 
+ condition: any + rules: + - f:/etc/os-release -> r:Centos + - f:/proc/sys/kernel/ostype -> Linux + +checks: + + # Check with default value - OS_REGEX + - id: 1 + title: Test_1 + description: Test osregex regex engine with osregex rules + rationale: Test_1 + remediation: Run osregex + compliance: + - cis: [1.8.1.5] + - cis_csc: ["5.1"] + - pci_dss: [10.2.5] + - hipaa: [164.312.b] + - nist_800_53: [AU.14, AC.7] + - gpg_13: ["7.8"] + - gdpr_IV: ["35.7", "32.2"] + - tsc: [CC6.1, CC6.8, CC7.2, CC7.3, CC7.4] + condition: all + rules: + - c:stat -L /etc/issue -> r:Access:\s*\(0644/-rw-r--r--\)\s*Uid:\s*\(\s*\t*0/\s*\t*root\)\s*\t*Gid:\s*\(\s*\t*0/\s*\t*root\) + + # Check with PCRE2 value + - id: 2 + title: Test_2 + description: Test osregex regex engine with pcre2 rules + rationale: Test_2 + remediation: Run pcre2 + compliance: + - cis: [1.7.5] + - cis_csc: ["5.1"] + - pci_dss: [10.2.5] + - hipaa: [164.312.b] + - nist_800_53: [AU.14, AC.7] + - gpg_13: ["7.8"] + - gdpr_IV: ["35.7", "32.2"] + - tsc: [CC6.1, CC6.8, CC7.2, CC7.3, CC7.4] + condition: all + rules: + - c:stat /etc/issue -> r:^Access:\s*\(0644\/.{0,10}\)\s*Uid:\s*\(\s*\t*0\/\s*\t*root\)\s*\t*Gid:\s*\(\s*\t*0\/\s*\t*root\)$ + regex_type: pcre2 diff --git a/tests/integration/test_sca/data/policies/cis_centos8_pcre2.yaml b/tests/integration/test_sca/data/policies/cis_centos8_pcre2.yaml new file mode 100644 index 0000000000..e4b6bcbe80 --- /dev/null +++ b/tests/integration/test_sca/data/policies/cis_centos8_pcre2.yaml @@ -0,0 +1,57 @@ +policy: + id: cis_centos8_pcre2 + file: cis_centos8_pcre2 + name: CIS Benchmark for CentOS Linux 8 + description: This is mock file for checking CIS SCA compliance on centos 8 systems + references: + - https://documentation.wazuh.com/current/user-manual/capabilities/sec-config-assessment/index.html + regex_type: pcre2 + +requirements: + title: Check Centos 8 family platform + description: Requirements for running the policy against CentOS 8 family. 
+ condition: any + rules: + - f:/etc/os-release -> r:Centos + - f:/proc/sys/kernel/ostype -> Linux + +checks: + + # Check with default value - OS_REGEX + - id: 1 + title: Test_1 + description: Test pcre2 regex engine with osregex rules + rationale: Test_1 + remediation: Run osregex + compliance: + - cis: [1.8.1.5] + - cis_csc: ["5.1"] + - pci_dss: [10.2.5] + - hipaa: [164.312.b] + - nist_800_53: [AU.14, AC.7] + - gpg_13: ["7.8"] + - gdpr_IV: ["35.7", "32.2"] + - tsc: [CC6.1, CC6.8, CC7.2, CC7.3, CC7.4] + condition: all + rules: + - c:stat -L /etc/issue -> r:Access:\s*\(0644/-rw-r--r--\)\s*Uid:\s*\(\s*\t*0/\s*\t*root\)\s*\t*Gid:\s*\(\s*\t*0/\s*\t*root\) + + # Check with PCRE2 value + - id: 2 + title: Test_2 + description: Test pcre2 regex engine with pcre2 rules + rationale: Test_2 + remediation: Run pcre2 + compliance: + - cis: [1.7.5] + - cis_csc: ["5.1"] + - pci_dss: [10.2.5] + - hipaa: [164.312.b] + - nist_800_53: [AU.14, AC.7] + - gpg_13: ["7.8"] + - gdpr_IV: ["35.7", "32.2"] + - tsc: [CC6.1, CC6.8, CC7.2, CC7.3, CC7.4] + condition: all + rules: + - c:stat /etc/issue -> r:^Access:\s*\(0644\/.{0,10}\)\s*Uid:\s*\(\s*\t*0\/\s*\t*root\)\s*\t*Gid:\s*\(\s*\t*0\/\s*\t*root\)$ + regex_type: pcre2 diff --git a/tests/integration/test_sca/data/policies/cis_centos8_validate_remediation.yaml b/tests/integration/test_sca/data/policies/cis_centos8_validate_remediation.yaml new file mode 100644 index 0000000000..5f2a945981 --- /dev/null +++ b/tests/integration/test_sca/data/policies/cis_centos8_validate_remediation.yaml @@ -0,0 +1,41 @@ +policy: + id: cis_centos8_validate_remediation + file: cis_centos8_validate_remediation.yaml + name: CIS Benchmark for CentOS Linux 8 + description: This is mock file for checking CIS SCA compliance on centos 8 systems + references: + - https://documentation.wazuh.com/current/user-manual/capabilities/sec-config-assessment/index.html + +requirements: + title: Check Centos 8 family platform + description: Requirements for running the policy against CentOS 8 family. + condition: any + rules: + - f:/etc/os-release -> r:Centos + - f:/proc/sys/kernel/ostype -> Linux + +checks: + + # Check that permitions for file are 222 + - id: 1 + title: Test_1 + description: Test osregex regex engine with osregex rules + rationale: Test_1 + remediation: The testfile permissions should be '0222'. Use chmod 222 /testfile command to fix permissions + compliance: + - cis: [1.8.1.5] + condition: all + rules: + - c:stat -L /testfile -> r:Access:\s*\(0222/d-w--w--w-\)\s*Uid:\.*root\)\s*\t*Gid:\.*root\) + + # Check that permitions for file are 644 + - id: 2 + title: Test_2 + description: Test osregex regex engine with pcre2 rules + rationale: Test_2 + remediation: The testfile permissions should be '644'. 
Use chmod 666 /testfile command to fix permissions + compliance: + - cis: [1.8.1.5] + condition: all + rules: + - c:stat -L /testfile -> r:Access:\s*\(0644/drw-r--r--\)\s*Uid:\.*root\)\s*\t*Gid:\.*root\) diff --git a/tests/integration/test_sca/data/test_cases/cases_sca_disabled.yaml b/tests/integration/test_sca/data/test_cases/cases_sca_disabled.yaml new file mode 100644 index 0000000000..49fd02681f --- /dev/null +++ b/tests/integration/test_sca/data/test_cases/cases_sca_disabled.yaml @@ -0,0 +1,9 @@ +- name: SCA disabled + description: Set enabled to no value + configuration_parameters: + ENABLED: 'no' + INTERVAL: 100 + POLICY_FILE: cis_centos8_osregex.yaml + metadata: + enabled: false + policy_file: cis_centos8_osregex.yaml diff --git a/tests/integration/test_sca/data/test_cases/cases_sca_enabled.yaml b/tests/integration/test_sca/data/test_cases/cases_sca_enabled.yaml new file mode 100644 index 0000000000..3b0014c6d3 --- /dev/null +++ b/tests/integration/test_sca/data/test_cases/cases_sca_enabled.yaml @@ -0,0 +1,9 @@ +- name: SCA enabled + description: Set enabled to yes value + configuration_parameters: + ENABLED: 'yes' + INTERVAL: 100 + POLICY_FILE: cis_centos8_osregex.yaml + metadata: + enabled: true + policy_file: cis_centos8_osregex.yaml diff --git a/tests/integration/test_sca/data/test_cases/cases_scan_results.yaml b/tests/integration/test_sca/data/test_cases/cases_scan_results.yaml new file mode 100644 index 0000000000..e761578c13 --- /dev/null +++ b/tests/integration/test_sca/data/test_cases/cases_scan_results.yaml @@ -0,0 +1,21 @@ +- name: '"CIS CentOS 8 osregex" policy' + description: Run a SCA scan and check regex engine used and results + configuration_parameters: + ENABLED: 'yes' + INTERVAL: 1000 + POLICY_FILE: cis_centos8_osregex.yaml + metadata: + policy_file: cis_centos8_osregex.yaml + results: 2 + regex_type: osregex + +- name: '"CIS CentOS 8 PCRE2 regex" policy' + description: Run a SCA scan and check regex engine used and results + configuration_parameters: + ENABLED: 'yes' + INTERVAL: 1000 + POLICY_FILE: cis_centos8_pcre2.yaml + metadata: + policy_file: cis_centos8_pcre2.yaml + results: 2 + regex_type: pcre2 diff --git a/tests/integration/test_sca/data/test_cases/cases_validate_remediation.yaml b/tests/integration/test_sca/data/test_cases/cases_validate_remediation.yaml new file mode 100644 index 0000000000..4854a19323 --- /dev/null +++ b/tests/integration/test_sca/data/test_cases/cases_validate_remediation.yaml @@ -0,0 +1,25 @@ +- name: SCA Rule Check Fails - Apply remediation. + description: A given rule check fails in SCA scan. After applying remediation, it passes in next scan. + configuration_parameters: + ENABLED: 'yes' + INTERVAL: 5 + POLICY_FILE: cis_centos8_validate_remediation.yaml + metadata: + policy_file: cis_centos8_validate_remediation.yaml + check_id: 1 + perms: 0222 + initial_result: failed + final_result: passed + +- name: SCA Rule Check Passes - Change system cause Fail + description: A given rule check passes in SCA scan. After changing system, it fails in next scan. 
+ configuration_parameters: + ENABLED: 'yes' + INTERVAL: 5 + POLICY_FILE: cis_centos8_validate_remediation.yaml + metadata: + policy_file: cis_centos8_validate_remediation.yaml + check_id: 2 + perms: 0222 + initial_result: passed + final_result: failed diff --git a/tests/integration/test_sca/test_basic.py b/tests/integration/test_sca/test_basic.py new file mode 100644 index 0000000000..9df3f8975c --- /dev/null +++ b/tests/integration/test_sca/test_basic.py @@ -0,0 +1,194 @@ +''' +copyright: Copyright (C) 2015-2023, Wazuh Inc. + Created by Wazuh, Inc. . + This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 + +type: integration + +brief: These tests will check if the `enabled` option of the SCA module + is working correctly. This option is located in its corresponding section of + the `ossec.conf` file and allows enabling or disabling this module. + +components: + - sca + +targets: + - manager + - agent +daemons: + - wazuh-modulesd + +os_platform: + - linux + +os_version: + - Arch Linux + - Amazon Linux 2 + - Amazon Linux 1 + - CentOS 8 + - CentOS 7 + - Debian Buster + - Red Hat 8 + - Ubuntu Focal + - Ubuntu Bionic + +references: + - https://documentation.wazuh.com/current/user-manual/capabilities/sec-config-assessment/index.html + +tags: + - sca +''' +import os +import pytest + +from wazuh_testing import LOG_FILE_PATH +from wazuh_testing.tools.configuration import load_configuration_template, get_test_cases_data +from wazuh_testing.tools.monitoring import FileMonitor +from wazuh_testing.modules.sca import event_monitor as evm +from wazuh_testing.modules.sca import SCA_DEFAULT_LOCAL_INTERNAL_OPTIONS as local_internal_options + + +pytestmark = [pytest.mark.linux, pytest.mark.tier(level=0)] + +# Reference paths +TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') +CONFIGURATIONS_PATH = os.path.join(TEST_DATA_PATH, 'configuration_template') +TEST_CASES_PATH = os.path.join(TEST_DATA_PATH, 'test_cases') + +# Configuration and cases data +configurations_path = os.path.join(CONFIGURATIONS_PATH, 'configuration_sca.yaml') + +# ---------------------------------------------------- TEST_ENABLED --------------------------------------------------- +# Test configurations +t1_cases_path = os.path.join(TEST_CASES_PATH, 'cases_sca_enabled.yaml') +t1_configuration_parameters, t1_configuration_metadata, t1_case_ids = get_test_cases_data(t1_cases_path) +t1_configurations = load_configuration_template(configurations_path, t1_configuration_parameters, + t1_configuration_metadata) + +# ---------------------------------------------------- TEST_DISABLED -------------------------------------------------- +# Test configurations +t2_cases_path = os.path.join(TEST_CASES_PATH, 'cases_sca_disabled.yaml') +t2_configuration_parameters, t2_configuration_metadata, t2_case_ids = get_test_cases_data(t2_cases_path) +t2_configurations = load_configuration_template(configurations_path, t2_configuration_parameters, + t2_configuration_metadata) + + +@pytest.mark.parametrize('configuration, metadata', zip(t1_configurations, t1_configuration_metadata), ids=t1_case_ids) +def test_sca_enabled(configuration, metadata, prepare_cis_policies_file, truncate_monitored_files, + set_wazuh_configuration, configure_local_internal_options_function, restart_wazuh_function): + ''' + description: Check SCA behavior when enabled tag is set to yes. + + test_phases: + - Set a custom Wazuh configuration. + - Copy cis_sca ruleset file into agent. + - Restart wazuh. 
+ - Check that sca module starts if enabled is set to 'yes' + - Check in the log that the sca module started appears. + - Check that sca scan starts and finishes + + wazuh_min_version: 4.6.0 + + tier: 0 + + parameters: + - configuration: + type: dict + brief: Wazuh configuration data. Needed for set_wazuh_configuration fixture. + - metadata: + type: dict + brief: Wazuh configuration metadata. + - prepare_cis_policies_file: + type: fixture + brief: copy test sca policy file. Delete it after test. + - set_wazuh_configuration: + type: fixture + brief: Set the wazuh configuration according to the configuration data. + - configure_local_internal_options_function: + type: fixture + brief: Configure the local_internal_options_file. + - truncate_monitored_files: + type: fixture + brief: Truncate all the log files and json alerts files before and after the test execution. + - restart_modulesd_function: + type: fixture + brief: Restart the wazuh-modulesd daemon. + - wait_for_sca_enabled: + type: fixture + brief: Wait for the sca Module to start before starting the test. + + assertions: + - Verify that when the `enabled` option is set to `yes`, the SCA module is enabled. + - Verify the sca scan starts. + - Verify the sca scan ends. + + input_description: + - The `cases_sca_enabled.yaml` file provides the module configuration for this test. + - the cis*.yaml files located in the policies folder provide the sca rules to check. + + expected_output: + - r'.*sca.*INFO: (Module started.)' + - r'.*sca.*INFO: (Starting Security Configuration Assessment scan).' + - r".*sca.*INFO: Security Configuration Assessment scan finished. Duration: (\\d+) seconds." + ''' + wazuh_log_monitor = FileMonitor(LOG_FILE_PATH) + evm.check_sca_enabled(wazuh_log_monitor) + evm.check_sca_scan_started(wazuh_log_monitor) + evm.check_sca_scan_ended(wazuh_log_monitor) + + +@pytest.mark.parametrize('configuration, metadata', zip(t2_configurations, t2_configuration_metadata), ids=t2_case_ids) +def test_sca_disabled(configuration, metadata, prepare_cis_policies_file, truncate_monitored_files, + set_wazuh_configuration, configure_local_internal_options_function, restart_wazuh_function): + ''' + description: Check SCA behavior when enabled tag is set no. + + test_phases: + - Set a custom Wazuh configuration. + - Copy cis_sca ruleset file into agent. + - Restart wazuh. + - Check that sca module is disabled if enabled tag is set to 'no' + + wazuh_min_version: 4.6.0 + + tier: 0 + + parameters: + - configuration: + type: dict + brief: Wazuh configuration data. Needed for set_wazuh_configuration fixture. + - metadata: + type: dict + brief: Wazuh configuration metadata. + - prepare_cis_policies_file: + type: fixture + brief: copy test sca policy file. Delete it after test. + - set_wazuh_configuration: + type: fixture + brief: Set the wazuh configuration according to the configuration data. + - configure_local_internal_options_function: + type: fixture + brief: Configure the local_internal_options_file. + - truncate_monitored_files: + type: fixture + brief: Truncate all the log files and json alerts files before and after the test execution. + - restart_modulesd_function: + type: fixture + brief: Restart the wazuh-modulesd daemon. + - wait_for_sca_enabled: + type: fixture + brief: Wait for the sca Module to start before starting the test. + + assertions: + - Verify that when the `enabled` option is set to `no`, the SCA module does not start. 
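Stepping back to the cis_centos8_* policies added earlier in this patch: each rule pipes a command's output into an r: expression, and regex_type selects which engine evaluates it. A worked example of the PCRE2-style rule from check 2 against a representative stat line; Python's re module is not PCRE2, but it behaves the same for this particular pattern, and the stat output below is illustrative rather than captured:

import re

# Representative 'stat /etc/issue' line (GNU coreutils layout); not captured output.
stat_line = 'Access: (0644/-rw-r--r--)  Uid: (    0/    root)   Gid: (    0/    root)'

# PCRE2-style rule body from check 2 of the policies above.
pcre2_rule = (r'^Access:\s*\(0644\/.{0,10}\)\s*Uid:\s*\(\s*\t*0\/\s*\t*root\)'
              r'\s*\t*Gid:\s*\(\s*\t*0\/\s*\t*root\)$')

# The check passes only while the file keeps mode 0644 and root:root ownership.
assert re.match(pcre2_rule, stat_line) is not None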
+ + input_description: + - The `cases_sca_disabled.yaml` file provides the module configuration for this test. + - the cis*.yaml files located in the policies folder provide the sca rules to check. + + expected_output: + - r".*sca.*INFO: (Module disabled). Exiting." + ''' + + wazuh_log_monitor = FileMonitor(LOG_FILE_PATH) + evm.check_sca_disabled(wazuh_log_monitor) diff --git a/tests/integration/test_sca/test_scan_results.py b/tests/integration/test_sca/test_scan_results.py new file mode 100644 index 0000000000..acb1d88dca --- /dev/null +++ b/tests/integration/test_sca/test_scan_results.py @@ -0,0 +1,141 @@ +''' +copyright: Copyright (C) 2015-2023, Wazuh Inc. + Created by Wazuh, Inc. . + This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 + +type: integration + +brief: These tests will that a scan is ran using the configured sci_sca ruleset and regex engine. + +components: + - sca + +suite: sca + +targets: + - manager + - agent + +daemons: + - wazuh-modulesd + +os_platform: + - linux + +os_version: + - CentOS 8 + +references: + - https://documentation.wazuh.com/current/user-manual/capabilities/sec-config-assessment/index.html + +tags: + - sca +''' +import os +import pytest + +from wazuh_testing import LOG_FILE_PATH +from wazuh_testing.tools.configuration import load_configuration_template, get_test_cases_data +from wazuh_testing.tools.monitoring import FileMonitor +from wazuh_testing.modules.sca import event_monitor as evm +from wazuh_testing.modules.sca import SCA_DEFAULT_LOCAL_INTERNAL_OPTIONS as local_internal_options + + +pytestmark = [pytest.mark.linux, pytest.mark.tier(level=0)] + +# Reference paths +TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data') +CONFIGURATIONS_PATH = os.path.join(TEST_DATA_PATH, 'configuration_template') +TEST_CASES_PATH = os.path.join(TEST_DATA_PATH, 'test_cases') + +# Configuration and cases data +configurations_path = os.path.join(CONFIGURATIONS_PATH, 'configuration_sca.yaml') +cases_path = os.path.join(TEST_CASES_PATH, 'cases_scan_results.yaml') + +# Test configurations +configuration_parameters, configuration_metadata, case_ids = get_test_cases_data(cases_path) +configurations = load_configuration_template(configurations_path, configuration_parameters, configuration_metadata) + + +# Tests +@pytest.mark.parametrize('configuration, metadata', zip(configurations, configuration_metadata), ids=case_ids) +def test_sca_scan_results(configuration, metadata, prepare_cis_policies_file, truncate_monitored_files, + set_wazuh_configuration, configure_local_internal_options_function, restart_wazuh_function, + wait_for_sca_enabled): + ''' + description: This test will check that a SCA scan is correctly executed on an agent, with a given policy file and + a regex engine. For this it will copy a policy file located in the data folder and verify the engine + used, the amount of results found, and that the results come from the policy file. + + test_phases: + - Copy cis_sca ruleset file into agent. + - Restart wazuh. + - Check in the log that the sca module started appears. + - Check the regex engine used by the policy. + - Get the result for each ID check + - Check that the policy_id from the scan matches with the file used. + + wazuh_min_version: 4.6.0 + + tier: 0 + + parameters: + - configuration: + type: dict + brief: Wazuh configuration data. Needed for set_wazuh_configuration fixture. + - metadata: + type: dict + brief: Wazuh configuration metadata. 
+ - prepare_cis_policies_file: + type: fixture + brief: copy test sca policy file. Delete it after test. + - set_wazuh_configuration: + type: fixture + brief: Set the wazuh configuration according to the configuration data. + - configure_local_internal_options_function: + type: fixture + brief: Configure the local_internal_options_file. + - truncate_monitored_files: + type: fixture + brief: Truncate all the log files and json alerts files before and after the test execution. + - restart_modulesd_function: + type: fixture + brief: Restart the wazuh-modulesd daemon. + - wait_for_sca_enabled: + type: fixture + brief: Wait for the sca Module to start before starting the test. + + assertions: + - Verify that when the `enabled` option is set to `yes`, the SCA module is enabled. + - Assert the engine used matches the regex_type configured in the metadata + - Assert the scan gets results from each rule check + + input_description: + - The `cases_scan_results.yaml` file provides the module configuration for this test. + - The cis*.yaml files located in the policies folder provide the sca rules to check. + + expected_output: + - r'.*sca.*INFO: (Module started.)' + - r'.*sca.*INFO: (Starting Security Configuration Assessment scan).' + - r".*sca.*DEBUG: SCA will use '(.*)' engine to check the rules." + - r".*sca.*wm_sca_hash_integrity.*DEBUG: ID: (\\d+); Result: '(.*)'" + - r'.*sca_send_alert.*Sending event: (.*)' + ''' + + wazuh_log_monitor = FileMonitor(LOG_FILE_PATH) + + # Wait for the end of SCA scan + evm.check_sca_scan_started(wazuh_log_monitor) + + # Check the regex engine used by SCA + engine = evm.get_scan_regex_engine(wazuh_log_monitor) + assert engine == metadata['regex_type'], f"Wrong regex-engine found: {engine}, expected: {metadata['regex_type']}" + + # Check all checks have been done + evm.get_sca_scan_rule_id_results(file_monitor=wazuh_log_monitor, results_num=int(metadata['results'])) + + # Get scan summary event and check it matches with the policy file used + summary = evm.get_sca_scan_summary(file_monitor=wazuh_log_monitor) + assert summary['policy_id'] == metadata['policy_file'][0:-5], f"Unexpected policy_id found. Got \ + {summary['policy_id']}, expected \ + {metadata['policy_file'][0:-5]}" diff --git a/tests/integration/test_sca/test_validate_remediation.py b/tests/integration/test_sca/test_validate_remediation.py new file mode 100644 index 0000000000..5f8d7361fa --- /dev/null +++ b/tests/integration/test_sca/test_validate_remediation.py @@ -0,0 +1,144 @@ +''' +copyright: Copyright (C) 2015-2023, Wazuh Inc. + Created by Wazuh, Inc. . + This program is free software; you can redistribute it and/or modify it under the terms of GPLv2 + +type: integration + +brief: These tests will that a scan is ran using the configured sci_sca ruleset and regex engine. 
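The scan-results test above recovers the engine name and the per-check outcomes by matching the sca DEBUG lines listed in expected_output. A self-contained sketch of that parsing, where the sample log lines are illustrative examples shaped after those patterns rather than verbatim wazuh output:

import re

# Illustrative sca DEBUG lines shaped after the expected_output patterns above.
sample_log = """\
2023/06/04 22:20:35 wazuh-modulesd:sca: DEBUG: SCA will use 'pcre2' engine to check the rules.
2023/06/04 22:20:36 wazuh-modulesd:sca: wm_sca_hash_integrity(): DEBUG: ID: 1; Result: 'passed'
2023/06/04 22:20:36 wazuh-modulesd:sca: wm_sca_hash_integrity(): DEBUG: ID: 2; Result: 'failed'
"""

ENGINE = re.compile(r".*sca.*DEBUG: SCA will use '(.*)' engine to check the rules.")
RESULT = re.compile(r".*sca.*wm_sca_hash_integrity.*DEBUG: ID: (\d+); Result: '(.*)'")

engine = ENGINE.search(sample_log).group(1)
results = {int(check_id): outcome for check_id, outcome in RESULT.findall(sample_log)}

print(engine)    # pcre2
print(results)   # {1: 'passed', 2: 'failed'}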
+
+components:
+    - sca
+
+suite: sca
+
+targets:
+    - manager
+    - agent
+
+daemons:
+    - wazuh-modulesd
+
+os_platform:
+    - linux
+
+os_version:
+    - CentOS 8
+
+references:
+    - https://documentation.wazuh.com/current/user-manual/capabilities/sec-config-assessment/index.html
+
+tags:
+    - sca
+'''
+import os
+import pytest
+
+from wazuh_testing import LOG_FILE_PATH
+from wazuh_testing.tools.configuration import load_configuration_template, get_test_cases_data
+from wazuh_testing.tools.monitoring import FileMonitor
+from wazuh_testing.modules.sca import event_monitor as evm
+from wazuh_testing.modules.sca import SCA_DEFAULT_LOCAL_INTERNAL_OPTIONS as local_internal_options
+
+
+pytestmark = [pytest.mark.linux, pytest.mark.tier(level=0)]
+
+# Reference paths
+TEST_DATA_PATH = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data')
+CONFIGURATIONS_PATH = os.path.join(TEST_DATA_PATH, 'configuration_template')
+TEST_CASES_PATH = os.path.join(TEST_DATA_PATH, 'test_cases')
+
+# Configuration and cases data
+configurations_path = os.path.join(CONFIGURATIONS_PATH, 'configuration_sca.yaml')
+cases_path = os.path.join(TEST_CASES_PATH, 'cases_validate_remediation.yaml')
+
+# Test configurations
+configuration_parameters, configuration_metadata, case_ids = get_test_cases_data(cases_path)
+configurations = load_configuration_template(configurations_path, configuration_parameters, configuration_metadata)
+test_folder = '/testfile'
+
+
+# Tests
+@pytest.mark.parametrize('configuration, metadata', zip(configurations, configuration_metadata), ids=case_ids)
+def test_validate_remediation_results(configuration, metadata, prepare_cis_policies_file, truncate_monitored_files,
+                                      prepare_test_folder, set_wazuh_configuration,
+                                      configure_local_internal_options_function, restart_wazuh_function,
+                                      wait_for_sca_enabled):
+    '''
+    description: This test will check that the SCA scan reports the expected initial result (passed/failed) for a
+                 given check, and that the result changes on subsequent scans after the system is modified. For this,
+                 a folder's permissions are checked, and the check passes or fails depending on whether the
+                 permissions match. Then, the folder's permissions are changed, the test waits for a new scan, and
+                 validates that the results changed as expected.
+
+    test_phases:
+        - Copy the cis_sca ruleset file into the agent.
+        - Create a folder that will be checked by the SCA rules.
+        - Restart wazuh.
+        - Validate that the results for a given SCA check are as expected.
+        - Change the folder's permissions.
+        - Validate that the results for a given SCA check change as expected.
+
+    wazuh_min_version: 4.6.0
+
+    tier: 0
+
+    parameters:
+        - configuration:
+            type: dict
+            brief: Wazuh configuration data. Needed for set_wazuh_configuration fixture.
+        - metadata:
+            type: dict
+            brief: Wazuh configuration metadata.
+        - prepare_cis_policies_file:
+            type: fixture
+            brief: Copy the test sca policy file. Delete it after the test.
+        - prepare_test_folder:
+            type: fixture
+            brief: Create a folder with a given set of permissions. Delete it after the test.
+        - set_wazuh_configuration:
+            type: fixture
+            brief: Set the wazuh configuration according to the configuration data.
+        - configure_local_internal_options_function:
+            type: fixture
+            brief: Configure the local_internal_options_file.
+        - truncate_monitored_files:
+            type: fixture
+            brief: Truncate all the log files and json alerts files before and after the test execution.
+        - restart_wazuh_function:
+            type: fixture
+            brief: Restart wazuh.
+        - wait_for_sca_enabled:
+            type: fixture
+            brief: Wait for the sca module to start before starting the test.
+
+    assertions:
+        - Assert that the result for a given check passed/failed as expected.
+        - Assert that the result for a given check changes as expected after remediation/breaking commands.
+
+    input_description:
+        - The `cases_validate_remediation.yaml` file provides the module configuration for this test.
+        - The cis*.yaml files located in the policies folder provide the sca rules to check.
+
+    expected_output:
+        - r".*sca.*wm_sca_hash_integrity.*DEBUG: ID: (\\d+); Result: '(.*)'"
+    '''
+
+    wazuh_log_monitor = FileMonitor(LOG_FILE_PATH)
+
+    # Get the results for the checks obtained in the initial SCA scan
+    results = evm.get_sca_scan_rule_id_results(file_monitor=wazuh_log_monitor, results_num=2)
+
+    # Assert the tested check has the expected initial result (failed/passed)
+    check_result = results[metadata['check_id']-1][1]
+    assert check_result == metadata['initial_result'], f"Got unexpected SCA result: expected {metadata['initial_result']}, \
+                                                         got {check_result}"
+    # Modify the folder's permissions
+    os.chmod(test_folder, metadata['perms'])
+
+    # Get the results for the checks obtained in the SCA scan
+    results = evm.get_sca_scan_rule_id_results(file_monitor=wazuh_log_monitor, results_num=2)
+
+    # Assert the tested check result changed as expected (passed to failed, and vice-versa)
+    check_result = results[metadata['check_id']-1][1]
+    assert check_result == metadata['final_result'], f"Got unexpected SCA result: expected {metadata['final_result']}, \
+                                                       got {check_result}"
diff --git a/tests/integration/test_vulnerability_detector/test_cpe_helper/test_cpe_helper.py b/tests/integration/test_vulnerability_detector/test_cpe_helper/test_cpe_helper.py
index 40f6a26de7..8a68b9aa9b 100644
--- a/tests/integration/test_vulnerability_detector/test_cpe_helper/test_cpe_helper.py
+++ b/tests/integration/test_vulnerability_detector/test_cpe_helper/test_cpe_helper.py
@@ -278,7 +278,7 @@ def test_cpe_indexing_wrong_tags(configuration, metadata, agent_system, agent_pa
         - Restore initial configuration, both ossec.conf and local_internal_options.conf.
         - Restore the original cpe_helper.json

-    wazuh_min_version: 4.5.0
+    wazuh_min_version: 4.6.0

     tier: 2

@@ -368,7 +368,7 @@ def test_cpe_indexing_wrong_values(configuration, metadata, agent_system, agent_
         - Restore initial configuration, both ossec.conf and local_internal_options.conf.
         - Restore the original cpe_helper.json

-    wazuh_min_version: 4.5.0
+    wazuh_min_version: 4.6.0

     tier: 2

@@ -458,7 +458,7 @@ def test_cpe_indexing_missing_field(configuration, metadata, agent_system, agent
         - Restore initial configuration, both ossec.conf and local_internal_options.conf.
         - Restore the original cpe_helper.json

-    wazuh_min_version: 4.5.0
+    wazuh_min_version: 4.6.0

     tier: 2

@@ -548,7 +548,7 @@ def test_cpe_indexing_empty_fields(configuration, metadata, agent_system, agent_
         - Restore initial configuration, both ossec.conf and local_internal_options.conf.
         - Restore the original cpe_helper.json

-    wazuh_min_version: 4.5.0
+    wazuh_min_version: 4.6.0

     tier: 1

@@ -628,7 +628,7 @@ def test_cpe_indexing_empty_vendor_version(configuration, metadata, agent_system
         - Restore initial configuration, both ossec.conf and local_internal_options.conf.
- Restore the original cpe_helper.json - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 1 diff --git a/tests/integration/test_vulnerability_detector/test_feeds/test_download_feeds.py b/tests/integration/test_vulnerability_detector/test_feeds/test_download_feeds.py index f0a437bc0e..21aaf89b5b 100644 --- a/tests/integration/test_vulnerability_detector/test_feeds/test_download_feeds.py +++ b/tests/integration/test_vulnerability_detector/test_feeds/test_download_feeds.py @@ -146,7 +146,10 @@ def test_download_feeds(configuration, metadata, set_wazuh_configuration_vdt, tr timeout=metadata['download_timeout']) if metadata['update_treshold_weeks'] != 'None': - assert vd.feed_is_recently_updated(provider_name=metadata['provider_name'], - provider_os=metadata['provider_os'], - threshold_weeks=metadata['update_treshold_weeks']), '' \ - f"The {metadata['provider_os']} feed has not been recently updated" + try: + assert vd.feed_is_recently_updated(provider_name=metadata['provider_name'], + provider_os=metadata['provider_os'], + threshold_weeks=metadata['update_treshold_weeks']), '' \ + f"The {metadata['provider_os']} feed has not been recently updated" + except AssertionError: + pytest.xfail(reason="The vendor didn't update its feed, so the test fails. But this is not a Wazuh defect.") diff --git a/tests/integration/test_vulnerability_detector/test_feeds/test_msu_inventory.py b/tests/integration/test_vulnerability_detector/test_feeds/test_msu_inventory.py index ca955bc3b7..b03c56be84 100644 --- a/tests/integration/test_vulnerability_detector/test_feeds/test_msu_inventory.py +++ b/tests/integration/test_vulnerability_detector/test_feeds/test_msu_inventory.py @@ -104,7 +104,7 @@ def test_msu_catalog_patches(configuration, metadata, set_wazuh_configuration_vd - Clean the database. - Stop wazuh-modulesd. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 2 diff --git a/tests/scans/code_analysis/conftest.py b/tests/scans/code_analysis/conftest.py index 910974932c..7cab9ea939 100644 --- a/tests/scans/code_analysis/conftest.py +++ b/tests/scans/code_analysis/conftest.py @@ -41,8 +41,7 @@ def clone_wazuh_repository(pytestconfig): """ # Get Wazuh repository and branch repository_name = pytestconfig.getoption('repo') - branch = pytestconfig.getoption('branch') - commit = pytestconfig.getoption('commit') + reference = pytestconfig.getoption('reference') # Create temporary dir repository_path = tempfile.mkdtemp() @@ -50,18 +49,18 @@ def clone_wazuh_repository(pytestconfig): try: # Clone into temporary dir # depth=1 creates a shallow clone with a history truncated to 1 commit. Implies single_branch=True. 
-        if not commit:
+        try:
             Repo.clone_from(f"https://github.com/wazuh/{repository_name}.git", repository_path, depth=1,
-                            branch=branch)
-        else:
+                            branch=reference)
+        except Exception:
             repo = Repo.clone_from(f"https://github.com/wazuh/{repository_name}.git", repository_path,
                                    branch='master', no_single_branch=True)

             # Get all branches that contains the commit
             git_local = Git(repository_path)
-            commit_branch = git_local.branch('-a', '--contains', commit).split('\n')
+            commit_branch = git_local.branch('-a', '--contains', reference).split('\n')

             commit_branch_head = False
             for branch in commit_branch:
@@ -69,11 +68,11 @@
                 branch_name = branch.replace('*', '').strip()
                 repo.git.checkout(branch_name)
                 # Check if the commit is the head of the branch
-                if(str(repo.head.commit) == commit):
+                if(str(repo.head.commit) == reference):
                     commit_branch_head = True
                     break
             if not commit_branch_head:
-                raise Exception(f"{commit} was not found as any head branch")
+                raise Exception(f"{reference} was not found as any head branch")

         yield repository_path
diff --git a/tests/scans/conftest.py b/tests/scans/conftest.py
index 8b8323b5de..0c47d86257 100644
--- a/tests/scans/conftest.py
+++ b/tests/scans/conftest.py
@@ -1,11 +1,9 @@
-DEFAULT_BRANCH = 'master'
+DEFAULT_REFERENCE = 'master'
 DEFAULT_REPOSITORY = 'wazuh'


 def pytest_addoption(parser):
-    parser.addoption("--branch", action="store", default=DEFAULT_BRANCH,
-                     help=f"Set the repository used. Default: {DEFAULT_REPOSITORY}")
+    parser.addoption("--reference", action="store", default=DEFAULT_REFERENCE,
+                     help=f"Set the reference used. Default: {DEFAULT_REFERENCE}")
     parser.addoption("--repo", action="store", default=DEFAULT_REPOSITORY,
-                     help=f"Set the repository branch. Default: {DEFAULT_BRANCH}")
-    parser.addoption("--commit", action="store", default=None,
-                     help=f"Set the repository commit. Default: None")
+                     help=f"Set the repository used. Default: {DEFAULT_REPOSITORY}")
diff --git a/tests/scans/dependencies/test_dependencies.py b/tests/scans/dependencies/test_dependencies.py
index 915bb0c545..0db8932fef 100644
--- a/tests/scans/dependencies/test_dependencies.py
+++ b/tests/scans/dependencies/test_dependencies.py
@@ -17,7 +17,7 @@ def test_python_dependencies_vuln_scan(pytestconfig):
     Args:
         pytestconfig (fixture): Fixture that returns the :class:`_pytest.config.Config` object.
""" - branch = pytestconfig.getoption('--branch') + branch = pytestconfig.getoption('--reference') repo = pytestconfig.getoption('--repo') requirements_path = pytestconfig.getoption('--requirements-path') report_path = pytestconfig.getoption('--report-path') diff --git a/tests/system/provisioning/basic_cluster/roles/master-role/tasks/main.yml b/tests/system/provisioning/basic_cluster/roles/master-role/tasks/main.yml index 759441e597..8cb3db04ef 100644 --- a/tests/system/provisioning/basic_cluster/roles/master-role/tasks/main.yml +++ b/tests/system/provisioning/basic_cluster/roles/master-role/tasks/main.yml @@ -124,7 +124,7 @@ command: /var/ossec/bin/wazuh-control restart - name: "Install necessary dependencies" - command: /var/ossec/framework/python/bin/python3.9 -m pip install lockfile filetype certifi testinfra + command: /var/ossec/framework/python/bin/python3 -m pip install lockfile filetype certifi testinfra - name: Copy wdb-query.py script copy: diff --git a/tests/system/provisioning/basic_cluster/roles/worker-role/tasks/main.yml b/tests/system/provisioning/basic_cluster/roles/worker-role/tasks/main.yml index be89ed7e54..b0e9a2bfcc 100644 --- a/tests/system/provisioning/basic_cluster/roles/worker-role/tasks/main.yml +++ b/tests/system/provisioning/basic_cluster/roles/worker-role/tasks/main.yml @@ -112,4 +112,4 @@ command: /var/ossec/bin/wazuh-control restart - name: "Install necessary dependencies" - command: /var/ossec/framework/python/bin/python3.9 -m pip install lockfile filetype certifi testinfra + command: /var/ossec/framework/python/bin/python3 -m pip install lockfile filetype certifi testinfra diff --git a/tests/system/provisioning/big_cluster_40_agents/roles/master-role/tasks/main.yml b/tests/system/provisioning/big_cluster_40_agents/roles/master-role/tasks/main.yml index 252aa9618a..41b511d842 100644 --- a/tests/system/provisioning/big_cluster_40_agents/roles/master-role/tasks/main.yml +++ b/tests/system/provisioning/big_cluster_40_agents/roles/master-role/tasks/main.yml @@ -106,4 +106,4 @@ command: /var/ossec/bin/wazuh-control restart - name: "Install necessary dependencies" - command: /var/ossec/framework/python/bin/python3.9 -m pip install lockfile filetype certifi testinfra + command: /var/ossec/framework/python/bin/python3 -m pip install lockfile filetype certifi testinfra diff --git a/tests/system/provisioning/big_cluster_40_agents/roles/worker-role/tasks/main.yml b/tests/system/provisioning/big_cluster_40_agents/roles/worker-role/tasks/main.yml index 8a145dacad..670214dbad 100644 --- a/tests/system/provisioning/big_cluster_40_agents/roles/worker-role/tasks/main.yml +++ b/tests/system/provisioning/big_cluster_40_agents/roles/worker-role/tasks/main.yml @@ -105,4 +105,4 @@ command: /var/ossec/bin/wazuh-control restart - name: "Install necessary dependencies" - command: /var/ossec/framework/python/bin/python3.9 -m pip install lockfile filetype certifi testinfra + command: /var/ossec/framework/python/bin/python3 -m pip install lockfile filetype certifi testinfra diff --git a/tests/system/provisioning/four_manager_disconnected_node/roles/master-role/tasks/main.yaml b/tests/system/provisioning/four_manager_disconnected_node/roles/master-role/tasks/main.yml similarity index 96% rename from tests/system/provisioning/four_manager_disconnected_node/roles/master-role/tasks/main.yaml rename to tests/system/provisioning/four_manager_disconnected_node/roles/master-role/tasks/main.yml index 9dd425abf0..797cfd83e1 100644 --- 
a/tests/system/provisioning/four_manager_disconnected_node/roles/master-role/tasks/main.yaml +++ b/tests/system/provisioning/four_manager_disconnected_node/roles/master-role/tasks/main.yml @@ -105,4 +105,4 @@ command: /var/ossec/bin/wazuh-control restart - name: "Install necessary dependencies" - command: /var/ossec/framework/python/bin/python3.9 -m pip install lockfile filetype certifi testinfra + command: /var/ossec/framework/python/bin/python3 -m pip install lockfile filetype certifi testinfra diff --git a/tests/system/provisioning/four_manager_disconnected_node/roles/worker-role/tasks/main.yaml b/tests/system/provisioning/four_manager_disconnected_node/roles/worker-role/tasks/main.yml similarity index 96% rename from tests/system/provisioning/four_manager_disconnected_node/roles/worker-role/tasks/main.yaml rename to tests/system/provisioning/four_manager_disconnected_node/roles/worker-role/tasks/main.yml index d8daae427d..6099f3417d 100644 --- a/tests/system/provisioning/four_manager_disconnected_node/roles/worker-role/tasks/main.yaml +++ b/tests/system/provisioning/four_manager_disconnected_node/roles/worker-role/tasks/main.yml @@ -105,4 +105,4 @@ command: "{{restart_command}}" - name: "Install necessary dependencies" - command: /var/ossec/framework/python/bin/python3.9 -m pip install lockfile filetype certifi testinfra + command: /var/ossec/framework/python/bin/python3 -m pip install lockfile filetype certifi testinfra diff --git a/tests/system/test_active_response/test_netsh_windows_command/test_firewall_alerts/test_firewall_alerts.py b/tests/system/test_active_response/test_netsh_windows_command/test_firewall_alerts/test_firewall_alerts.py index 025ad2ee07..cf67f551f0 100644 --- a/tests/system/test_active_response/test_netsh_windows_command/test_firewall_alerts/test_firewall_alerts.py +++ b/tests/system/test_active_response/test_netsh_windows_command/test_firewall_alerts/test_firewall_alerts.py @@ -40,7 +40,7 @@ def test_firewall_alerts(configure_environment, metadata, generate_events): - teardown: - Restore initial configuration, ossec.conf. - wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 0 diff --git a/tests/system/test_active_response/test_netsh_windows_command/test_firewall_status/test_firewall_status.py b/tests/system/test_active_response/test_netsh_windows_command/test_firewall_status/test_firewall_status.py index 8ceaf4c293..c685de9d2e 100644 --- a/tests/system/test_active_response/test_netsh_windows_command/test_firewall_status/test_firewall_status.py +++ b/tests/system/test_active_response/test_netsh_windows_command/test_firewall_status/test_firewall_status.py @@ -44,7 +44,7 @@ def test_firewall_status(metadata, configure_environment, generate_events): - teardown: - Restore initial configuration, ossec.conf. 
- wazuh_min_version: 4.5.0 + wazuh_min_version: 4.6.0 tier: 0 diff --git a/tests/system/test_cluster/test_agent_files_deletion/test_agent_files_deletion.py b/tests/system/test_cluster/test_agent_files_deletion/test_agent_files_deletion.py index d0f1b3b7c3..870fdd5351 100644 --- a/tests/system/test_cluster/test_agent_files_deletion/test_agent_files_deletion.py +++ b/tests/system/test_cluster/test_agent_files_deletion/test_agent_files_deletion.py @@ -96,7 +96,7 @@ def test_agent_files_deletion(): for host in managers_hosts: for query in queries: result = host_manager.run_command(host, - f"{WAZUH_PATH}/framework/python/bin/python3.9 " + f"{WAZUH_PATH}/framework/python/bin/python3 " f"{script_path} '{query.format(id=agent_id)}'") assert result, f"This db query should have returned something in {host}, but it did not: {result}" @@ -122,7 +122,7 @@ def test_agent_files_deletion(): for host in managers_hosts: for query in queries: result = host_manager.run_command(host, - f"{WAZUH_PATH}/framework/python/bin/python3.9 " + f"{WAZUH_PATH}/framework/python/bin/python3 " f"{script_path} '{query.format(id=agent_id)}'") assert not result, f"This db query should have not returned anything in {host}, but it did: {result}" diff --git a/tests/system/test_cluster/test_agent_info_sync/test_agent_info_sync.py b/tests/system/test_cluster/test_agent_info_sync/test_agent_info_sync.py index d89271a1e5..ef2867a6c8 100644 --- a/tests/system/test_cluster/test_agent_info_sync/test_agent_info_sync.py +++ b/tests/system/test_cluster/test_agent_info_sync/test_agent_info_sync.py @@ -111,13 +111,13 @@ def test_agent_info_sync(clean_cluster_logs, remove_labels): # Obtain the modified agent ID. modified_agent_id = host_manager.run_command(master_host, - f"{WAZUH_PATH}/framework/python/bin/python3.9 " + f"{WAZUH_PATH}/framework/python/bin/python3 " f"{script_path} '{queries[0].format(agent=modified_agent)}'") # Check that the agent label is updated in the master's database. sleep(time_to_sync) result = host_manager.run_command(master_host, - f"{WAZUH_PATH}/framework/python/bin/python3.9 " + f"{WAZUH_PATH}/framework/python/bin/python3 " f"{script_path} \"{queries[1].format(label=label)}\"") assert modified_agent_id, \ @@ -139,14 +139,14 @@ def test_agent_info_sync_remove_agent(clean_cluster_logs): # Ensure the agent to be removed is present in the Worker's socket before attempting the test agent_list = host_manager.run_command('wazuh-worker2', - f"{WAZUH_PATH}/framework/python/bin/python3.9 " + f"{WAZUH_PATH}/framework/python/bin/python3 " f"{script_path} \"{queries[2]}\"") assert deleted_agent in agent_list, f"{deleted_agent} was not found in wazuh-worker2\'s global.db" # Obtain the deleted agent ID deleted_agent_id = host_manager.run_command(master_host, - f"{WAZUH_PATH}/framework/python/bin/python3.9 " + f"{WAZUH_PATH}/framework/python/bin/python3 " f"{script_path} '{queries[0].format(agent=deleted_agent)}'") deleted_agent_id = json.loads(deleted_agent_id.replace('[', '').replace(']', '').replace("'", '"')) diff --git a/version.json b/version.json index da2c152cda..4ba72e7fc8 100644 --- a/version.json +++ b/version.json @@ -1,4 +1,4 @@ { - "version": "4.5.0", - "revision": "40500" + "version": "4.6.0", + "revision": "40600" }