style(#3335): Fix docstrings punctuation and capitalization
nico-stefani committed Dec 28, 2022
1 parent 7790439 commit 4242c6e
Showing 11 changed files with 117 additions and 100 deletions.
16 changes: 8 additions & 8 deletions deps/wazuh_testing/wazuh_testing/modules/aws/cli_utils.py
@@ -14,10 +14,10 @@ class OutputAnalysisError(Exception):


def call_aws_module(*parameters) -> str:
"""Given some parameters call the AWS module and return the output
"""Given some parameters call the AWS module and return the output.
Returns:
str: The command output
str: The command output.
"""
command = [AWS_MODULE_PATH, *parameters]
logger.debug("Calling AWS module with: '%s'", command)
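A minimal sketch of how call_aws_module might be invoked from a test; the bucket name and flag values below are illustrative assumptions, not taken from this commit.

# Hypothetical invocation: the helper joins AWS_MODULE_PATH with the given
# parameters and returns the captured command output.
output = call_aws_module('--bucket', 'example-cloudtrail-bucket',  # illustrative bucket name
                         '--type', 'cloudtrail', '--debug', '2')   # illustrative flags
print(output)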
@@ -34,16 +34,16 @@ def _default_callback(line: str) -> str:
def analyze_command_output(
command_output: str, callback: Callable = _default_callback, expected_results: int = 1, error_message: str = ''
):
"""Analyze the given command output searching for a pattern
"""Analyze the given command output searching for a pattern.
Args:
command_output (str): the output to analyze
callback (Callable, optional): a callback to process each line. Defaults to _default_callback.
expected_results (int, optional): number of expected results. Defaults to 1.
error_message (str, optional): message to show with the exception. Defaults to ''.
command_output (str): The output to analyze.
callback (Callable, optional): A callback to process each line. Defaults to _default_callback.
expected_results (int, optional): Number of expected results. Defaults to 1.
error_message (str, optional): Message to show with the exception. Defaults to ''.
Raises:
OutputAnalysisError: when the expected results are not correct
OutputAnalysisError: When the expected results are not correct.
"""

results = []
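A hedged sketch of analyze_command_output with a custom callback; the pattern, the expected count, and the assumption that non-empty callback return values are collected as results are all illustrative.

def match_debug_lines(line: str) -> str:
    # Hypothetical callback: keep only DEBUG lines; the analyzer is assumed to
    # count the lines for which the callback returns a non-empty string.
    return line if 'DEBUG' in line else ''

output = call_aws_module('--bucket', 'example-cloudtrail-bucket', '--debug', '2')  # illustrative flags
analyze_command_output(
    command_output=output,
    callback=match_debug_lines,
    expected_results=3,  # illustrative expectation
    error_message='Unexpected number of DEBUG lines in the AWS module output',
)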
33 changes: 25 additions & 8 deletions deps/wazuh_testing/wazuh_testing/modules/aws/data_generator.py
@@ -1,6 +1,7 @@
"""Utils to generate sample data to AWS"""
from datetime import datetime
from uuid import uuid4
from typing import Optional

from . import constants as cons

@@ -10,18 +11,18 @@ class DataGenerator:
BASE_FILE_NAME = ''

def get_filename(self, *args, **kwargs) -> str:
"""Returns the filename according to the integration format
"""Returns the filename according to the integration format.
Returns:
str: syntetic filename
str: Syntetic filename.
"""
raise NotImplementedError()

def get_data_sample(self, *args, **kwargs) -> dict:
"""Returns a sample of data according to the integration format
"""Returns a sample of data according to the integration format.
Returns:
dict: syntetic data
dict: Syntetic data.
"""
raise NotImplementedError()

@@ -30,9 +31,14 @@ class CloudTrailDataGenerator(DataGenerator):
BASE_PATH = f'{cons.AWS_LOGS}/{cons.RANDOM_ACCOUNT_ID}/{cons.CLOUD_TRAIL}/{cons.US_EAST_1_REGION}/'
BASE_FILE_NAME = f'{cons.RANDOM_ACCOUNT_ID}_{cons.CLOUD_TRAIL}_{cons.US_EAST_1_REGION}_'

def get_filename(self, prefix=None, **kwargs) -> str:
"""Return the filename in the cloudtrail format
<prefix>/AWSLogs/<suffix>/<organization_id>/<account_id>/CloudTrail/<region>/<year>/<month>/<day>
def get_filename(self, *args, **kwargs) -> str:
"""Return the filename in the cloudtrail format.
Example:
<prefix>/AWSLogs/<suffix>/<organization_id>/<account_id>/CloudTrail/<region>/<year>/<month>/<day>
Returns:
str: Syntetic filename.
"""
now = datetime.now()
path = f"{self.BASE_PATH}{now.strftime(cons.PATH_DATE_FORMAT)}/"
@@ -41,6 +47,11 @@ def get_filename(self, prefix=None, **kwargs) -> str:
return f'{path}{name}'

def get_data_sample(self) -> dict:
"""Returns a sample of data according to the cloudtrail format.
Returns:
dict: Syntetic data.
"""
return {
'Records': [
{
@@ -93,6 +104,12 @@ def get_data_sample(self) -> dict:


def get_data_generator(bucket_type: str) -> DataGenerator:
"""Given the bucket type return the correspondant data generator instance
"""Given the bucket type return the correspondant data generator instance.
Args:
bucket_type (str): Bucket type to match the data generator.
Returns:
DataGenerator: Data generator for the given bucket.
"""
return buckets_data_mapping[bucket_type]()
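A rough usage sketch for the generator mapping; the 'cloudtrail' key is an assumption about how CloudTrailDataGenerator is registered in buckets_data_mapping.

generator = get_data_generator('cloudtrail')   # mapping key is assumed
filename = generator.get_filename()            # path under AWSLogs/<account_id>/CloudTrail/<region>/<date>/
sample = generator.get_data_sample()           # dict with a 'Records' list, per the sample above
print(filename, list(sample['Records'][0]))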
24 changes: 12 additions & 12 deletions deps/wazuh_testing/wazuh_testing/modules/aws/db_utils.py
@@ -19,28 +19,28 @@ def get_db_connection(path: Path) -> sqlite3.Connection:


def s3_db_exists() -> bool:
"""Check if `s3_cloudtrail.db` exists
"""Check if `s3_cloudtrail.db` exists.
Returns:
bool: True if exists else False
bool: True if exists else False.
"""
return S3_CLOUDTRAIL_DB_PATH.exists()


def delete_s3_db() -> None:
"""Delete `s3_cloudtrail.db` file"""
"""Delete `s3_cloudtrail.db` file."""
if s3_db_exists():
S3_CLOUDTRAIL_DB_PATH.unlink()


def get_s3_db_row(table_name: str) -> S3CloudTrailRow:
"""Return one row from the given table name
"""Return one row from the given table name.
Args:
table_name (str): table name to search into
table_name (str): Table name to search into.
Returns:
S3CloudTrailRow: the first row of the table
S3CloudTrailRow: The first row of the table.
"""
connection = get_db_connection(S3_CLOUDTRAIL_DB_PATH)
cursor = connection.cursor()
@@ -50,13 +50,13 @@ def get_s3_db_row(table_name: str) -> S3CloudTrailRow:


def get_multiple_s3_db_row(table_name: str) -> Iterator[S3CloudTrailRow]:
"""Return all rows from the given table name
"""Return all rows from the given table name.
Args:
table_name (str): table name to search into
table_name (str): Table name to search into.
Yields:
Iterator[S3CloudTrailRow]: all the rows in the table
Iterator[S3CloudTrailRow]: All the rows in the table.
"""
connection = get_db_connection(S3_CLOUDTRAIL_DB_PATH)
cursor = connection.cursor()
@@ -66,13 +66,13 @@ def get_multiple_s3_db_row(table_name: str) -> Iterator[S3CloudTrailRow]:


def table_exists(table_name: str) -> bool:
"""Check if the given table name exists
"""Check if the given table name exists.
Args:
table_name (str): table name to search for
table_name (str): Table name to search for.
Returns:
bool: True if exists else False
bool: True if exists else False.
"""
connection = get_db_connection(S3_CLOUDTRAIL_DB_PATH)
cursor = connection.cursor()
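A short sketch of how these helpers might back a test assertion; the 'cloudtrail' table name is an assumption for illustration.

if s3_db_exists() and table_exists('cloudtrail'):        # table name is assumed
    first_row = get_s3_db_row('cloudtrail')               # first S3CloudTrailRow in the table
    all_rows = list(get_multiple_s3_db_row('cloudtrail'))
    assert first_row == all_rows[0]
delete_s3_db()                                             # remove s3_cloudtrail.db between tests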
10 changes: 5 additions & 5 deletions deps/wazuh_testing/wazuh_testing/modules/aws/event_monitor.py
@@ -13,10 +13,10 @@ def make_aws_callback(pattern, prefix=''):
Args:
pattern (str): String to match on the log.
prefix (str): regular expression used as prefix before the pattern.
prefix (str): Regular expression used as prefix before the pattern.
Returns:
lambda: function that returns if there's a match in the file
lambda: Function that returns if there's a match in the file.
"""
pattern = r'\s+'.join(pattern.split())
regex = re.compile(r'{}{}'.format(prefix, pattern))
@@ -25,13 +25,13 @@ def callback_detect_aws_module_called(parameters: list) -> Callable:


def callback_detect_aws_module_called(parameters: list) -> Callable:
"""Detects if aws module was called with correct parameters
"""Detects if aws module was called with correct parameters.
Args:
parameters (list): values to check
parameters (list): Values to check.
Returns:
Callable: callback to match the line
Callable: Callback to match the line.
"""
regex = re.compile(fr'.*DEBUG: Launching S3 Command: {" ".join(parameters)}\n*')
return lambda line: regex.match(line)
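A sketch of how these callbacks could be plugged into the log monitor fixture mentioned in test_discard_regex.py; the monitor's start() signature, the parameters, and the log pattern are assumptions.

parameters = ['--bucket', 'example-cloudtrail-bucket', '--type', 'cloudtrail']  # illustrative parameters
wazuh_log_monitor.start(
    timeout=60,
    callback=callback_detect_aws_module_called(parameters),
    error_message='The AWS module was not called with the expected parameters',
)

# make_aws_callback builds a whitespace-tolerant matcher from a free-form pattern.
skip_callback = make_aws_callback('Skipping file with discard regex', prefix=r'.*DEBUG: ')  # pattern is illustrative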
28 changes: 14 additions & 14 deletions deps/wazuh_testing/wazuh_testing/modules/aws/s3_utils.py
@@ -14,14 +14,14 @@


def upload_file(bucket_type: str, bucket_name: str) -> str:
"""Upload a file to an S3 bucket
"""Upload a file to an S3 bucket.
Args:
bucket_type (str): Bucket type to generate the data
bucket_name (str): Bucket to upload
bucket_type (str): Bucket type to generate the data.
bucket_name (str): Bucket to upload.
Returns:
str: the name of the file if was uploaded, else ''
str: The name of the file if was uploaded, else ''.
"""
dg = get_data_generator(bucket_type)
filename = dg.get_filename()
@@ -42,20 +42,20 @@ def delete_file(filename: str, bucket_name: str) -> None:
"""Delete a given file from the bucket.
Args:
filename (str): Full filename to delete
bucket_name (str): bucket that contains the file
filename (str): Full filename to delete.
bucket_name (str): Bucket that contains the file.
"""
s3.Object(bucket_name, filename).delete()


def file_exists(filename: str, bucket_name: str) -> bool:
"""Check if a file exists in a bucket
"""Check if a file exists in a bucket.
Args:
filename (str): Full filename to check
bucket_name (str): bucket that contains the file
filename (str): Full filename to check.
bucket_name (str): Bucket that contains the file.
Returns:
bool: True if exists else False
bool: True if exists else False.
"""
exists = True
try:
@@ -68,14 +68,14 @@ def file_exists(filename: str, bucket_name: str) -> bool:


def get_last_file_key(bucket_type: str, bucket_name: str) -> str:
"""Return the last file key contained in a default path of a bucket
"""Return the last file key contained in a default path of a bucket.
Args:
bucket_type (str): Bucket type to obtain the data generator
bucket_name (str): Bucket that contains the file
bucket_type (str): Bucket type to obtain the data generator.
bucket_name (str): Bucket that contains the file.
Returns:
str: The last key in the bucket
str: The last key in the bucket.
"""

dg = get_data_generator(bucket_type)
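A minimal upload/verify/clean-up sketch with these helpers; the bucket name is illustrative and valid AWS credentials are assumed to be configured.

bucket = 'example-wazuh-test-bucket'   # illustrative bucket name
uploaded = upload_file(bucket_type='cloudtrail', bucket_name=bucket)
if uploaded and file_exists(filename=uploaded, bucket_name=bucket):
    last_key = get_last_file_key(bucket_type='cloudtrail', bucket_name=bucket)
    print(f'Uploaded {uploaded}; last key in the default path is {last_key}')
    delete_file(filename=uploaded, bucket_name=bucket)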
12 changes: 6 additions & 6 deletions tests/integration/test_aws/test_discard_regex.py
@@ -49,8 +49,8 @@ def test_discard_regex(
- test:
- Check in the ossec.log that a line has appeared calling the module with correct parameters.
- Check the expected number of events were forwarded to analysisd, only logs stored in the bucket and skips
the ones that match with regex
- Check the database was created and updated accordingly
the ones that match with regex.
- Check the database was created and updated accordingly.
- teardown:
- Truncate wazuh logs.
- Restore initial configuration, both ossec.conf and local_internal_options.conf.
@@ -71,7 +71,7 @@ brief: Apply changes to the ossec.conf configuration.
brief: Apply changes to the ossec.conf configuration.
- clean_s3_cloudtrail_db:
type: fixture
brief: Delete the DB file before and after the test execution
brief: Delete the DB file before and after the test execution.
- configure_local_internal_options_function:
type: fixture
brief: Apply changes to the local_internal_options.conf configuration.
@@ -83,11 +83,11 @@ brief: Restart the wazuh service.
brief: Restart the wazuh service.
- wazuh_log_monitor:
type: fixture
brief: Return a `ossec.log` monitor
brief: Return a `ossec.log` monitor.
assertions:
- Check in the log that the module was called with correct parameters.
- Check the expected number of events were forwarded to analysisd
- Check the database was created and updated accordingly
- Check the expected number of events were forwarded to analysisd.
- Check the database was created and updated accordingly.
input_description:
- The `configuration_discard_regex` file provides the module configuration for this test.
- The `cases_discard_regex` file provides the test cases.
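Tying the helpers above together, a hedged outline of the flow this test docstring describes; the fixture names come from the docstring itself, while the bucket name, parameters, and table name are assumptions.

filename = upload_file(bucket_type='cloudtrail', bucket_name='example-cloudtrail-bucket')  # illustrative bucket
wazuh_log_monitor.start(
    timeout=60,
    callback=callback_detect_aws_module_called(['--bucket', 'example-cloudtrail-bucket']),  # illustrative parameters
    error_message='The AWS module call was not found in ossec.log',
)
assert s3_db_exists() and table_exists('cloudtrail')   # table name is an assumption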