Skip to content

Commit

Permalink
add: Add generated documentation format selection. #2075
Browse files Browse the repository at this point in the history
Now, using `--format`, you can choose whether the output data is generated in JSON or YAML format.
  • Loading branch information
Luis Gonzalez committed Nov 3, 2021
1 parent 2d973f1 commit 302e590
Show file tree
Hide file tree
Showing 3 changed files with 52 additions and 26 deletions.
31 changes: 18 additions & 13 deletions deps/wazuh_testing/wazuh_testing/qa_docs/doc_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,16 +30,18 @@ class DocGenerator:
__id_counter (int): An integer that counts the test/group ID when it is created.
ignore_regex (list): A list with compiled paths to be ignored.
include_regex (list): A list with regular expressions used to parse a file or not.
file_format (str): Generated documentation format.
"""
LOGGER = Logging.get_logger(QADOCS_LOGGER)

def __init__(self, config):
def __init__(self, config, file_format='json'):
"""Class constructor
Initialize every attribute.
Args:
config (Config): A `Config` instance with the loaded configuration.
file_format (str): Generated documentation format.
"""
self.conf = config
self.parser = CodeParser(self.conf)
Expand All @@ -51,6 +53,7 @@ def __init__(self, config):
if self.conf.mode == Mode.DEFAULT:
for include_regex in self.conf.include_regex:
self.include_regex.append(re.compile(include_regex.replace('\\', '/')))
self.file_format = file_format

def is_valid_folder(self, path):
"""Check if a folder is included so it would be parsed.
Expand Down Expand Up @@ -152,21 +155,23 @@ def dump_output(self, content, doc_path):
DocGenerator.LOGGER.debug('Creating documentation folder')
os.makedirs(os.path.dirname(doc_path))

DocGenerator.LOGGER.debug(f"Writing {doc_path}.json")
if self.file_format == 'json':
DocGenerator.LOGGER.debug(f"Writing {doc_path}.json")

try:
with open(f"{doc_path}.json", 'w+') as out_file:
out_file.write(f"{json.dumps(content, indent=4)}\n")
except IOError:
raise QAValueError(f"Cannot write in {doc_path}.json", DocGenerator.LOGGER.error)
try:
with open(f"{doc_path}.json", 'w+') as out_file:
out_file.write(f"{json.dumps(content, indent=4)}\n")
except IOError:
raise QAValueError(f"Cannot write in {doc_path}.json", DocGenerator.LOGGER.error)

DocGenerator.LOGGER.debug(f"Writing {doc_path}.yaml")
if self.file_format == 'yaml':
DocGenerator.LOGGER.debug(f"Writing {doc_path}.yaml")

try:
with open(doc_path + ".yaml", "w+") as out_file:
out_file.write(yaml.dump(content))
except IOError:
raise QAValueError(f"Cannot write in {doc_path}.yaml", DocGenerator.LOGGER.error)
try:
with open(doc_path + ".yaml", "w+") as out_file:
out_file.write(yaml.dump(content))
except IOError:
raise QAValueError(f"Cannot write in {doc_path}.yaml", DocGenerator.LOGGER.error)

def create_group(self, path, group_id):
"""Parse the content of a group file and dump the content into a file.
Expand Down
27 changes: 20 additions & 7 deletions deps/wazuh_testing/wazuh_testing/qa_docs/lib/index_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import os
import re
import json
import yaml
import requests
from elasticsearch import Elasticsearch, helpers

Expand All @@ -20,23 +21,27 @@ class IndexData:
Attributes:
path (str): A string that contains the path where the parsed documentation is located.
index (str): A string with the index name to be indexed with Elasticsearch.
files_format (str): A string with the generated documentation format.
regex: A regular expression to get JSON files.
es (ElasticSearch): An `ElasticSearch` client instance.
output (list): A list to be indexed in Elasticsearch.
"""
LOGGER = Logging.get_logger(QADOCS_LOGGER)

def __init__(self, index, path):
def __init__(self, index, path, file_format):
"""Class constructor
Initialize every attribute.
Args:
config (Config): A `Config` instance with the loaded configuration.
index (str): Index name.
path (str): Path where the generated documentation is allocated.
file_format (str): Generated documentation format.
"""
self.path = path
self.index = index
self.regex = re.compile(".*json")
self.files_format = file_format
self.regex = re.compile(f".*{file_format}")
self.es = Elasticsearch()
self.output = []

Expand Down Expand Up @@ -74,16 +79,24 @@ def read_files_content(self, files):
Args:
files (list): A list with the files that matched with the regex.
"""
for file in files:
with open(file) as test_file:
lines = json.load(test_file)
self.output.append(lines)
        # Parse every matched documentation file with the loader that matches
        # the configured output format, accumulating one parsed document per
        # file into self.output for later bulk indexing.
        if self.files_format == 'json':
            for file in files:
                with open(file, 'r') as test_file:
                    lines = json.load(test_file)
                    self.output.append(lines)
        else:
            # Any non-JSON format is treated as YAML.
            # NOTE(review): FullLoader can construct arbitrary Python objects
            # from tagged YAML; prefer yaml.safe_load if these files could
            # ever come from an untrusted source — confirm with the team.
            for file in files:
                with open(file, 'r') as test_file:
                    lines = yaml.load(test_file, Loader=yaml.FullLoader)
                    self.output.append(lines)


def remove_index(self):
"""Delete an index."""
delete = self.es.indices.delete(index=self.index, ignore=[400, 404])
IndexData.LOGGER.info(f'Delete index {self.index}\n {delete}\n')


def run(self):
"""Collect all the documentation files and makes a request to the BULK API to index the new data."""
self.test_connection()
Expand Down
20 changes: 14 additions & 6 deletions deps/wazuh_testing/wazuh_testing/scripts/qa_docs.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
VERSION_PATH = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'qa_docs', 'VERSION.json')
SCHEMA_PATH = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'qa_docs', 'schema.yaml')
OUTPUT_PATH = os.path.join(gettempdir(), 'qa_docs', 'output')
OUTPUT_FORMAT = 'json'
LOG_PATH = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'qa_docs', 'log')
SEARCH_UI_PATH = os.path.join(os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'qa_docs', 'search_ui')
qadocs_logger = Logging(QADOCS_LOGGER, 'INFO', True, os.path.join(LOG_PATH,
Expand Down Expand Up @@ -54,6 +55,10 @@ def set_parameters(args):
global OUTPUT_PATH
OUTPUT_PATH = args.output_path

if args.output_format:
global OUTPUT_FORMAT
OUTPUT_FORMAT = args.output_format


def get_parameters():
"""Capture the script parameters
Expand Down Expand Up @@ -102,6 +107,9 @@ def get_parameters():

parser.add_argument('-o', dest='output_path',
help="Specifies the output directory for test parsed when `-t, --tests` is used.")

parser.add_argument('--format', dest='output_format', choices=['json', 'yaml'],
help="Specifies the generated files format.")

parser.add_argument('-e', '--exist', nargs='+', default=[], dest='test_exist',
help="Checks if test(s) exist or not.",)
Expand Down Expand Up @@ -295,7 +303,7 @@ def parse_data(args):
qadocs_logger.info(f"Parsing the following test(s) {args.test_names}")

docs = DocGenerator(Config(SCHEMA_PATH, args.tests_path, OUTPUT_PATH, test_names=args.test_names,
check_doc=args.check_doc))
check_doc=args.check_doc), OUTPUT_FORMAT)

# Parse a list of test types
elif args.test_types:
Expand All @@ -304,15 +312,15 @@ def parse_data(args):
# Parse a list of test modules
if args.test_modules:
docs = DocGenerator(Config(SCHEMA_PATH, args.tests_path, OUTPUT_PATH, args.test_types,
args.test_modules))
args.test_modules), OUTPUT_FORMAT)
else:
docs = DocGenerator(Config(SCHEMA_PATH, args.tests_path, OUTPUT_PATH, args.test_types))
docs = DocGenerator(Config(SCHEMA_PATH, args.tests_path, OUTPUT_PATH, args.test_types), OUTPUT_FORMAT)

# Parse the whole path
else:
if not (args.index_name or args.app_index_name or args.launching_index_name):
qadocs_logger.info(f"Parsing all tests located in {args.tests_path}")
docs = DocGenerator(Config(SCHEMA_PATH, args.tests_path, OUTPUT_PATH))
docs = DocGenerator(Config(SCHEMA_PATH, args.tests_path, OUTPUT_PATH), OUTPUT_FORMAT)
docs.run()

if args.test_types or args.test_modules or args.test_names and not args.check_doc:
Expand All @@ -326,7 +334,7 @@ def index_and_visualize_data(args):
"""Index the data previously parsed and visualize it."""
# Index the previous parsed tests into Elasticsearch
if args.index_name:
index_data = IndexData(args.index_name, OUTPUT_PATH)
index_data = IndexData(args.index_name, OUTPUT_PATH, OUTPUT_FORMAT)
index_data.run()

# Launch SearchUI with index_name as input
Expand All @@ -337,7 +345,7 @@ def index_and_visualize_data(args):
# Index the previous parsed tests into Elasticsearch and then launch SearchUI
elif args.launching_index_name:
qadocs_logger.debug(f"Indexing {args.launching_index_name}")
index_data = IndexData(args.launching_index_name, OUTPUT_PATH)
index_data = IndexData(args.launching_index_name, OUTPUT_PATH, OUTPUT_FORMAT)
index_data.run()
# When the SearchUI index is no longer hardcoded, args.launching_index_name will be used
run_searchui(args.launching_index_name)
Expand Down

0 comments on commit 302e590

Please sign in to comment.