
Commit

Merge pull request #1691 from wazuh/dev-1681-docgenerator-search_ui-integration

DocGenerator - Search UI integration
palaciosjeremias authored Aug 9, 2021
2 parents 53ccb14 + 45c8911 commit 9fa1d31
Showing 32 changed files with 30,058 additions and 41 deletions.
35 changes: 23 additions & 12 deletions docs/DocGenerator/DocGenerator.py
@@ -11,22 +11,23 @@
import re
import json
import yaml
import ast
from lib.Config import Config
from lib.CodeParser import CodeParser
from lib.Sanity import Sanity
from lib.Utils import clean_folder
from lib.IndexData import IndexData
import warnings
import logging
import argparse

VERSION = "0.1"
CONFIG_PATH = "config.yaml"


class DocGenerator:
"""
brief: Main class of DocGenerator tool.
It´s in charge of walk every test file, and every group file to dump the parsed documentation
It´s in charge of walk every test file, and every group file to dump the parsed documentation.
"""
def __init__(self, config):
self.conf = config
@@ -41,7 +42,7 @@ def __init__(self, config):

def is_valid_folder(self, path):
"""
brief: Checks if a path should be ignored because its in the ignore list.
brief: Checks if a path should be ignored because it is in the ignore list.
args:
- "path (str): Folder location to be controlled"
returns: "boolean: False if the path should be ignored. True otherwise."
@@ -53,7 +54,8 @@ def is_valid_file(self, file):

def is_valid_file(self, file):
"""
brief: Checks if a file name should be ignored because it's in the ignore list or doesn´t match with the regexes.
brief: Checks if a file name should be ignored because it's in the ignore list
or doesn´t match with the regexes.
args:
- "file (str): File name to be controlled"
returns: "boolean: False if the file should be ignored. True otherwise."
@@ -84,7 +86,7 @@ def get_group_doc_path(self, group):
returns: "string: The name of the documentation group file"
"""
base_path = os.path.join(self.conf.documentation_path, os.path.basename(self.scan_path))
doc_path = os.path.join(base_path,group['name']+".group")
doc_path = os.path.join(base_path, group['name']+".group")
return doc_path

def get_test_doc_path(self, path):
Expand Down Expand Up @@ -154,11 +156,9 @@ def create_test(self, path, group_id):
logging.warning(f"Content for {path} is empty, ignoring it")
return None



def parse_folder(self, path, group_id):
"""
brief: Search in a specific folder to parse possible group files and each test file
brief: Search in a specific folder to parse possible group files and each test file.
args:
- "path (string): The path of the folder to be parsed."
- "group_id (string): The id of the group where the new elements belong."
@@ -173,15 +173,15 @@ def parse_folder(self, path, group_id):
(root, folders, files) = next(os.walk(path))
for file in files:
if self.is_group_file(file):
new_group = self.create_group(os.path.join(root,file), group_id)
new_group = self.create_group(os.path.join(root, file), group_id)
if new_group:
group_id = new_group
break
for file in files:
if self.is_valid_file(file):
self.create_test(os.path.join(root,file), group_id)
self.create_test(os.path.join(root, file), group_id)
for folder in folders:
self.parse_folder(os.path.join(root,folder), group_id)
self.parse_folder(os.path.join(root, folder), group_id)

def run(self):
"""
@@ -194,8 +194,9 @@ def run(self):
logging.debug(f"Going to parse files on '{path}'")
self.parse_folder(path, self.__id_counter)


def start_logging(folder, debug_level=logging.INFO):
LOG_PATH = os.path.join(folder, os.path.splitext(os.path.basename(__file__))[0]+".log" )
LOG_PATH = os.path.join(folder, os.path.splitext(os.path.basename(__file__))[0]+".log")
if not os.path.exists(folder):
os.makedirs(folder)
logging.basicConfig(filename=LOG_PATH, level=debug_level)
@@ -207,6 +208,8 @@ def start_logging(folder, debug_level=logging.INFO):
parser.add_argument('-v', help="Print version", action='store_true', dest="version")
parser.add_argument('-t', help="Test configuration", action='store_true', dest='test_config')
parser.add_argument('-d', help="Enable debug messages.", action='count', dest='debug_level')
parser.add_argument('-i', help="Indexes the data to elasticsearch.", dest='index_name')
parser.add_argument('-l', help="Indexes the data and launch the application.", dest='launch_app')
args = parser.parse_args()

if args.debug_level:
@@ -221,6 +224,14 @@
elif args.sanity:
sanity = Sanity(Config(CONFIG_PATH))
sanity.run()
elif args.index_name:
indexData = IndexData(args.index_name, Config(CONFIG_PATH))
indexData.run()
elif args.launch_app:
indexData = IndexData(args.launch_app, Config(CONFIG_PATH))
indexData.run()
os.chdir("Search-UI")
os.system("ELASTICSEARCH_HOST=http://localhost:9200 npm start")
else:
docs = DocGenerator(Config(CONFIG_PATH))
docs.run()
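
Note: the new -i and -l options delegate to lib/IndexData.py, which is not shown in this excerpt. As a rough, hypothetical sketch only (the class layout, file paths, and the localhost:9200 host below are assumptions, not the committed implementation), indexing the generated documentation with the official Elasticsearch Python client could look roughly like this:

# Hypothetical sketch of what the "-i <index>" path might do; the real logic
# lives in lib/IndexData.py, which this excerpt does not include.
import os
import json
from elasticsearch import Elasticsearch, helpers


class IndexDataSketch:
    def __init__(self, index_name, config):
        self.index_name = index_name
        self.docs_path = config.documentation_path
        # Assumed host; the committed code may read this from config.yaml instead.
        self.es = Elasticsearch("http://localhost:9200")

    def iter_documents(self):
        # Yield one bulk action per generated documentation file.
        for root, _, files in os.walk(self.docs_path):
            for name in files:
                if name.endswith(".json"):
                    with open(os.path.join(root, name)) as handle:
                        yield {"_index": self.index_name, "_source": json.load(handle)}

    def run(self):
        helpers.bulk(self.es, self.iter_documents())

With -l, the same indexing step runs first and the script then changes into Search-UI and starts the front end with ELASTICSEARCH_HOST pointing at the same instance, as the diff above shows.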
24 changes: 24 additions & 0 deletions docs/DocGenerator/Search-UI/.gitignore
@@ -0,0 +1,24 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

# dependencies
/node_modules
/netlify-lambda
/.pnp
.pnp.js

# testing
/coverage

# production
/build

# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local

npm-debug.log*
yarn-debug.log*
yarn-error.log*
39 changes: 39 additions & 0 deletions docs/DocGenerator/Search-UI/functions/search.js
@@ -0,0 +1,39 @@
/*
This is a Netlify Function that proxies our Elasticsearch instance.
*/
import fetch from "node-fetch";
import https from "https";
import http from "http";

// Don't do this in production, this is in place to aid with demo environments which have self-signed certificates.
const httpsAgent = new https.Agent({
rejectUnauthorized: false
});

const httpAgent = new http.Agent();

exports.handler = function(event, context, callback) {
const host = process.env.ELASTICSEARCH_HOST;
const agent = host.startsWith("http:") ? httpAgent : httpsAgent;

fetch(`${host}/qa-doc/_search`, {
method: "POST",
headers: { "content-type": "application/json" },
body: event.body,
agent
})
.then(response => response.text().then(body => [response, body]))
.then(([response, body]) => {
callback(null, {
statusCode: response.status,
body: body
});
})
.catch(e => {
console.error(e);
callback(null, {
statusCode: 500,
body: `An error occurred: ${e}`
});
});
};
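
This function forwards whatever request body the Search UI sends to the qa-doc index's _search endpoint and relays the response. For a quick manual check against a local Elasticsearch (assuming the same http://localhost:9200 host used elsewhere in this PR), an equivalent direct query might look like the following; the match_all body is only an illustrative placeholder, not the query Search UI actually builds:

# Illustrative only: query the same endpoint the Netlify function proxies.
# Assumes a local Elasticsearch at http://localhost:9200 with a populated qa-doc index.
import json
import requests

response = requests.post(
    "http://localhost:9200/qa-doc/_search",
    headers={"content-type": "application/json"},
    data=json.dumps({"query": {"match_all": {}}, "size": 5}),
)
print(response.status_code)
print(json.dumps(response.json(), indent=2))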
4 changes: 4 additions & 0 deletions docs/DocGenerator/Search-UI/netlify.toml
@@ -0,0 +1,4 @@
[build]
functions = "netlify-lambda"
publish = "build/"
command = "npm run build"
