Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Custom qa-docs logger use #1896

Merged
merged 15 commits into from
Sep 16, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
80 changes: 57 additions & 23 deletions deps/wazuh_testing/wazuh_testing/qa_docs/doc_generator.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,18 +11,20 @@
import re
import json
import yaml
from wazuh_testing.qa_docs.lib.config import mode
from wazuh_testing.qa_docs.lib.config import Config, mode
from wazuh_testing.qa_docs.lib.code_parser import CodeParser
from wazuh_testing.qa_docs.lib.utils import clean_folder
import warnings
import logging

from wazuh_testing.qa_docs import QADOCS_LOGGER
from wazuh_testing.tools.logging import Logging
from wazuh_testing.tools.exceptions import QAValueError

class DocGenerator:
"""
brief: Main class of DocGenerator tool.
It´s in charge of walk every test file, and every group file to dump the parsed documentation.
"""
LOGGER = Logging.get_logger(QADOCS_LOGGER)

def __init__(self, config):
self.conf = config
self.parser = CodeParser(self.conf)
def is_valid_folder(self, path):
    """
    brief: Checks if a folder should be documented or ignored.
    args:
        - "path (str): Folder location to be controlled"
    returns: "boolean: False if the folder should be ignored. True otherwise."
    """
    for regex in self.ignore_regex:
        if regex.match(path):
            # Fixed log text: the regex DID match, which is exactly why the folder is skipped.
            DocGenerator.LOGGER.warning(f"Folder validation: {regex} matched {path}, so it will be ignored")
            return False

    return True

def is_valid_file(self, file):
    """
    brief: Checks if a file should be documented or ignored.
    args:
        - "file (str): File name to be controlled"
    returns: "boolean: False if the file should be ignored. True otherwise."
    """
    for regex in self.ignore_regex:
        if regex.match(file):
            # Fixed log text: the regex DID match, which is why the file is ignored.
            DocGenerator.LOGGER.warning(f"File validation: {regex} matched {file}, so it will be ignored")
            return False

    for regex in self.include_regex:
        if regex.match(file):
            # An include match is the normal/expected case, so log it at DEBUG, not WARNING,
            # and with text that reflects the actual event (the original said "not matching").
            DocGenerator.LOGGER.debug(f"File validation: {regex} matched {file}, so it will be documented")
            return True

    return False

def is_group_file(self, path):
    """
    brief: Checks if a file is one of the configured group files.
    args:
        - "path (str): File location to be controlled"
    returns: "boolean: True if the file is a group file. False otherwise."
    """
    # Equality-based membership test over the configured group files.
    return path in self.conf.group_files

def get_group_doc_path(self, group):
    """
    brief: Gets the path where the documentation file of a group should be dumped.
    args:
        - "group (dict): Parsed group content, must contain a 'name' field."
    returns: "string: The output path for the group documentation file."
    """
    output_dir = os.path.join(self.conf.documentation_path, os.path.basename(self.scan_path))

    return os.path.join(output_dir, f"{group['name']}.group")

def get_test_doc_path(self, path):
    """
    brief: Gets the path (without extension) where a test documentation file should be dumped.
    args:
        - "path (str): Location of the parsed test file."
    returns: "string: The output path for the test documentation file, extension stripped."
    """
    # Mirror the test file's location below the scan path into the documentation folder.
    relative_path = path.replace(self.scan_path, "")
    output_root = os.path.join(self.conf.documentation_path, os.path.basename(self.scan_path))
    doc_path, _extension = os.path.splitext(output_root + relative_path)

    return doc_path

def dump_output(self, content, doc_path):
    """
    brief: Dumps the parsed documentation content into JSON and YAML output files.
    args:
        - "content (dict): The parsed documentation to be dumped."
        - "doc_path (string): The path where the information should be dumped (without extension)."
    raises: "QAValueError: When the JSON or YAML output file cannot be written."
    """
    if not os.path.exists(os.path.dirname(doc_path)):
        DocGenerator.LOGGER.debug('Creating documentation folder')
        # exist_ok=True closes the race between the exists() check above and this call
        # (e.g. a parallel run creating the folder in between).
        os.makedirs(os.path.dirname(doc_path), exist_ok=True)

    try:
        DocGenerator.LOGGER.debug(f"Writing {doc_path}.json")
        with open(doc_path + ".json", "w+") as out_file:
            out_file.write(json.dumps(content, indent=4))
    except IOError as error:
        # Chain the original error so the root cause survives in the traceback.
        raise QAValueError(f"Cannot write in {doc_path}.json", DocGenerator.LOGGER.error) from error

    try:
        DocGenerator.LOGGER.debug(f"Writing {doc_path}.yaml")
        with open(doc_path + ".yaml", "w+") as out_file:
            out_file.write(yaml.dump(content))
    except IOError as error:
        raise QAValueError(f"Cannot write in {doc_path}.yaml", DocGenerator.LOGGER.error) from error

def create_group(self, path, group_id):
    """
    brief: Parses a group file and dumps its documentation output.
    args:
        - "path (string): Location of the group file to parse."
        - "group_id (string): Id of the group where the new group document belongs."
    returns: "integer: The id assigned to the new group document. None if the file content is empty."
    """
    self.__id_counter = self.__id_counter + 1
    group = self.parser.parse_group(path, self.__id_counter, group_id)

    # Guard clause: nothing parsed means nothing to dump.
    if not group:
        DocGenerator.LOGGER.warning(f"Content for {path} is empty, ignoring it")
        return None

    doc_path = self.get_group_doc_path(group)
    self.dump_output(group, doc_path)
    DocGenerator.LOGGER.debug(f"New group file '{doc_path}' was created with ID:{self.__id_counter}")

    return self.__id_counter

def create_test(self, path, group_id):
    """
    brief: Parses a test file and dumps its documentation output.
    args:
        - "path (string): Location of the test file to parse."
        - "group_id (string): Id of the group where the new test document belongs."
    returns: "integer: The id assigned to the new test document. None if the file content is empty."
    """
    self.__id_counter = self.__id_counter + 1
    test = self.parser.parse_test(path, self.__id_counter, group_id)

    # Guard clause: nothing parsed means nothing to dump.
    if not test:
        DocGenerator.LOGGER.warning(f"Content for {path} is empty, ignoring it")
        return None

    if self.conf.mode == mode.DEFAULT:
        doc_path = self.get_test_doc_path(path)
    elif self.conf.mode == mode.SINGLE_TEST:
        doc_path = self.conf.documentation_path
        # If there is no printable info for the single test, skip dumping it entirely.
        if self.print_test_info(test) is None:
            return

    self.dump_output(test, doc_path)
    DocGenerator.LOGGER.debug(f"New documentation file '{doc_path}' was created with ID:{self.__id_counter}")

    return self.__id_counter

def parse_folder(self, path, group_id):
    """
    brief: Walks a folder, documenting every valid test file found and recursing into subfolders.
    args:
        - "path (string): The folder location to parse."
        - "group_id (string): The id of the group where the new elements belong."
    """
    # Guard: a missing include path is logged and skipped, not treated as fatal.
    if not os.path.exists(path):
        DocGenerator.LOGGER.warning(f"Include path '{path}' doesn´t exist")
        return

    if not self.is_valid_folder(path):
        DocGenerator.LOGGER.debug(f"Ignoring files on '{path}'")
        return

    # next(os.walk(...)) yields only this directory level; deeper levels are
    # handled by the explicit recursion at the bottom of this method.
    (root, folders, files) = next(os.walk(path))

    # First pass: look for a group file. If one parses successfully, its id becomes
    # the parent group for every test in this folder.
    # NOTE(review): the break appears to stop at the first group file found — confirm
    # against the original indentation, which this diff view does not preserve.
    for file in files:
        if self.is_group_file(file):
            new_group = self.create_group(os.path.join(root, file), group_id)
            if new_group:
                group_id = new_group
            break

    # Second pass: document every valid test file under the (possibly updated) group id.
    for file in files:
        if self.is_valid_file(file):
            self.create_test(os.path.join(root, file), group_id)

    # Recurse into subfolders with the current group id.
    for folder in folders:
        self.parse_folder(os.path.join(root, folder), group_id)

Expand All @@ -188,7 +216,8 @@ def locate_test(self):
brief: try to get the test path
"""
complete_test_name = f"{self.conf.test_name}.py"
logging.info(f"Looking for {complete_test_name}")
DocGenerator.LOGGER.info(f"Looking for {complete_test_name}")

for root, dirnames, filenames in os.walk(self.conf.project_path, topdown=True):
for filename in filenames:
if filename == complete_test_name:
Expand All @@ -211,13 +240,15 @@ def print_test_info(self, test):
for field in self.conf.module_info:
for name, schema_field in field.items():
test_info[name] = test[schema_field]

for field in self.conf.test_info:
for name, schema_field in field.items():
test_info[name] = test['tests'][0][schema_field]

# If output path does not exist, it is created
if not os.path.exists(self.conf.documentation_path):
os.mkdir(self.conf.documentation_path)

# Dump data
with open(os.path.join(self.conf.documentation_path, f"{self.conf.test_name}.json"), 'w') as fp:
fp.write(json.dumps(test_info, indent=4))
Expand All @@ -227,6 +258,7 @@ def print_test_info(self, test):
for field in self.conf.module_info:
for name, schema_field in field.items():
print(str(name)+": "+str(test[schema_field]))

for field in self.conf.test_info:
for name, schema_field in field.items():
print(str(name)+": "+str(test['tests'][0][schema_field]))
def run(self):
    """
    brief: Runs the documentation generation according to the configured mode.
    Default mode: cleans the output folder and parses every configured include path.
    Single test mode: locates the requested test and parses only that file.
    """
    if self.conf.mode == mode.DEFAULT:
        DocGenerator.LOGGER.info("Starting documentation parsing")
        DocGenerator.LOGGER.debug(f"Cleaning doc folder located in {self.conf.documentation_path}")
        clean_folder(self.conf.documentation_path)

        for path in self.conf.include_paths:
            # scan_path is read by the get_*_doc_path helpers to mirror the tree layout.
            self.scan_path = path
            DocGenerator.LOGGER.debug(f"Going to parse files on '{path}'")
            self.parse_folder(path, self.__id_counter)

    elif self.conf.mode == mode.SINGLE_TEST:
        DocGenerator.LOGGER.info("Starting test documentation parsing")
        self.test_path = self.locate_test()

        if self.test_path:
            DocGenerator.LOGGER.debug(f"Parsing '{self.conf.test_name}'")
            # Group id 0: a single test is documented without a parent group.
            self.create_test(self.test_path, 0)
        else:
            DocGenerator.LOGGER.error(f"'{self.conf.test_name}' could not be found")
26 changes: 12 additions & 14 deletions deps/wazuh_testing/wazuh_testing/qa_docs/lib/code_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,11 @@
import os
import re
import yaml

from wazuh_testing.qa_docs.lib.pytest_wrap import PytestWrap
from wazuh_testing.qa_docs.lib.utils import remove_inexistent
import warnings
import logging
from wazuh_testing.qa_docs import QADOCS_LOGGER
from wazuh_testing.tools.logging import Logging

INTERNAL_FIELDS = ['id', 'group_id', 'name']
STOP_FIELDS = ['tests', 'test_cases']
Expand All @@ -24,6 +25,8 @@ class CodeParser:
"""
brief: Class that parses the content of the test files.
"""
LOGGER = Logging.get_logger(QADOCS_LOGGER)

def __init__(self, config):
self.conf = config
self.pytest = PytestWrap()
Expand Down Expand Up @@ -70,13 +73,10 @@ def parse_comment(self, function):

except Exception as inst:
if hasattr(function, 'name'):
warnings.warn(f"Failed to parse comment of function '{function.name}'' from module {self.scan_file}. \
Error: {inst}", stacklevel=2)
logging.warning(f"Failed to parse comment of function '{function.name}'' from module {self.scan_file}. \
Error: {inst}")
CodeParser.LOGGER.warning(f"Failed to parse test documentation in {function.name} "
"from module {self.scan_file}. Error: {inst}")
else:
warnings.warn(f"Failed to parse comment of module {self.scan_file}. Error: {inst}", stacklevel=2)
logging.warning(f"Failed to parse comment of module {self.scan_file}. Error: {inst}")
CodeParser.LOGGER.warning(f"Failed to parse module documentation in {self.scan_file}. Error: {inst}")
doc = None

return doc
Expand All @@ -89,7 +89,7 @@ def parse_test(self, code_file, id, group_id):
-"id (integer): Id of the new test document"
-"group_id (integer): Id of the group where the new test document belongs."
"""
logging.debug(f"Parsing test file '{code_file}'")
CodeParser.LOGGER.debug(f"Parsing test file '{code_file}'")
self.scan_file = code_file
with open(code_file) as fd:
file_content = fd.read()
Expand Down Expand Up @@ -117,8 +117,7 @@ def parse_test(self, code_file, id, group_id):
functions_doc.append(function_doc)

if not functions_doc:
warnings.warn(f"Module '{module_doc['name']}' doesn´t contain any test function", stacklevel=2)
logging.warning(f"Module '{module_doc['name']}' doesn´t contain any test function")
CodeParser.LOGGER.warning(f"Module '{module_doc['name']}' doesn´t contain any test function")
else:
module_doc['tests'] = functions_doc

Expand All @@ -135,14 +134,13 @@ def parse_group(self, group_file, id, group_id):
-"group_id (integer): Id of the group where the new group document belongs."
"""
MD_HEADER = "# "
logging.debug(f"Parsing group file '{group_file}'")
CodeParser.LOGGER.debug(f"Parsing group file '{group_file}'")
with open(group_file) as fd:
file_header = fd.readline()
file_content = fd.read()

if not file_header.startswith(MD_HEADER):
warnings.warn(f"Group file '{group_file}' doesn´t contain a valid header", stacklevel=2)
logging.warning(f"Group file '{group_file}' doesn´t contain a valid header")
CodeParser.LOGGER.warning(f"Group file '{group_file}' doesn´t contain a valid header")
return None

group_doc = {}
Expand Down
Loading