Merge pull request #485 from AutomationSolutionz/node-enhancements
Implement Automated Cleanup, Python Version Check, and Retry Mechanism for Failed Report Uploads
sazid authored Sep 10, 2024
2 parents 2f49700 + a633fee commit 7b6ce3c
Showing 4 changed files with 131 additions and 32 deletions.
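In outline, the retry mechanism added here persists each failed report upload to disk and replays it from a background thread until the server accepts it. The following is a minimal, self-contained sketch of that save-then-retry pattern; save_failed_report, retry_pending_reports, and the upload callable are illustrative stand-ins, not names from this diff:

import json
import shutil
import time
from pathlib import Path

FAILED_DIR = Path("AutomationLog") / "failed_uploads"  # layout introduced by this PR

def save_failed_report(run_id, execution_report):
    # Persist everything needed to replay the upload after a failure.
    run_dir = FAILED_DIR / run_id
    run_dir.mkdir(parents=True, exist_ok=True)
    (run_dir / "report.json").write_text(json.dumps({
        "run_id": run_id,
        "execution_report": json.dumps(execution_report),
    }))

def retry_pending_reports(upload):
    # Replay every saved report; remove a folder only once the server answers 200.
    FAILED_DIR.mkdir(parents=True, exist_ok=True)
    while True:
        for run_dir in (p for p in FAILED_DIR.iterdir() if p.is_dir()):
            report = json.loads((run_dir / "report.json").read_text())
            if upload(report).status_code == 200:
                shutil.rmtree(run_dir)  # success: drop the persisted copy
        time.sleep(120)  # same two-minute cadence as retry_failed_report_upload below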
78 changes: 78 additions & 0 deletions Framework/MainDriverApi.py
@@ -45,6 +45,9 @@
from rich.console import Console
from rich.box import ASCII_DOUBLE_HEAD, DOUBLE
from rich.padding import Padding
from jinja2 import Environment, FileSystemLoader
from time import sleep

rich_print = Console().print

top_path = os.path.dirname(os.getcwd())
@@ -1533,6 +1536,39 @@ def upload_reports_and_zips(Userid, temp_ini_file, run_id):
CommonUtil.Exception_Handler(sys.exc_info())
time.sleep(4)
else:
try:
    ## Create a folder named after run_id inside the failed_uploads directory
    failed_upload_dir = Path(temp_ini_file).parent / 'failed_uploads'
    os.makedirs(failed_upload_dir, exist_ok=True)

    failed_run_id_dir = failed_upload_dir / run_id
    os.makedirs(failed_run_id_dir, exist_ok=True)

    ## Create a 'files' subfolder inside the run_id folder
    if perf_report_html:
        failed_files_dir = failed_run_id_dir / "files"
        os.makedirs(failed_files_dir, exist_ok=True)

        ## Copy the performance report into the 'files' subfolder
        failed_upload_filename = os.path.basename(perf_report_html.name)
        shutil.copy(perf_report_html.name, os.path.join(failed_files_dir, failed_upload_filename))
    else:
        failed_upload_filename = None

    ## Persist everything needed to replay the upload later
    failed_report_json = {
        "run_id": run_id,
        "method": "POST",
        "URL": "create_report_log_api",
        "execution_report": json.dumps(tc_report),
        "processed_tc_id": processed_tc_id,
        "perf_filepath": failed_upload_filename,
    }

    failed_report_json_path = failed_run_id_dir / "report.json"
    with open(failed_report_json_path, 'w') as file:
        file.write(json.dumps(failed_report_json))
except Exception:
    CommonUtil.ExecLog(sModuleInfo, "Could not save the report of run_id '%s' for a later retry" % run_id, 3)
CommonUtil.ExecLog(sModuleInfo, "Could not upload the report of run_id '%s' to the server" % run_id, 3)

zip_files = [os.path.join(zip_dir, f) for f in os.listdir(zip_dir) if f.endswith(".zip")]
@@ -1592,6 +1628,48 @@ def upload_reports_and_zips(Userid, temp_ini_file, run_id):
except:
CommonUtil.Exception_Handler(sys.exc_info())

def retry_failed_report_upload():
    sModuleInfo = inspect.currentframe().f_code.co_name + " : " + MODULE_NAME
    while True:
        try:
            failed_report_dir = PROJECT_ROOT / 'AutomationLog' / 'failed_uploads'
            os.makedirs(failed_report_dir, exist_ok=True)
            folders = [entry.name for entry in failed_report_dir.iterdir() if entry.is_dir()]

            if not folders:
                return

            for folder in folders:
                report_json_path = failed_report_dir / folder / 'report.json'
                with open(report_json_path) as report_file:
                    report_json = json.load(report_file)

                if not report_json.get('perf_filepath'):
                    res = RequestFormatter.request(
                        "post",
                        RequestFormatter.form_uri("create_report_log_api/"),
                        data={"execution_report": report_json.get('execution_report')},
                        verify=False,
                    )
                else:
                    perf_filepath = failed_report_dir / folder / 'files' / report_json.get('perf_filepath')
                    with open(perf_filepath, 'rb') as perf_file:
                        res = RequestFormatter.request(
                            "post",
                            RequestFormatter.form_uri("create_report_log_api/"),
                            data={
                                "execution_report": report_json.get('execution_report'),
                                "processed_tc_id": report_json.get('processed_tc_id'),
                            },
                            files=[("file", perf_file)],
                            verify=False,
                        )

                if res.status_code == 200:
                    CommonUtil.ExecLog(sModuleInfo, f"Successfully uploaded the execution report of run_id {report_json.get('run_id')}", 1)
                    shutil.rmtree(failed_report_dir / folder)
                else:
                    CommonUtil.ExecLog(sModuleInfo, f"Unable to upload the execution report of run_id {report_json.get('run_id')}", 1)
        except Exception as e:
            CommonUtil.ExecLog(sModuleInfo, str(e), 3)

        sleep(120)

def split_testcases(run_id_info, max_tc_in_single_session):
import copy
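For orientation, a run whose report upload fails now leaves a structure like this under AutomationLog/failed_uploads/ (file names are illustrative; the files/ subfolder exists only when a performance report was produced):

failed_uploads/
└── <run_id>/
    ├── report.json          # run_id, method, URL, execution_report, processed_tc_id, perf_filepath
    └── files/
        └── perf_report.html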
2 changes: 2 additions & 0 deletions Framework/deploy_handler/long_poll_handler.py
@@ -6,6 +6,7 @@
import random
import requests
from colorama import Fore
import threading

from Framework.Utilities import RequestFormatter

@@ -73,6 +74,7 @@ def run(self, host: str) -> None:

self.on_connect_callback(reconnect)


try:
reconnect = True
resp = RequestFormatter.request("get", host, verify=False)
22 changes: 22 additions & 0 deletions Framework/module_installer.py
@@ -44,6 +44,28 @@ def get_req_list():
req_list.append(i.strip())
return req_list

def check_min_python_version(min_python_version, show_warning):
    import warnings

    # Minimum required version, e.g. "3.11" -> (3, 11)
    version, subversion = map(int, min_python_version.split('.'))
    required_version = (version, subversion)

    # Get the current Python major/minor version
    current_version = sys.version_info[:2]

    # Check if the current version is less than the required version
    if current_version < required_version:
        if not show_warning:
            sys.stderr.write(f"Python {required_version[0]}.{required_version[1]} or higher is required.\n")
            sys.exit(1)
        else:
            warning_message = (
                f"Warning: You are using Python {current_version[0]}.{current_version[1]}. "
                f"Python {required_version[0]}.{required_version[1]} or higher is recommended. "
                f"Please update your Python version by 28-02-2025."
            )
            # Show the warning in yellow
            warnings.warn(f"\033[93m{warning_message}\033[0m")

def install_missing_modules(req_list=None):
"""
Purpose: This function will check all the installed modules, compare with what is in requirements-win.txt file
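node_cli.py (next file) calls this check with show_warning=True at startup. A short usage sketch of the two modes, assuming the module is importable as Framework.module_installer:

from Framework.module_installer import check_min_python_version

# Warning mode: prints a yellow warning on interpreters older than 3.11 and continues.
check_min_python_version(min_python_version="3.11", show_warning=True)

# Strict mode: writes to stderr and exits with status 1 on interpreters older than 3.11.
check_min_python_version(min_python_version="3.11", show_warning=False)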
61 changes: 29 additions & 32 deletions node_cli.py
@@ -13,7 +13,7 @@
from datetime import datetime as dt

import time

import threading

# Disable WebdriverManager SSL verification.
os.environ['WDM_SSL_VERIFY'] = '0'
@@ -27,7 +27,9 @@
print(version_path.read_text().strip())
print("[Python version]")
print("Python " + platform.python_version() + "(" + platform.architecture()[0] + ")\n")
-from Framework.module_installer import install_missing_modules,update_outdated_modules
+from Framework.module_installer import check_min_python_version, install_missing_modules, update_outdated_modules
+
+check_min_python_version(min_python_version="3.11", show_warning=True)
install_missing_modules()

# Conditionally monkey-patch datetime module to include the `fromisoformat` method.
@@ -389,6 +391,10 @@ def Login(cli=False, run_once=False, log_dir=None):
}
)
node_id = CommonUtil.MachineInfo().getLocalUser().lower()
# Start a background daemon thread that retries any report uploads that failed earlier
from Framework.MainDriverApi import retry_failed_report_upload
report_thread = threading.Thread(target=retry_failed_report_upload, daemon=True)
report_thread.start()

RunProcess(node_id, run_once=run_once, log_dir=log_dir)

if run_once:
@@ -926,39 +932,30 @@ def get_subfolders_created_before_n_days(folder_path, log_delete_interval):

folder_path = os.path.dirname(os.path.abspath(__file__)).replace(os.sep + "Framework", os.sep + '') + os.sep + 'AutomationLog'
log_delete_interval = ConfigModule.get_config_value("Advanced Options", "log_delete_interval")
-if log_delete_interval:
-    auto_log_subfolders = get_subfolders_created_before_n_days(folder_path, int(log_delete_interval))
-    auto_log_subfolders = [subfolder for subfolder in auto_log_subfolders if subfolder not in ['attachments', 'attachments_db', 'outdated_modules.json', 'temp_config.ini']]
-
-    if auto_log_subfolders:
-        for subfolder in auto_log_subfolders:
-            shutil.rmtree(subfolder)
-        print(f'automation_log_cleanup: deleted {len(auto_log_subfolders)} that are older than {log_delete_interval} days')
-
-folder_path = os.path.dirname(os.path.abspath(__file__)).replace(os.sep + "Framework",
-                                                                 os.sep + '') + os.sep + 'AutomationLog'
-log_date_str = config.get('Advanced Options', {}).get('last_log_delete_date', '')
-log_delete_interval = config.get('Advanced Options', {}).get('log_delete_interval', '')
-if log_date_str:
-    log_config_date = date.fromisoformat(log_date_str)
-    current_date = datetime.date.today()
-    time_difference = (current_date - log_config_date).days
-    if time_difference > int(log_delete_interval):
-        print("Cleaning Up AutomationLog Folder...")
-        for root, dirs, files in os.walk(folder_path, topdown=False):
-            for dir_name in dirs:
-                folder = os.path.join(root, dir_name)
-                shutil.rmtree(folder)
-        config.setdefault('Advanced Options', {})['last_log_delete_date'] = str(date.today())
-        config.write()
-    else:
-        pass
-        # remaining_time = 7 - time_difference
-        # print(f"AutomationLog Folder will be deleted after {remaining_time+1} Days")
-else:
-    config.setdefault('Advanced Options', {})['last_log_delete_date'] = str(date.today())
-    config.write()
-    # print("AutomationLog Folder Not Found")
+# Default the automation log delete interval to 7 days when unset or invalid
+if not isinstance(log_delete_interval, int) or log_delete_interval <= 0:
+    log_delete_interval = 7
+
+def delete_old_automationlog_folders():
+    while True:
+        auto_log_subfolders = get_subfolders_created_before_n_days(folder_path, int(log_delete_interval))
+        auto_log_subfolders = [subfolder for subfolder in auto_log_subfolders if subfolder not in ['attachments', 'attachments_db', 'outdated_modules.json', 'temp_config.ini', 'failed_uploads']]
+
+        for subfolder in auto_log_subfolders:
+            shutil.rmtree(subfolder)
+        if auto_log_subfolders:
+            print(f'automation_log_cleanup: deleted {len(auto_log_subfolders)} folders that are older than {log_delete_interval} days')
+
+        # Check every 5 hours for old automation logs
+        time.sleep(60 * 60 * 5)
+
+# Create a background thread for deleting old automation logs
+thread = threading.Thread(target=delete_old_automationlog_folders, daemon=True)
+thread.start()

if show_browser_log:
CommonUtil.show_browser_log = True
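The cleanup thread relies on get_subfolders_created_before_n_days, whose body lies outside this diff. A minimal sketch consistent with how it is called (a folder path plus an interval in days, returning the subfolders older than that) might look as follows; the real helper may differ in detail, e.g. whether it returns names or full paths:

import os
import time

def get_subfolders_created_before_n_days(folder_path, log_delete_interval):
    # Keep direct subfolders whose creation time is more than log_delete_interval days ago.
    cutoff = time.time() - int(log_delete_interval) * 24 * 60 * 60
    return [entry.path for entry in os.scandir(folder_path)
            if entry.is_dir() and entry.stat().st_ctime < cutoff]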
